text
stringlengths
7
1.01M
/* ----------------------------------------------------------------------------
 * This file was automatically generated by SWIG (http://www.swig.org).
 * Version 4.0.2
 *
 * Do not make changes to this file unless you know what you are doing--modify
 * the SWIG interface file instead.
 * ----------------------------------------------------------------------------- */

package org.opensim.modeling;

/**
 * SWIG-generated Java proxy for a native {@code std::map<std::string, DataAdapter>}.
 * Presents the native map through the {@link java.util.AbstractMap} contract;
 * every operation delegates to the C++ object via {@code opensimCommonJNI}.
 * NOTE(review): generated code — any fixes belong in the SWIG interface file.
 */
public class StdMapStringDataAdapter extends java.util.AbstractMap<String, DataAdapter> {
  // Pointer to the underlying C++ map instance.
  private transient long swigCPtr;
  // True when this Java object owns (and must free) the native instance.
  protected transient boolean swigCMemOwn;

  public StdMapStringDataAdapter(long cPtr, boolean cMemoryOwn) {
    swigCMemOwn = cMemoryOwn;
    swigCPtr = cPtr;
  }

  /** Returns the native pointer of {@code obj}, or 0 for null. */
  public static long getCPtr(StdMapStringDataAdapter obj) {
    return (obj == null) ? 0 : obj.swigCPtr;
  }

  @SuppressWarnings("deprecation")
  protected void finalize() {
    delete();
  }

  /** Frees the native map if owned; safe to call more than once. */
  public synchronized void delete() {
    if (swigCPtr != 0) {
      if (swigCMemOwn) {
        swigCMemOwn = false;
        opensimCommonJNI.delete_StdMapStringDataAdapter(swigCPtr);
      }
      swigCPtr = 0;
    }
  }

  public int size() {
    return sizeImpl();
  }

  // Non-String keys can never be present in a std::map<std::string, ...>.
  public boolean containsKey(java.lang.Object key) {
    if (!(key instanceof String)) {
      return false;
    }
    return containsImpl((String)key);
  }

  /** Returns the mapped value, or null when absent or the key is not a String. */
  public DataAdapter get(java.lang.Object key) {
    if (!(key instanceof String)) {
      return null;
    }
    Iterator itr = find((String) key);
    if (itr.isNot(end())) {
      return itr.getValue();
    }
    return null;
  }

  /** Inserts or replaces; returns the previous value, or null if the key was new. */
  public DataAdapter put(String key, DataAdapter value) {
    Iterator itr = find((String) key);
    if (itr.isNot(end())) {
      DataAdapter oldValue = itr.getValue();
      itr.setValue(value);
      return oldValue;
    } else {
      putUnchecked(key, value);
      return null;
    }
  }

  /** Removes the entry for {@code key}; returns the removed value or null. */
  public DataAdapter remove(java.lang.Object key) {
    if (!(key instanceof String)) {
      return null;
    }
    Iterator itr = find((String) key);
    if (itr.isNot(end())) {
      DataAdapter oldValue = itr.getValue();
      removeUnchecked(itr);
      return oldValue;
    } else {
      return null;
    }
  }

  /**
   * Materializes a snapshot of the native map as a HashSet of entries.
   * Each entry holds a live native iterator, so {@code setValue} writes
   * through to the C++ map.
   */
  public java.util.Set<Entry<String, DataAdapter>> entrySet() {
    java.util.Set<Entry<String, DataAdapter>> setToReturn =
        new java.util.HashSet<Entry<String, DataAdapter>>();
    Iterator itr = begin();
    final Iterator end = end();
    while (itr.isNot(end)) {
      setToReturn.add(new Entry<String, DataAdapter>() {
        private Iterator iterator;

        // Anonymous classes cannot take constructor args; init() injects the iterator.
        private Entry<String, DataAdapter> init(Iterator iterator) {
          this.iterator = iterator;
          return this;
        }

        public String getKey() {
          return iterator.getKey();
        }

        public DataAdapter getValue() {
          return iterator.getValue();
        }

        public DataAdapter setValue(DataAdapter newValue) {
          DataAdapter oldValue = iterator.getValue();
          iterator.setValue(newValue);
          return oldValue;
        }
      }.init(itr));
      itr = itr.getNextUnchecked();
    }
    return setToReturn;
  }

  /** Creates an empty native map owned by this proxy. */
  public StdMapStringDataAdapter() {
    this(opensimCommonJNI.new_StdMapStringDataAdapter__SWIG_0(), true);
  }

  /** Copy-constructs a native map from {@code other}. */
  public StdMapStringDataAdapter(StdMapStringDataAdapter other) {
    this(opensimCommonJNI.new_StdMapStringDataAdapter__SWIG_1(StdMapStringDataAdapter.getCPtr(other), other), true);
  }

  /** Proxy over a native std::map iterator; lifetime managed like the outer class. */
  static protected class Iterator {
    private transient long swigCPtr;
    protected transient boolean swigCMemOwn;

    public Iterator(long cPtr, boolean cMemoryOwn) {
      swigCMemOwn = cMemoryOwn;
      swigCPtr = cPtr;
    }

    public static long getCPtr(Iterator obj) {
      return (obj == null) ? 0 : obj.swigCPtr;
    }

    @SuppressWarnings("deprecation")
    protected void finalize() {
      delete();
    }

    public synchronized void delete() {
      if (swigCPtr != 0) {
        if (swigCMemOwn) {
          swigCMemOwn = false;
          opensimCommonJNI.delete_StdMapStringDataAdapter_Iterator(swigCPtr);
        }
        swigCPtr = 0;
      }
    }

    // "Unchecked": advancing past end() is undefined on the native side.
    private StdMapStringDataAdapter.Iterator getNextUnchecked() {
      return new StdMapStringDataAdapter.Iterator(opensimCommonJNI.StdMapStringDataAdapter_Iterator_getNextUnchecked(swigCPtr, this), true);
    }

    private boolean isNot(StdMapStringDataAdapter.Iterator other) {
      return opensimCommonJNI.StdMapStringDataAdapter_Iterator_isNot(swigCPtr, this, StdMapStringDataAdapter.Iterator.getCPtr(other), other);
    }

    private String getKey() {
      return opensimCommonJNI.StdMapStringDataAdapter_Iterator_getKey(swigCPtr, this);
    }

    // NOTE(review): the returned wrapper is created with cMemoryOwn=true — verify
    // the SWIG typemap returns a copy/new'd object, otherwise this risks a double free.
    private DataAdapter getValue() {
      long cPtr = opensimCommonJNI.StdMapStringDataAdapter_Iterator_getValue(swigCPtr, this);
      return (cPtr == 0) ? null : new DataAdapter(cPtr, true);
    }

    private void setValue(DataAdapter newValue) {
      opensimCommonJNI.StdMapStringDataAdapter_Iterator_setValue(swigCPtr, this, DataAdapter.getCPtr(newValue), newValue);
    }
  }

  public boolean isEmpty() {
    return opensimCommonJNI.StdMapStringDataAdapter_isEmpty(swigCPtr, this);
  }

  public void clear() {
    opensimCommonJNI.StdMapStringDataAdapter_clear(swigCPtr, this);
  }

  private StdMapStringDataAdapter.Iterator find(String key) {
    return new StdMapStringDataAdapter.Iterator(opensimCommonJNI.StdMapStringDataAdapter_find(swigCPtr, this, key), true);
  }

  private StdMapStringDataAdapter.Iterator begin() {
    return new StdMapStringDataAdapter.Iterator(opensimCommonJNI.StdMapStringDataAdapter_begin(swigCPtr, this), true);
  }

  private StdMapStringDataAdapter.Iterator end() {
    return new StdMapStringDataAdapter.Iterator(opensimCommonJNI.StdMapStringDataAdapter_end(swigCPtr, this), true);
  }

  private int sizeImpl() {
    return opensimCommonJNI.StdMapStringDataAdapter_sizeImpl(swigCPtr, this);
  }

  private boolean containsImpl(String key) {
    return opensimCommonJNI.StdMapStringDataAdapter_containsImpl(swigCPtr, this, key);
  }

  private void putUnchecked(String key, DataAdapter value) {
    opensimCommonJNI.StdMapStringDataAdapter_putUnchecked(swigCPtr, this, key, DataAdapter.getCPtr(value), value);
  }

  private void removeUnchecked(StdMapStringDataAdapter.Iterator itr) {
    opensimCommonJNI.StdMapStringDataAdapter_removeUnchecked(swigCPtr, this, StdMapStringDataAdapter.Iterator.getCPtr(itr), itr);
  }

}
package de.nevini.modules.osu.mappers;

import de.nevini.modules.osu.api.model.OsuApiBeatmap;
import de.nevini.modules.osu.data.*;
import de.nevini.modules.osu.model.*;
import lombok.NonNull;

import static de.nevini.modules.osu.mappers.OsuMapperUtils.*;

/**
 * Maps between the osu! API beatmap model, the persistence data wrappers,
 * and the domain {@link OsuBeatmap} type. All mappings are purely positional
 * constructor calls — argument order must match the target constructors exactly.
 */
public class OsuBeatmapMapper {

    /**
     * Converts an API beatmap (plus an optional score) into the persistence wrapper.
     * When {@code score} is present, the difficulty row records the score's
     * mode/mods/pp; otherwise it falls back to the beatmap's mode, no mods, and null pp.
     */
    public static @NonNull OsuBeatmapDataWrapper map(@NonNull OsuApiBeatmap beatmap, OsuScore score) {
        return new OsuBeatmapDataWrapper(
                new OsuBeatmapData(
                        beatmap.getBeatmapId(),
                        beatmap.getBeatmapsetId(),
                        beatmap.getBpm(),
                        beatmap.getDifficultySize(),
                        beatmap.getDifficultyOverall(),
                        beatmap.getDifficultyApproach(),
                        beatmap.getDifficultyDrain(),
                        beatmap.getHitLength(),
                        beatmap.getTotalLength(),
                        beatmap.getVersion(),
                        beatmap.getFileMd5(),
                        beatmap.getMode(),
                        beatmap.getFavouriteCount(),
                        beatmap.getRating(),
                        beatmap.getPlayCount(),
                        beatmap.getPassCount(),
                        beatmap.getCountNormal(),
                        beatmap.getCountSlider(),
                        beatmap.getCountSpinner()
                ),
                new OsuBeatmapsetData(
                        beatmap.getBeatmapsetId(),
                        beatmap.getApproved(),
                        convertDate(beatmap.getSubmitDate()),
                        convertDate(beatmap.getApprovedDate()),
                        convertDate(beatmap.getLastUpdate()),
                        beatmap.getArtist(),
                        beatmap.getCreatorName(),
                        beatmap.getCreatorId(),
                        beatmap.getSource(),
                        beatmap.getGenre(),
                        beatmap.getLanguage(),
                        beatmap.getTitle(),
                        beatmap.getTags(),
                        beatmap.getDownloadUnavailable(),
                        beatmap.getAudioUnavailable()
                ),
                new OsuBeatmapDifficultyData(
                        beatmap.getBeatmapId(),
                        // score-specific values win over beatmap defaults
                        score != null ? convertMode(score.getMode()) : beatmap.getMode(),
                        score != null ? convertMods(score.getMods()) : OsuMod.NONE,
                        beatmap.getDifficultyRating(),
                        beatmap.getDifficultyAim(),
                        beatmap.getDifficultySpeed(),
                        beatmap.getMaxCombo(),
                        score != null ? score.getPp() : null
                )
        );
    }

    /**
     * Reassembles the domain {@link OsuBeatmap} from the three persistence rows
     * (beatmap, beatmapset, difficulty) inside the wrapper.
     */
    public static @NonNull OsuBeatmap map(@NonNull OsuBeatmapDataWrapper wrapper) {
        return new OsuBeatmap(
                convertApproved(wrapper.getBeatmapset().getApproved()),
                wrapper.getBeatmapset().getSubmitDate(),
                wrapper.getBeatmapset().getApprovedDate(),
                wrapper.getBeatmapset().getLastUpdate(),
                wrapper.getBeatmapset().getArtist(),
                wrapper.getBeatmap().getBeatmapId(),
                wrapper.getBeatmap().getBeatmapsetId(),
                wrapper.getBeatmap().getBpm(),
                wrapper.getBeatmapset().getCreatorName(),
                wrapper.getBeatmapset().getCreatorId(),
                wrapper.getDifficulty().getDifficultyRating(),
                wrapper.getDifficulty().getDifficultyAim(),
                wrapper.getDifficulty().getDifficultySpeed(),
                wrapper.getBeatmap().getDifficultySize(),
                wrapper.getBeatmap().getDifficultyOverall(),
                wrapper.getBeatmap().getDifficultyApproach(),
                wrapper.getBeatmap().getDifficultyDrain(),
                wrapper.getBeatmap().getHitLength(),
                wrapper.getBeatmapset().getSource(),
                convertGenre(wrapper.getBeatmapset().getGenre()),
                convertLanguage(wrapper.getBeatmapset().getLanguage()),
                wrapper.getBeatmapset().getTitle(),
                wrapper.getBeatmap().getTotalLength(),
                wrapper.getBeatmap().getVersion(),
                wrapper.getBeatmap().getFileMd5(),
                // beatmap mode and difficulty mode are distinct fields here
                convertMode(wrapper.getBeatmap().getMode()),
                convertMode(wrapper.getDifficulty().getMode()),
                convertMods(wrapper.getDifficulty().getMods()),
                wrapper.getBeatmapset().getTags(),
                wrapper.getBeatmap().getFavouriteCount(),
                wrapper.getBeatmap().getRating(),
                wrapper.getBeatmap().getPlayCount(),
                wrapper.getBeatmap().getPassCount(),
                wrapper.getBeatmap().getCountNormal(),
                wrapper.getBeatmap().getCountSlider(),
                wrapper.getBeatmap().getCountSpinner(),
                wrapper.getDifficulty().getMaxCombo(),
                wrapper.getDifficulty().getMaxPp(),
                wrapper.getBeatmapset().getDownloadUnavailable(),
                wrapper.getBeatmapset().getAudioUnavailable()
        );
    }

    /**
     * Maps a flat database view row to the domain {@link OsuBeatmap}.
     */
    public static OsuBeatmap map(@NonNull OsuBeatmapViewData view) {
        return new OsuBeatmap(
                convertApproved(view.getApproved()),
                view.getSubmitDate(),
                view.getApprovedDate(),
                view.getLastUpdate(),
                view.getArtist(),
                view.getBeatmapId(),
                view.getBeatmapsetId(),
                view.getBpm(),
                view.getCreatorName(),
                view.getCreatorId(),
                view.getDifficultyRating(),
                view.getDifficultyAim(),
                view.getDifficultySpeed(),
                view.getDifficultySize(),
                view.getDifficultyOverall(),
                view.getDifficultyApproach(),
                view.getDifficultyDrain(),
                view.getHitLength(),
                view.getSource(),
                convertGenre(view.getGenre()),
                convertLanguage(view.getLanguage()),
                view.getTitle(),
                view.getTotalLength(),
                view.getVersion(),
                view.getFileMd5(),
                // NOTE(review): the same view.getMode() fills both the beatmap-mode and
                // difficulty-mode slots (the wrapper overload uses two different sources)
                // — presumably the view collapses them into one column; confirm schema.
                convertMode(view.getMode()),
                convertMode(view.getMode()),
                convertMods(view.getMods()),
                view.getTags(),
                view.getFavouriteCount(),
                view.getRating(),
                view.getPlayCount(),
                view.getPassCount(),
                view.getCountNormal(),
                view.getCountSlider(),
                view.getCountSpinner(),
                view.getMaxCombo(),
                view.getMaxPp(),
                view.getDownloadUnavailable(),
                view.getAudioUnavailable()
        );
    }

    /** Resolves an {@link OsuStatus} by its numeric id; null for null/unknown ids. */
    private static OsuStatus convertApproved(Integer value) {
        if (value != null) {
            for (OsuStatus e : OsuStatus.values()) {
                if (e.getId() == value) {
                    return e;
                }
            }
        }
        return null;
    }

    /** Resolves an {@link OsuGenre} by its numeric id; null for null/unknown ids. */
    private static OsuGenre convertGenre(Integer value) {
        if (value != null) {
            for (OsuGenre e : OsuGenre.values()) {
                if (e.getId() == value) {
                    return e;
                }
            }
        }
        return null;
    }

    /** Resolves an {@link OsuLanguage} by its numeric id; null for null/unknown ids. */
    private static OsuLanguage convertLanguage(Integer value) {
        if (value != null) {
            for (OsuLanguage e : OsuLanguage.values()) {
                if (e.getId() == value) {
                    return e;
                }
            }
        }
        return null;
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.cameltooling.dap.internal.model.variables.message; import org.apache.camel.api.management.mbean.ManagedBacklogDebuggerMBean; import com.github.cameltooling.dap.internal.model.variables.CamelVariable; public class MessageBodyCamelVariable extends CamelVariable { public static final String NAME = "Body"; private String breakpointId; public MessageBodyCamelVariable(String breakpointId, String body) { this.breakpointId = breakpointId; setName(NAME); setValue(body); } @Override public void updateValue(ManagedBacklogDebuggerMBean debugger, String value) { debugger.setMessageBodyOnBreakpoint(breakpointId, value); setValue(value); } }
/* * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending */ package io.deephaven.engine.table.impl; import io.deephaven.chunk.LongChunk; import io.deephaven.chunk.WritableChunk; import io.deephaven.chunk.WritableLongChunk; import io.deephaven.chunk.WritableObjectChunk; import io.deephaven.chunk.attributes.Values; import io.deephaven.engine.rowset.RowSet; import io.deephaven.engine.rowset.RowSetBuilderRandom; import io.deephaven.engine.rowset.RowSetFactory; import io.deephaven.engine.rowset.WritableRowSet; import io.deephaven.engine.rowset.chunkattributes.RowKeys; import io.deephaven.engine.table.ColumnSource; import io.deephaven.engine.table.Table; import io.deephaven.engine.table.impl.perf.PerformanceEntry; import io.deephaven.engine.table.impl.perf.UpdatePerformanceTracker; import io.deephaven.engine.table.impl.replay.Replayer; import io.deephaven.engine.table.impl.sources.FillUnordered; import io.deephaven.engine.updategraph.UpdateGraphProcessor; import io.deephaven.engine.updategraph.UpdateSourceRegistrar; import io.deephaven.engine.util.TableTools; import io.deephaven.function.Numeric; import io.deephaven.internal.log.LoggerFactory; import io.deephaven.io.logger.Logger; import io.deephaven.time.DateTime; import io.deephaven.time.DateTimeUtils; import io.deephaven.time.TimeProvider; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; import static io.deephaven.util.type.TypeUtils.box; /** * A TimeTable adds rows at a fixed interval with a single column named "Timestamp". * * To create a TimeTable, you should use the {@link TableTools#timeTable} family of methods. 
 */
public class TimeTable extends QueryTable implements Runnable {

    private static final Logger log = LoggerFactory.getLogger(TimeTable.class);

    /** Fluent builder for {@link TimeTable}; period must be set (> 0 nanos). */
    public static class Builder {
        private UpdateSourceRegistrar registrar = UpdateGraphProcessor.DEFAULT;

        private TimeProvider timeProvider;

        private DateTime startTime;

        private long period;

        private boolean streamTable;

        public Builder registrar(UpdateSourceRegistrar registrar) {
            this.registrar = registrar;
            return this;
        }

        public Builder timeProvider(TimeProvider timeProvider) {
            this.timeProvider = timeProvider;
            return this;
        }

        public Builder startTime(DateTime startTime) {
            this.startTime = startTime;
            return this;
        }

        public Builder startTime(String startTime) {
            this.startTime = DateTimeUtils.convertDateTime(startTime);
            return this;
        }

        public Builder period(long period) {
            this.period = period;
            return this;
        }

        public Builder period(String period) {
            this.period = DateTimeUtils.expressionToNanos(period);
            return this;
        }

        public Builder streamTable(boolean streamTable) {
            this.streamTable = streamTable;
            return this;
        }

        public QueryTable build() {
            return new TimeTable(registrar,
                    timeProvider == null ? Replayer.getTimeProvider(null) : timeProvider,
                    startTime, period, streamTable);
        }
    }

    public static Builder newBuilder() {
        return new Builder();
    }

    private static final String TIMESTAMP = "Timestamp";
    // Highest row key added so far; -1 means no rows yet.
    private long lastIndex = -1;
    private final SyntheticDateTimeSource columnSource;
    private final TimeProvider timeProvider;
    // Performance-tracking entry wrapping each refresh cycle.
    private final PerformanceEntry entry;
    // Stream tables drop previously-delivered rows on each cycle; otherwise add-only.
    private final boolean isStreamTable;

    public TimeTable(UpdateSourceRegistrar registrar, TimeProvider timeProvider,
            @Nullable DateTime startTime, long period, boolean isStreamTable) {
        super(RowSetFactory.empty().toTracking(), initColumn(startTime, period));
        this.isStreamTable = isStreamTable;
        final String name = isStreamTable ? "TimeTableStream" : "TimeTable";
        this.entry = UpdatePerformanceTracker.getInstance()
                .getEntry(name + "(" + startTime + "," + period + ")");
        columnSource = (SyntheticDateTimeSource) getColumnSourceMap().get(TIMESTAMP);
        this.timeProvider = timeProvider;
        if (isStreamTable) {
            setAttribute(Table.STREAM_TABLE_ATTRIBUTE, Boolean.TRUE);
        } else {
            setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, Boolean.TRUE);
            setFlat();
        }
        // A known start time lets us populate initial rows before registration;
        // a null start time is lazily bound on the first refresh cycle.
        if (startTime != null) {
            refresh(false);
        }
        registrar.addSource(this);
    }

    private static Map<String, ColumnSource<?>> initColumn(DateTime firstTime, long period) {
        if (period <= 0) {
            throw new IllegalArgumentException("Invalid time period: " + period + " nanoseconds");
        }
        return Collections.singletonMap(TIMESTAMP, new SyntheticDateTimeSource(firstTime, period));
    }

    @Override
    public void run() {
        refresh(true);
    }

    /**
     * Advances lastIndex to cover all periods elapsed up to "now", inserts the
     * new row range, and (for stream tables) removes previously-delivered rows.
     */
    private void refresh(final boolean notifyListeners) {
        entry.onUpdateStart();
        try {
            final DateTime dateTime = timeProvider.currentTime();
            long rangeStart = lastIndex + 1;
            if (columnSource.startTime == null) {
                // First tick: bind the start time to the current period boundary.
                lastIndex = 0;
                columnSource.startTime = new DateTime(
                        Numeric.lowerBin(dateTime.getNanos(), columnSource.period));
            } else if (dateTime.compareTo(columnSource.startTime) >= 0) {
                lastIndex = Math.max(lastIndex,
                        DateTimeUtils.minus(dateTime, columnSource.startTime) / columnSource.period);
            }
            final boolean rowsAdded = rangeStart <= lastIndex;
            final boolean rowsRemoved = isStreamTable && getRowSet().isNonempty();
            if (rowsAdded || rowsRemoved) {
                // Capture added/removed ranges before mutating the row set.
                final RowSet addedRange = rowsAdded
                        ? RowSetFactory.fromRange(rangeStart, lastIndex)
                        : RowSetFactory.empty();
                final RowSet removedRange = rowsRemoved
                        ? RowSetFactory.fromRange(getRowSet().firstRowKey(), rangeStart - 1)
                        : RowSetFactory.empty();
                if (rowsAdded) {
                    getRowSet().writableCast().insertRange(rangeStart, lastIndex);
                }
                if (rowsRemoved) {
                    getRowSet().writableCast().removeRange(0, rangeStart - 1);
                }
                if (notifyListeners) {
                    notifyListeners(addedRange, removedRange, RowSetFactory.empty());
                }
            }
        } finally {
            entry.onUpdateEnd();
        }
    }

    @Override
    protected void destroy() {
        super.destroy();
        UpdateGraphProcessor.DEFAULT.removeSource(this);
    }

    /**
     * Column source that computes Timestamp = startTime + rowKey * period on
     * demand instead of storing values. Reinterpretable to long (nanos).
     */
    private static final class SyntheticDateTimeSource extends AbstractColumnSource<DateTime>
            implements ImmutableColumnSourceGetDefaults.LongBacked<DateTime>, FillUnordered {
        // Mutable: bound lazily by TimeTable.refresh when constructed with null.
        private DateTime startTime;
        private final long period;

        private SyntheticDateTimeSource(DateTime startTime, long period) {
            super(DateTime.class);
            this.startTime = startTime;
            this.period = period;
        }

        private DateTime computeDateTime(long rowKey) {
            return DateTimeUtils.plus(startTime, period * rowKey);
        }

        @Override
        public DateTime get(long rowKey) {
            return computeDateTime(rowKey);
        }

        private long computeNanos(long rowKey) {
            return startTime.getNanos() + period * rowKey;
        }

        @Override
        public long getLong(long rowKey) {
            return computeNanos(rowKey);
        }

        @Override
        public WritableRowSet match(boolean invertMatch, boolean usePrev, boolean caseInsensitive,
                RowSet selection, Object... keys) {
            if (startTime == null) {
                // there are no valid rows for this column source yet
                return RowSetFactory.empty();
            }
            final RowSetBuilderRandom matchingSet = RowSetFactory.builderRandom();
            for (Object o : keys) {
                if (!(o instanceof DateTime)) {
                    continue;
                }
                final DateTime key = (DateTime) o;
                // Only timestamps on the period grid at/after startTime can match.
                if (key.getNanos() % period != startTime.getNanos() % period
                        || DateTimeUtils.isBefore(key, startTime)) {
                    continue;
                }
                matchingSet.addKey(DateTimeUtils.minus(key, startTime) / period);
            }
            if (invertMatch) {
                try (final WritableRowSet matching = matchingSet.build()) {
                    return selection.minus(matching);
                }
            }
            final WritableRowSet matching = matchingSet.build();
            matching.retain(selection);
            return matching;
        }

        @Override
        public Map<DateTime, RowSet> getValuesMapping(RowSet subRange) {
            final Map<DateTime, RowSet> result = new LinkedHashMap<>();
            subRange.forAllRowKeys(
                    ii -> result.put(computeDateTime(ii), RowSetFactory.fromKeys(ii)));
            return result;
        }

        @Override
        public <ALTERNATE_DATA_TYPE> boolean allowsReinterpret(
                @NotNull final Class<ALTERNATE_DATA_TYPE> alternateDataType) {
            return alternateDataType == long.class;
        }

        @Override
        public <ALTERNATE_DATA_TYPE> ColumnSource<ALTERNATE_DATA_TYPE> doReinterpret(
                @NotNull Class<ALTERNATE_DATA_TYPE> alternateDataType) {
            // noinspection unchecked
            return (ColumnSource<ALTERNATE_DATA_TYPE>) new SyntheticDateTimeAsLongSource();
        }

        @Override
        public void fillChunkUnordered(@NotNull FillContext context,
                @NotNull WritableChunk<? super Values> dest,
                @NotNull LongChunk<? extends RowKeys> keys) {
            final WritableObjectChunk<DateTime, ? super Values> objectDest = dest.asWritableObjectChunk();
            objectDest.setSize(keys.size());
            for (int ii = 0; ii < keys.size(); ++ii) {
                objectDest.set(ii, computeDateTime(keys.get(ii)));
            }
        }

        @Override
        public void fillPrevChunkUnordered(@NotNull FillContext context,
                @NotNull WritableChunk<? super Values> dest,
                @NotNull LongChunk<? extends RowKeys> keys) {
            // Values are synthetic and never change for a given key: prev == current.
            fillChunkUnordered(context, dest, keys);
        }

        @Override
        public boolean providesFillUnordered() {
            return true;
        }

        /**
         * long-typed (nanos) view of the enclosing source; non-static so it
         * shares the lazily-bound startTime. Reinterprets back to the outer source.
         */
        private class SyntheticDateTimeAsLongSource extends AbstractColumnSource<Long>
                implements ImmutableColumnSourceGetDefaults.LongBacked<Long>, FillUnordered {
            SyntheticDateTimeAsLongSource() {
                super(Long.class);
            }

            @Override
            public Long get(long rowKey) {
                return box(computeNanos(rowKey));
            }

            @Override
            public long getLong(long rowKey) {
                return computeNanos(rowKey);
            }

            @Override
            public WritableRowSet match(boolean invertMatch, boolean usePrev, boolean caseInsensitive,
                    RowSet selection, Object... keys) {
                if (startTime == null) {
                    // there are no valid rows for this column source yet
                    return RowSetFactory.empty();
                }
                final RowSetBuilderRandom matchingSet = RowSetFactory.builderRandom();
                for (Object o : keys) {
                    if (!(o instanceof Long)) {
                        continue;
                    }
                    final long key = (Long) o;
                    // Same grid check as the DateTime variant, in raw nanos.
                    if (key % period != startTime.getNanos() % period
                            || key < startTime.getNanos()) {
                        continue;
                    }
                    matchingSet.addKey((key - startTime.getNanos()) / period);
                }
                if (invertMatch) {
                    try (final WritableRowSet matching = matchingSet.build()) {
                        return selection.minus(matching);
                    }
                }
                final WritableRowSet matching = matchingSet.build();
                matching.retain(selection);
                return matching;
            }

            @Override
            public Map<Long, RowSet> getValuesMapping(RowSet subRange) {
                final Map<Long, RowSet> result = new LinkedHashMap<>();
                subRange.forAllRowKeys(
                        ii -> result.put(box(computeNanos(ii)), RowSetFactory.fromKeys(ii)));
                return result;
            }

            @Override
            public <ALTERNATE_DATA_TYPE> boolean allowsReinterpret(
                    @NotNull final Class<ALTERNATE_DATA_TYPE> alternateDataType) {
                return alternateDataType == DateTime.class;
            }

            @Override
            public <ALTERNATE_DATA_TYPE> ColumnSource<ALTERNATE_DATA_TYPE> doReinterpret(
                    @NotNull Class<ALTERNATE_DATA_TYPE> alternateDataType) {
                // noinspection unchecked
                return (ColumnSource<ALTERNATE_DATA_TYPE>) SyntheticDateTimeSource.this;
            }

            @Override
            public void fillChunkUnordered(@NotNull FillContext context,
                    @NotNull WritableChunk<? super Values> dest,
                    @NotNull LongChunk<? extends RowKeys> keys) {
                final WritableLongChunk<? super Values> longDest = dest.asWritableLongChunk();
                longDest.setSize(keys.size());
                for (int ii = 0; ii < keys.size(); ++ii) {
                    longDest.set(ii, computeNanos(keys.get(ii)));
                }
            }

            @Override
            public void fillPrevChunkUnordered(@NotNull FillContext context,
                    @NotNull WritableChunk<? super Values> dest,
                    @NotNull LongChunk<? extends RowKeys> keys) {
                fillChunkUnordered(context, dest, keys);
            }

            @Override
            public boolean providesFillUnordered() {
                return true;
            }
        }
    }
}
package com.algokelvin.otto.bus; import android.os.Handler; import android.os.Looper; import com.squareup.otto.Bus; public class MainThreadBus extends Bus { private final Handler mHandler = new Handler(Looper.getMainLooper()); @Override public void post(final Object event) { //Determine whether it is the main thread looper, whether this thread is the main thread if (Looper.myLooper() == Looper.getMainLooper()) { super.post(event); } else { mHandler.post(new Runnable() { @Override public void run() { MainThreadBus.super.post(event); } }); } } }
/*
 * Copyright 2017-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.glowroot.agent.plugin.servlet;

import org.glowroot.agent.plugin.api.Agent;
import org.glowroot.agent.plugin.api.OptionalThreadContext;
import org.glowroot.agent.plugin.api.TimerName;
import org.glowroot.agent.plugin.api.TraceEntry;
import org.glowroot.agent.plugin.api.checker.Nullable;
import org.glowroot.agent.plugin.api.weaving.BindReceiver;
import org.glowroot.agent.plugin.api.weaving.BindThrowable;
import org.glowroot.agent.plugin.api.weaving.BindTraveler;
import org.glowroot.agent.plugin.api.weaving.OnBefore;
import org.glowroot.agent.plugin.api.weaving.OnReturn;
import org.glowroot.agent.plugin.api.weaving.OnThrow;
import org.glowroot.agent.plugin.api.weaving.Pointcut;
import org.glowroot.agent.plugin.api.weaving.Shim;

/**
 * Glowroot weaving aspect that traces WebLogic web-app startup by intercepting
 * {@code weblogic.servlet.internal.WebAppServletContext.start()}.
 * Annotation attribute values are woven by class/method name and must not change.
 */
public class WebLogicAppStartupAspect {

    // Shim gives typed access to the WebLogic-internal class without a compile dependency.
    @Shim("weblogic.servlet.internal.WebAppServletContext")
    public interface WebAppServletContext {
        @Nullable
        String getContextPath();
    }

    @Pointcut(className = "weblogic.servlet.internal.WebAppServletContext", methodName = "start",
            methodParameterTypes = {}, nestingGroup = "servlet-startup", timerName = "startup")
    public static class StartAdvice {

        private static final TimerName timerName = Agent.getTimerName(StartAdvice.class);

        /** Opens a trace entry labeled with the web app's context path. */
        @OnBefore
        public static TraceEntry onBefore(OptionalThreadContext context,
                @BindReceiver WebAppServletContext webAppServletContext) {
            String path = webAppServletContext.getContextPath();
            return ContainerStartup.onBeforeCommon(context, path, timerName);
        }

        /** Normal completion: close the trace entry. */
        @OnReturn
        public static void onReturn(@BindTraveler TraceEntry traceEntry) {
            traceEntry.end();
        }

        /** Startup threw: close the trace entry with the error attached. */
        @OnThrow
        public static void onThrow(@BindThrowable Throwable t,
                @BindTraveler TraceEntry traceEntry) {
            traceEntry.endWithError(t);
        }
    }
}
/*
 * * Copyright © 2017-2018 minsx.com All rights reserved
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * *     http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 */
package com.minsx.appmanager.web.entity;

import com.minsx.appmanager.core.task.exeption.TaskRestartException;
import com.minsx.appmanager.core.task.exeption.ShellInternalException;
import com.minsx.appmanager.core.task.Task;
import com.minsx.appmanager.core.task.exeption.TaskStartException;
import com.minsx.appmanager.core.task.exeption.TaskStopException;
import com.minsx.framework.common.shell.core.Shell;

import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;

/**
 * JPA entity describing a managed shell application: its command, environment,
 * captured stdout/stderr, and the lifecycle ({@link #start()}, {@link #stop()},
 * {@link #reStart()}) of the underlying {@link Shell} process.
 */
@Entity
@Table(name = "application")
public class Application extends BaseEntity implements Serializable, Task {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(nullable = false, name = "id")
    private Integer id;
    private String appName;
    private String command;
    // Environment entries stored as a single SPLIT_STR-joined string column.
    private String environments;
    private String inPath;
    private Date beginTime;
    private Date endTime;
    private String charset = "UTF-8";
    // When true, the process is stopped on the first stderr line.
    private Boolean stopOnErr = false;
    // Maximum number of characters retained in each output buffer.
    private Integer bufferLength = 5000;
    // NOTE(review): StringBuffer columns rely on JPA handling a non-basic type —
    // verify an AttributeConverter exists, otherwise persistence of these fields fails.
    @Column(columnDefinition = "text")
    private StringBuffer outBuffer = new StringBuffer();
    @Column(columnDefinition = "text")
    private StringBuffer errBuffer = new StringBuffer();
    @Transient
    private final static String SPLIT_STR = ";";
    // Live process handle; not persisted.
    @Transient
    private Shell shell;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    @Override
    public String getAppName() {
        return appName;
    }

    @Override
    public void setAppName(String appName) {
        this.appName = appName;
    }

    @Override
    public String getCommand() {
        return command;
    }

    @Override
    public void setCommand(String command) {
        this.command = command;
    }

    /** Splits the stored column back into individual entries; null when unset. */
    @Override
    public String[] getEnvironments() {
        return environments == null ? null : environments.split(SPLIT_STR);
    }

    /**
     * Stores the entries joined by {@code SPLIT_STR}.
     * Bug fix: the previous implementation called StringBuilder#delete with a
     * negative index for an empty array, throwing StringIndexOutOfBoundsException;
     * an empty array is now treated like null (column left unchanged).
     */
    @Override
    public void setEnvironments(String[] environments) {
        if (environments != null && environments.length > 0) {
            this.environments = String.join(SPLIT_STR, environments);
        }
    }

    @Override
    public String getInPath() {
        return inPath;
    }

    @Override
    public void setInPath(String inPath) {
        this.inPath = inPath;
    }

    @Override
    public String getCharset() {
        return charset;
    }

    @Override
    public void setCharset(String charset) {
        this.charset = charset;
    }

    @Override
    public StringBuffer getOutBuffer() {
        return outBuffer;
    }

    public void setOutBuffer(StringBuffer outBuffer) {
        this.outBuffer = outBuffer;
    }

    @Override
    public StringBuffer getErrBuffer() {
        return errBuffer;
    }

    public void setErrBuffer(StringBuffer errBuffer) {
        this.errBuffer = errBuffer;
    }

    @Override
    public Date getBeginTime() {
        return beginTime;
    }

    public void setBeginTime(Date beginTime) {
        this.beginTime = beginTime;
    }

    @Override
    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    @Override
    public Integer getBufferLength() {
        return bufferLength;
    }

    @Override
    public void setBufferLength(Integer bufferLength) {
        this.bufferLength = bufferLength;
    }

    @Override
    public Boolean getStopOnErr() {
        return stopOnErr;
    }

    @Override
    public void setStopOnErr(Boolean stopOnErr) {
        this.stopOnErr = stopOnErr;
    }

    @Override
    public Boolean isRunning() {
        return shell != null && shell.isRunning();
    }

    /**
     * Builds and launches the shell process, wiring stdout/stderr into the
     * bounded buffers. @throws TaskStartException when already running.
     */
    @Override
    public void start() {
        if (isRunning()) {
            throw new TaskStartException("the application has been started, can't start again", this);
        }
        shell = Shell.build(command).onOut((line, operator) -> {
            outBuffer.append(line).append("<br/>");
            handleStringBuffer(outBuffer);
        }).onErr((line, operator) -> {
            errBuffer.append(line).append("<br/>");
            handleStringBuffer(errBuffer);
            if (stopOnErr) {
                operator.stop();
            }
        }).onException(e -> {
            // Tear the process down before surfacing the internal failure.
            shell.stop();
            throw new ShellInternalException(String.format("run [%s] catch internal exception", appName), e, this);
        }).charset(charset)
                .sync(false)
                .environments(getEnvironments())
                .inPath(inPath).logged(true);
        shell.run();
        beginTime = new Date();
    }

    /** Stops then starts; @throws TaskRestartException when not running. */
    @Override
    public void reStart() {
        if (isRunning()) {
            stop();
            start();
        } else {
            throw new TaskRestartException("the application has not been started, can't restart", this);
        }
    }

    /** Stops the process and records endTime; @throws TaskStopException when not running. */
    @Override
    public void stop() {
        if (isRunning()) {
            shell.stop();
            endTime = new Date();
        } else {
            throw new TaskStopException("the application has not been started, can't stop", this);
        }
    }

    // Trims the buffer from the front so it never exceeds bufferLength characters.
    private void handleStringBuffer(StringBuffer buffer) {
        int outLength = buffer.length() - bufferLength;
        if (outLength > 0) {
            buffer.delete(0, outLength);
        }
    }

}
/* * Copyright (C) 2014 The Android Open Source Project * Copyright (c) 1998, 2014, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.crypto; import java.util.*; import java.security.*; import java.security.Provider.Service; import java.security.spec.AlgorithmParameterSpec; import java.nio.ByteBuffer; import sun.security.jca.*; import sun.security.jca.GetInstance.Instance; /** * This class provides the functionality of a "Message Authentication Code" * (MAC) algorithm. * * <p> A MAC provides a way to check * the integrity of information transmitted over or stored in an unreliable * medium, based on a secret key. Typically, message * authentication codes are used between two parties that share a secret * key in order to validate information transmitted between these * parties. 
* * <p> A MAC mechanism that is based on cryptographic hash functions is * referred to as HMAC. HMAC can be used with any cryptographic hash function, * e.g., MD5 or SHA-1, in combination with a secret shared key. HMAC is * specified in RFC 2104. * * <p> Android provides the following <code>Mac</code> algorithms: * <table> * <thead> * <tr> * <th>Algorithm</th> * <th>Supported API Levels</th> * </tr> * </thead> * <tbody> * <tr class="deprecated"> * <td>DESMAC</td> * <td>1-8</td> * </tr> * <tr class="deprecated"> * <td>DESMAC/CFB8</td> * <td>1-8</td> * </tr> * <tr class="deprecated"> * <td>DESedeMAC</td> * <td>1-8</td> * </tr> * <tr class="deprecated"> * <td>DESedeMAC/CFB8</td> * <td>1-8</td> * </tr> * <tr class="deprecated"> * <td>DESedeMAC64</td> * <td>1-8</td> * </tr> * <tr class="deprecated"> * <td>DESwithISO9797</td> * <td>1-8</td> * </tr> * <tr> * <td>HmacMD5</td> * <td>1+</td> * </tr> * <tr> * <td>HmacSHA1</td> * <td>1+</td> * </tr> * <tr> * <td>HmacSHA224</td> * <td>1-8,22+</td> * </tr> * <tr> * <td>HmacSHA256</td> * <td>1+</td> * </tr> * <tr> * <td>HmacSHA384</td> * <td>1+</td> * </tr> * <tr> * <td>HmacSHA512</td> * <td>1+</td> * </tr> * <tr class="deprecated"> * <td>ISO9797ALG3MAC</td> * <td>1-8</td> * </tr> * <tr> * <td>PBEwithHmacSHA</td> * <td>1+</td> * </tr> * <tr> * <td>PBEwithHmacSHA1</td> * <td>1+</td> * </tr> * <tr> * <td>PBEwithHmacSHA224</td> * <td>26+</td> * </tr> * <tr> * <td>PBEwithHmacSHA256</td> * <td>26+</td> * </tr> * <tr> * <td>PBEwithHmacSHA384</td> * <td>26+</td> * </tr> * <tr> * <td>PBEwithHmacSHA512</td> * <td>26+</td> * </tr> * </tbody> * </table> * * These algorithms are described in the * <a href="{@docRoot}/../technotes/guides/security/StandardNames.html#Mac"> * Mac section</a> of the * Java Cryptography Architecture Standard Algorithm Name Documentation. * * @author Jan Luehe * * @since 1.4 */ public class Mac implements Cloneable { // Android-removed: this debugging mechanism is not used in Android. 
/* private static final Debug debug = Debug.getInstance("jca", "Mac"); private static final Debug pdebug = Debug.getInstance("provider", "Provider"); private static final boolean skipDebug = Debug.isOn("engine=") && !Debug.isOn("mac"); */ // The provider private Provider provider; // The provider implementation (delegate) private MacSpi spi; // The name of the MAC algorithm. private final String algorithm; // Has this object been initialized? private boolean initialized = false; // BEGIN Android-removed: Redo the provider selection logic to allow reselecting provider. // When only the algorithm is specified, we want to allow the Mac provider for that // algorithm to change if multiple providers exist and they support different subsets of // keys. To that end, we don't hold an iterator and exhaust it when we need to choose // a provider like the upstream implementation, we reestablish the list of providers // each time. /* // next service to try in provider selection // null once provider is selected private Service firstService; // remaining services to try in provider selection // null once provider is selected private Iterator<Service> serviceIterator; */ // END Android-removed: Redo the provider selection logic to allow reselecting provider. private final Object lock; /** * Creates a MAC object. * * @param macSpi the delegate * @param provider the provider * @param algorithm the algorithm */ protected Mac(MacSpi macSpi, Provider provider, String algorithm) { this.spi = macSpi; this.provider = provider; this.algorithm = algorithm; lock = null; } // Android-changed: Remove Service and Iterator from constructor args. private Mac(String algorithm) { this.algorithm = algorithm; lock = new Object(); } /** * Returns the algorithm name of this <code>Mac</code> object. * * <p>This is the same name that was specified in one of the * <code>getInstance</code> calls that created this * <code>Mac</code> object. * * @return the algorithm name of this <code>Mac</code> object. 
*/ public final String getAlgorithm() { return this.algorithm; } /** * Returns a <code>Mac</code> object that implements the * specified MAC algorithm. * * <p> This method traverses the list of registered security Providers, * starting with the most preferred Provider. * A new Mac object encapsulating the * MacSpi implementation from the first * Provider that supports the specified algorithm is returned. * * <p> Note that the list of registered providers may be retrieved via * the {@link Security#getProviders() Security.getProviders()} method. * * @param algorithm the standard name of the requested MAC algorithm. * See the Mac section in the <a href= * "{@docRoot}/../technotes/guides/security/StandardNames.html#Mac"> * Java Cryptography Architecture Standard Algorithm Name Documentation</a> * for information about standard algorithm names. * * @return the new <code>Mac</code> object. * * @exception NoSuchAlgorithmException if no Provider supports a * MacSpi implementation for the * specified algorithm. * * @see java.security.Provider */ public static final Mac getInstance(String algorithm) throws NoSuchAlgorithmException { List<Service> services = GetInstance.getServices("Mac", algorithm); // make sure there is at least one service from a signed provider Iterator<Service> t = services.iterator(); while (t.hasNext()) { Service s = t.next(); if (JceSecurity.canUseProvider(s.getProvider()) == false) { continue; } // Android-changed: Remove Service and Iterator from constructor args. // return new Mac(s, t, algorithm); return new Mac(algorithm); } throw new NoSuchAlgorithmException ("Algorithm " + algorithm + " not available"); } /** * Returns a <code>Mac</code> object that implements the * specified MAC algorithm. * * <p> A new Mac object encapsulating the * MacSpi implementation from the specified provider * is returned. The specified provider must be registered * in the security provider list. 
* * <p> Note that the list of registered providers may be retrieved via * the {@link Security#getProviders() Security.getProviders()} method. * * @param algorithm the standard name of the requested MAC algorithm. * See the Mac section in the <a href= * "{@docRoot}/../technotes/guides/security/StandardNames.html#Mac"> * Java Cryptography Architecture Standard Algorithm Name Documentation</a> * for information about standard algorithm names. * * @param provider the name of the provider. * * @return the new <code>Mac</code> object. * * @exception NoSuchAlgorithmException if a MacSpi * implementation for the specified algorithm is not * available from the specified provider. * * @exception NoSuchProviderException if the specified provider is not * registered in the security provider list. * * @exception IllegalArgumentException if the <code>provider</code> * is null or empty. * * @see java.security.Provider */ public static final Mac getInstance(String algorithm, String provider) throws NoSuchAlgorithmException, NoSuchProviderException { // Android-added: Check for Bouncy Castle deprecation Providers.checkBouncyCastleDeprecation(provider, "Mac", algorithm); Instance instance = JceSecurity.getInstance ("Mac", MacSpi.class, algorithm, provider); return new Mac((MacSpi)instance.impl, instance.provider, algorithm); } /** * Returns a <code>Mac</code> object that implements the * specified MAC algorithm. * * <p> A new Mac object encapsulating the * MacSpi implementation from the specified Provider * object is returned. Note that the specified Provider object * does not have to be registered in the provider list. * * @param algorithm the standard name of the requested MAC algorithm. * See the Mac section in the <a href= * "{@docRoot}/../technotes/guides/security/StandardNames.html#Mac"> * Java Cryptography Architecture Standard Algorithm Name Documentation</a> * for information about standard algorithm names. * * @param provider the provider. 
* * @return the new <code>Mac</code> object. * * @exception NoSuchAlgorithmException if a MacSpi * implementation for the specified algorithm is not available * from the specified Provider object. * * @exception IllegalArgumentException if the <code>provider</code> * is null. * * @see java.security.Provider */ public static final Mac getInstance(String algorithm, Provider provider) throws NoSuchAlgorithmException { // Android-added: Check for Bouncy Castle deprecation Providers.checkBouncyCastleDeprecation(provider, "Mac", algorithm); Instance instance = JceSecurity.getInstance ("Mac", MacSpi.class, algorithm, provider); return new Mac((MacSpi)instance.impl, instance.provider, algorithm); } // max number of debug warnings to print from chooseFirstProvider() private static int warnCount = 10; /** * Choose the Spi from the first provider available. Used if * delayed provider selection is not possible because init() * is not the first method called. */ void chooseFirstProvider() { // Android-changed: Check if lock is null rather than removed serviceIterator field. // if ((spi != null) || (serviceIterator == null)) { if (spi != null || lock == null) { return; } synchronized (lock) { if (spi != null) { return; } // Android-removed: this debugging mechanism is not used in Android. /* if (debug != null) { int w = --warnCount; if (w >= 0) { debug.println("Mac.init() not first method " + "called, disabling delayed provider selection"); if (w == 0) { debug.println("Further warnings of this type will " + "be suppressed"); } new Exception("Call trace").printStackTrace(); } } */ Exception lastException = null; // Android-changed: Provider selection; loop over a new list each time. 
for (Service s : GetInstance.getServices("Mac", algorithm)) { if (JceSecurity.canUseProvider(s.getProvider()) == false) { continue; } try { Object obj = s.newInstance(null); if (obj instanceof MacSpi == false) { continue; } spi = (MacSpi)obj; provider = s.getProvider(); // Android-removed: Provider selection; loop over a new list each time. /* // not needed any more firstService = null; serviceIterator = null; */ return; } catch (NoSuchAlgorithmException e) { lastException = e; } } ProviderException e = new ProviderException ("Could not construct MacSpi instance"); if (lastException != null) { e.initCause(lastException); } throw e; } } private void chooseProvider(Key key, AlgorithmParameterSpec params) throws InvalidKeyException, InvalidAlgorithmParameterException { synchronized (lock) { // Android-changed: Use the currently-selected provider only if no key was provided. // if (spi != null) { if (spi != null && (key == null || lock == null)) { spi.engineInit(key, params); return; } Exception lastException = null; // Android-changed: Provider selection; loop over a new list each time. for (Service s : GetInstance.getServices("Mac", algorithm)) { // if provider says it does not support this key, ignore it if (s.supportsParameter(key) == false) { continue; } if (JceSecurity.canUseProvider(s.getProvider()) == false) { continue; } try { MacSpi spi = (MacSpi)s.newInstance(null); spi.engineInit(key, params); provider = s.getProvider(); this.spi = spi; // Android-removed: Provider selection; loop over a new list each time. 
/* firstService = null; serviceIterator = null; */ return; } catch (Exception e) { // NoSuchAlgorithmException from newInstance() // InvalidKeyException from init() // RuntimeException (ProviderException) from init() if (lastException == null) { lastException = e; } } } // no working provider found, fail if (lastException instanceof InvalidKeyException) { throw (InvalidKeyException)lastException; } if (lastException instanceof InvalidAlgorithmParameterException) { throw (InvalidAlgorithmParameterException)lastException; } if (lastException instanceof RuntimeException) { throw (RuntimeException)lastException; } String kName = (key != null) ? key.getClass().getName() : "(null)"; throw new InvalidKeyException ("No installed provider supports this key: " + kName, lastException); } } /** * Returns the provider of this <code>Mac</code> object. * * @return the provider of this <code>Mac</code> object. */ public final Provider getProvider() { chooseFirstProvider(); return this.provider; } /** * Returns the length of the MAC in bytes. * * @return the MAC length in bytes. */ public final int getMacLength() { chooseFirstProvider(); return spi.engineGetMacLength(); } /** * Initializes this <code>Mac</code> object with the given key. * * @param key the key. * * @exception InvalidKeyException if the given key is inappropriate for * initializing this MAC. */ public final void init(Key key) throws InvalidKeyException { try { // Android-changed: Use the currently-selected provider only if no key was provided. // if (spi != null) { if (spi != null && (key == null || lock == null)) { spi.engineInit(key, null); } else { chooseProvider(key, null); } } catch (InvalidAlgorithmParameterException e) { throw new InvalidKeyException("init() failed", e); } initialized = true; // Android-removed: this debugging mechanism is not used in Android. /* if (!skipDebug && pdebug != null) { pdebug.println("Mac." 
+ algorithm + " algorithm from: " + this.provider.getName()); } */ } /** * Initializes this <code>Mac</code> object with the given key and * algorithm parameters. * * @param key the key. * @param params the algorithm parameters. * * @exception InvalidKeyException if the given key is inappropriate for * initializing this MAC. * @exception InvalidAlgorithmParameterException if the given algorithm * parameters are inappropriate for this MAC. */ public final void init(Key key, AlgorithmParameterSpec params) throws InvalidKeyException, InvalidAlgorithmParameterException { // Android-changed: Use the currently-selected provider only if no key was provided. // if (spi != null) { if (spi != null && (key == null || lock == null)) { spi.engineInit(key, params); } else { chooseProvider(key, params); } initialized = true; // Android-removed: this debugging mechanism is not used in Android. /* if (!skipDebug && pdebug != null) { pdebug.println("Mac." + algorithm + " algorithm from: " + this.provider.getName()); } */ } /** * Processes the given byte. * * @param input the input byte to be processed. * * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. */ public final void update(byte input) throws IllegalStateException { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } spi.engineUpdate(input); } /** * Processes the given array of bytes. * * @param input the array of bytes to be processed. * * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. */ public final void update(byte[] input) throws IllegalStateException { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } if (input != null) { spi.engineUpdate(input, 0, input.length); } } /** * Processes the first <code>len</code> bytes in <code>input</code>, * starting at <code>offset</code> inclusive. * * @param input the input buffer. 
* @param offset the offset in <code>input</code> where the input starts. * @param len the number of bytes to process. * * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. */ public final void update(byte[] input, int offset, int len) throws IllegalStateException { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } if (input != null) { if ((offset < 0) || (len > (input.length - offset)) || (len < 0)) throw new IllegalArgumentException("Bad arguments"); spi.engineUpdate(input, offset, len); } } /** * Processes <code>input.remaining()</code> bytes in the ByteBuffer * <code>input</code>, starting at <code>input.position()</code>. * Upon return, the buffer's position will be equal to its limit; * its limit will not have changed. * * @param input the ByteBuffer * * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. * @since 1.5 */ public final void update(ByteBuffer input) { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } if (input == null) { throw new IllegalArgumentException("Buffer must not be null"); } spi.engineUpdate(input); } /** * Finishes the MAC operation. * * <p>A call to this method resets this <code>Mac</code> object to the * state it was in when previously initialized via a call to * <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * That is, the object is reset and available to generate another MAC from * the same key, if desired, via new calls to <code>update</code> and * <code>doFinal</code>. * (In order to reuse this <code>Mac</code> object with a different key, * it must be reinitialized via a call to <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * * @return the MAC result. * * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. 
*/ public final byte[] doFinal() throws IllegalStateException { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } byte[] mac = spi.engineDoFinal(); spi.engineReset(); return mac; } /** * Finishes the MAC operation. * * <p>A call to this method resets this <code>Mac</code> object to the * state it was in when previously initialized via a call to * <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * That is, the object is reset and available to generate another MAC from * the same key, if desired, via new calls to <code>update</code> and * <code>doFinal</code>. * (In order to reuse this <code>Mac</code> object with a different key, * it must be reinitialized via a call to <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * * <p>The MAC result is stored in <code>output</code>, starting at * <code>outOffset</code> inclusive. * * @param output the buffer where the MAC result is stored * @param outOffset the offset in <code>output</code> where the MAC is * stored * * @exception ShortBufferException if the given output buffer is too small * to hold the result * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. */ public final void doFinal(byte[] output, int outOffset) throws ShortBufferException, IllegalStateException { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } int macLen = getMacLength(); if (output == null || output.length-outOffset < macLen) { throw new ShortBufferException ("Cannot store MAC in output buffer"); } byte[] mac = doFinal(); System.arraycopy(mac, 0, output, outOffset, macLen); return; } /** * Processes the given array of bytes and finishes the MAC operation. 
* * <p>A call to this method resets this <code>Mac</code> object to the * state it was in when previously initialized via a call to * <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * That is, the object is reset and available to generate another MAC from * the same key, if desired, via new calls to <code>update</code> and * <code>doFinal</code>. * (In order to reuse this <code>Mac</code> object with a different key, * it must be reinitialized via a call to <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * * @param input data in bytes * @return the MAC result. * * @exception IllegalStateException if this <code>Mac</code> has not been * initialized. */ public final byte[] doFinal(byte[] input) throws IllegalStateException { chooseFirstProvider(); if (initialized == false) { throw new IllegalStateException("MAC not initialized"); } update(input); return doFinal(); } /** * Resets this <code>Mac</code> object. * * <p>A call to this method resets this <code>Mac</code> object to the * state it was in when previously initialized via a call to * <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. * That is, the object is reset and available to generate another MAC from * the same key, if desired, via new calls to <code>update</code> and * <code>doFinal</code>. * (In order to reuse this <code>Mac</code> object with a different key, * it must be reinitialized via a call to <code>init(Key)</code> or * <code>init(Key, AlgorithmParameterSpec)</code>. */ public final void reset() { chooseFirstProvider(); spi.engineReset(); } /** * Returns a clone if the provider implementation is cloneable. * * @return a clone if the provider implementation is cloneable. * * @exception CloneNotSupportedException if this is called on a * delegate that does not support <code>Cloneable</code>. 
*/
public final Object clone() throws CloneNotSupportedException {
    // Force provider selection first so there is a concrete SPI to clone.
    chooseFirstProvider();
    Mac that = (Mac)super.clone();
    // Deep-copy the delegate; propagates CloneNotSupportedException when the
    // underlying MacSpi implementation is not cloneable.
    that.spi = (MacSpi)this.spi.clone();
    return that;
}

// BEGIN Android-added: Allow access to the current SPI for testing purposes.
/**
 * Returns the {@code MacSpi} backing this {@code Mac} or {@code null} if no {@code MacSpi} is
 * backing this {@code Mac}.
 *
 * @hide
 */
public MacSpi getCurrentSpi() {
    return spi;
}
// END Android-added: Allow access to the current SPI for testing purposes.
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2022 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.parser.common.grammar;

/**
 * Represents terminal part of the text.
 *
 * <p>Base class for grammar rule expressions that match input text directly,
 * as opposed to composite expressions built from other rules.
 */
public abstract class TerminalExpression extends RuleExpression {

    // Text this terminal matches; immutable once constructed.
    // NOTE(review): whether this is a plain literal or a regex pattern is
    // determined by the concrete subclass — not visible from this file.
    public final String pattern;

    /**
     * Creates a terminal expression for the given text.
     *
     * @param pattern the terminal text to match
     */
    public TerminalExpression(String pattern) {
        this.pattern = pattern;
    }
}
package org.bukkit.inventory; import com.google.common.collect.ImmutableMap; import java.util.LinkedHashMap; import java.util.Map; import org.apache.commons.lang.Validate; import org.bukkit.Bukkit; import org.bukkit.Material; import org.bukkit.Utility; import org.bukkit.configuration.serialization.ConfigurationSerializable; import org.bukkit.enchantments.Enchantment; import org.bukkit.inventory.meta.Damageable; import org.bukkit.inventory.meta.ItemMeta; import org.bukkit.material.MaterialData; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * Represents a stack of items. * <p> * <b>IMPORTANT: An <i>Item</i>Stack is only designed to contain <i>items</i>. Do not * use this class to encapsulate Materials for which {@link Material#isItem()} * returns false.</b> */ public class ItemStack implements Cloneable, ConfigurationSerializable { private Material type = Material.AIR; private int amount = 0; private MaterialData data = null; private ItemMeta meta; @Utility protected ItemStack() {} /** * Defaults stack size to 1, with no extra data. * <p> * <b>IMPORTANT: An <i>Item</i>Stack is only designed to contain * <i>items</i>. Do not use this class to encapsulate Materials for which * {@link Material#isItem()} returns false.</b> * * @param type item material */ public ItemStack(@NotNull final Material type) { this(type, 1); } /** * An item stack with no extra data. * <p> * <b>IMPORTANT: An <i>Item</i>Stack is only designed to contain * <i>items</i>. 
Do not use this class to encapsulate Materials for which * {@link Material#isItem()} returns false.</b> * * @param type item material * @param amount stack size */ public ItemStack(@NotNull final Material type, final int amount) { this(type, amount, (short) 0); } /** * An item stack with the specified damage / durability * * @param type item material * @param amount stack size * @param damage durability / damage * @deprecated see {@link #setDurability(short)} */ public ItemStack(@NotNull final Material type, final int amount, final short damage) { this(type, amount, damage, null); } /** * @param type the type * @param amount the amount in the stack * @param damage the damage value of the item * @param data the data value or null * @deprecated this method uses an ambiguous data byte object */ @Deprecated public ItemStack(@NotNull final Material type, final int amount, final short damage, @Nullable final Byte data) { Validate.notNull(type, "Material cannot be null"); this.type = type; this.amount = amount; if (damage != 0) { setDurability(damage); } if (data != null) { createData(data); } } /** * Creates a new item stack derived from the specified stack * * @param stack the stack to copy * @throws IllegalArgumentException if the specified stack is null or * returns an item meta not created by the item factory */ public ItemStack(@NotNull final ItemStack stack) throws IllegalArgumentException { Validate.notNull(stack, "Cannot copy null stack"); this.type = stack.getType(); this.amount = stack.getAmount(); if (this.type.isLegacy()) { this.data = stack.getData(); } if (stack.hasItemMeta()) { setItemMeta0(stack.getItemMeta(), type); } } /** * Gets the type of this item * * @return Type of the items in this stack */ @Utility @NotNull public Material getType() { return type; } /** * Sets the type of this item * <p> * Note that in doing so you will reset the MaterialData for this stack. * <p> * <b>IMPORTANT: An <i>Item</i>Stack is only designed to contain * <i>items</i>. 
Do not use this class to encapsulate Materials for which * {@link Material#isItem()} returns false.</b> * * @param type New type to set the items in this stack to */ @Utility public void setType(@NotNull Material type) { Validate.notNull(type, "Material cannot be null"); this.type = type; if (this.meta != null) { this.meta = Bukkit.getItemFactory().asMetaFor(meta, type); } if (type.isLegacy()) { createData((byte) 0); } else { this.data = null; } } /** * Gets the amount of items in this stack * * @return Amount of items in this stack */ public int getAmount() { return amount; } /** * Sets the amount of items in this stack * * @param amount New amount of items in this stack */ public void setAmount(int amount) { this.amount = amount; } /** * Gets the MaterialData for this stack of items * * @return MaterialData for this item */ @Nullable public MaterialData getData() { Material mat = Bukkit.getUnsafe().toLegacy(getType()); if (data == null && mat != null && mat.getData() != null) { data = mat.getNewData((byte) this.getDurability()); } return data; } /** * Sets the MaterialData for this stack of items * * @param data New MaterialData for this item */ public void setData(@Nullable MaterialData data) { if (data == null) { this.data = data; } else { Material mat = Bukkit.getUnsafe().toLegacy(getType()); if ((data.getClass() == mat.getData()) || (data.getClass() == MaterialData.class)) { this.data = data; } else { throw new IllegalArgumentException("Provided data is not of type " + mat.getData().getName() + ", found " + data.getClass().getName()); } } } /** * Sets the durability of this item * * @param durability Durability of this item * @deprecated durability is now part of ItemMeta. To avoid confusion and * misuse, {@link #getItemMeta()}, {@link #setItemMeta(ItemMeta)} and * {@link Damageable#setDamage(int)} should be used instead. This is because * any call to this method will be overwritten by subsequent setting of * ItemMeta which was created before this call. 
*/
@Deprecated
public void setDurability(final short durability) {
    // Durability now lives on ItemMeta (Damageable); write it through a meta
    // copy and store the copy back so the change is not lost.
    ItemMeta meta = getItemMeta();
    if (meta != null) {
        ((Damageable) meta).setDamage(durability);
        setItemMeta(meta);
    }
}

/**
 * Gets the durability of this item
 *
 * @return Durability of this item
 * @deprecated see {@link #setDurability(short)}
 */
@Deprecated
public short getDurability() {
    // Reads the damage value from a fresh ItemMeta copy; 0 when no meta exists.
    ItemMeta meta = getItemMeta();
    return (meta == null) ? 0 : (short) ((Damageable) meta).getDamage();
}

/**
 * Get the maximum stacksize for the material hold in this ItemStack.
 * (Returns -1 if it has no idea)
 *
 * @return The maximum you can stack this material to.
 */
@Utility
public int getMaxStackSize() {
    Material material = getType();
    if (material != null) {
        return material.getMaxStackSize();
    }
    return -1; // sentinel: maximum stack size unknown
}

// Initializes this.data from a legacy data byte via the current material type.
private void createData(final byte data) {
    this.data = type.getNewData(data);
}

@Override
@Utility
public String toString() {
    // Format: ItemStack{TYPE x AMOUNT[, meta]}
    StringBuilder toString = new StringBuilder("ItemStack{").append(getType().name()).append(" x ").append(getAmount());
    if (hasItemMeta()) {
        toString.append(", ").append(getItemMeta());
    }
    return toString.append('}').toString();
}

@Override
@Utility
public boolean equals(Object obj) {
    // Equal iff the amounts match AND the stacks are similar (type, durability,
    // meta) — see isSimilar below.
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof ItemStack)) {
        return false;
    }
    ItemStack stack = (ItemStack) obj;
    return getAmount() == stack.getAmount() && isSimilar(stack);
}

/**
 * This method is the same as equals, but does not consider stack size
 * (amount).
 *
 * @param stack the item stack to compare to
 * @return true if the two stacks are equal, ignoring the amount
 */
@Utility
public boolean isSimilar(@Nullable ItemStack stack) {
    if (stack == null) {
        return false;
    }
    if (stack == this) {
        return true;
    }
    // Legacy materials are first converted to their modern equivalent before
    // comparison; the expression continues past this chunk boundary.
    Material comparisonType = (this.type.isLegacy()) ?
Bukkit.getUnsafe().fromLegacy(this.getData(), true) : this.type; // This may be called from legacy item stacks, try to get the right material return comparisonType == stack.getType() && getDurability() == stack.getDurability() && hasItemMeta() == stack.hasItemMeta() && (hasItemMeta() ? Bukkit.getItemFactory().equals(getItemMeta(), stack.getItemMeta()) : true); } @NotNull @Override public ItemStack clone() { try { ItemStack itemStack = (ItemStack) super.clone(); if (this.meta != null) { itemStack.meta = this.meta.clone(); } if (this.data != null) { itemStack.data = this.data.clone(); } return itemStack; } catch (CloneNotSupportedException e) { throw new Error(e); } } @Override @Utility public int hashCode() { int hash = 1; hash = hash * 31 + getType().hashCode(); hash = hash * 31 + getAmount(); hash = hash * 31 + (getDurability() & 0xffff); hash = hash * 31 + (hasItemMeta() ? (meta == null ? getItemMeta().hashCode() : meta.hashCode()) : 0); return hash; } /** * Checks if this ItemStack contains the given {@link Enchantment} * * @param ench Enchantment to test * @return True if this has the given enchantment */ public boolean containsEnchantment(@NotNull Enchantment ench) { return meta == null ? false : meta.hasEnchant(ench); } /** * Gets the level of the specified enchantment on this item stack * * @param ench Enchantment to check * @return Level of the enchantment, or 0 */ public int getEnchantmentLevel(@NotNull Enchantment ench) { return meta == null ? 0 : meta.getEnchantLevel(ench); } /** * Gets a map containing all enchantments and their levels on this item. * * @return Map of enchantments. */ @NotNull public Map<Enchantment, Integer> getEnchantments() { return meta == null ? ImmutableMap.<Enchantment, Integer>of() : meta.getEnchants(); } /** * Adds the specified enchantments to this item stack. * <p> * This method is the same as calling {@link * #addEnchantment(org.bukkit.enchantments.Enchantment, int)} for each * element of the map. 
* * @param enchantments Enchantments to add * @throws IllegalArgumentException if the specified enchantments is null * @throws IllegalArgumentException if any specific enchantment or level * is null. <b>Warning</b>: Some enchantments may be added before this * exception is thrown. */ @Utility public void addEnchantments(@NotNull Map<Enchantment, Integer> enchantments) { Validate.notNull(enchantments, "Enchantments cannot be null"); for (Map.Entry<Enchantment, Integer> entry : enchantments.entrySet()) { addEnchantment(entry.getKey(), entry.getValue()); } } /** * Adds the specified {@link Enchantment} to this item stack. * <p> * If this item stack already contained the given enchantment (at any * level), it will be replaced. * * @param ench Enchantment to add * @param level Level of the enchantment * @throws IllegalArgumentException if enchantment null, or enchantment is * not applicable */ @Utility public void addEnchantment(@NotNull Enchantment ench, int level) { Validate.notNull(ench, "Enchantment cannot be null"); if ((level < ench.getStartLevel()) || (level > ench.getMaxLevel())) { throw new IllegalArgumentException("Enchantment level is either too low or too high (given " + level + ", bounds are " + ench.getStartLevel() + " to " + ench.getMaxLevel() + ")"); } else if (!ench.canEnchantItem(this)) { throw new IllegalArgumentException("Specified enchantment cannot be applied to this itemstack"); } addUnsafeEnchantment(ench, level); } /** * Adds the specified enchantments to this item stack in an unsafe manner. * <p> * This method is the same as calling {@link * #addUnsafeEnchantment(org.bukkit.enchantments.Enchantment, int)} for * each element of the map. 
* * @param enchantments Enchantments to add */ @Utility public void addUnsafeEnchantments(@NotNull Map<Enchantment, Integer> enchantments) { for (Map.Entry<Enchantment, Integer> entry : enchantments.entrySet()) { addUnsafeEnchantment(entry.getKey(), entry.getValue()); } } /** * Adds the specified {@link Enchantment} to this item stack. * <p> * If this item stack already contained the given enchantment (at any * level), it will be replaced. * <p> * This method is unsafe and will ignore level restrictions or item type. * Use at your own discretion. * * @param ench Enchantment to add * @param level Level of the enchantment */ public void addUnsafeEnchantment(@NotNull Enchantment ench, int level) { ItemMeta itemMeta = (meta == null ? meta = Bukkit.getItemFactory().getItemMeta(type) : meta); if (itemMeta != null) { itemMeta.addEnchant(ench, level, true); } } /** * Removes the specified {@link Enchantment} if it exists on this * ItemStack * * @param ench Enchantment to remove * @return Previous level, or 0 */ public int removeEnchantment(@NotNull Enchantment ench) { int level = getEnchantmentLevel(ench); if (level == 0 || meta == null) { return level; } meta.removeEnchant(ench); return level; } @Override @NotNull @Utility public Map<String, Object> serialize() { Map<String, Object> result = new LinkedHashMap<String, Object>(); result.put("v", Bukkit.getUnsafe().getDataVersion()); // Include version to indicate we are using modern material names (or LEGACY prefix) result.put("type", getType().name()); if (getAmount() != 1) { result.put("amount", getAmount()); } ItemMeta meta = getItemMeta(); if (!Bukkit.getItemFactory().equals(meta, null)) { result.put("meta", meta); } return result; } /** * Required method for configuration serialization * * @param args map to deserialize * @return deserialized item stack * @see ConfigurationSerializable */ @NotNull public static ItemStack deserialize(@NotNull Map<String, Object> args) { int version = (args.containsKey("v")) ? 
((Number) args.get("v")).intValue() : -1; short damage = 0; int amount = 1; if (args.containsKey("damage")) { damage = ((Number) args.get("damage")).shortValue(); } Material type; if (version < 0) { type = Material.getMaterial(Material.LEGACY_PREFIX + (String) args.get("type")); byte dataVal = (type != null && type.getMaxDurability() == 0) ? (byte) damage : 0; // Actually durable items get a 0 passed into conversion type = Bukkit.getUnsafe().fromLegacy(new MaterialData(type, dataVal), true); // We've converted now so the data val isn't a thing and can be reset if (dataVal != 0) { damage = 0; } } else { type = Bukkit.getUnsafe().getMaterial((String) args.get("type"), version); } if (args.containsKey("amount")) { amount = ((Number) args.get("amount")).intValue(); } ItemStack result = new ItemStack(type, amount, damage); if (args.containsKey("enchantments")) { // Backward compatiblity, @deprecated Object raw = args.get("enchantments"); if (raw instanceof Map) { Map<?, ?> map = (Map<?, ?>) raw; for (Map.Entry<?, ?> entry : map.entrySet()) { Enchantment enchantment = Enchantment.getByName(entry.getKey().toString()); if ((enchantment != null) && (entry.getValue() instanceof Integer)) { result.addUnsafeEnchantment(enchantment, (Integer) entry.getValue()); } } } } else if (args.containsKey("meta")) { // We cannot and will not have meta when enchantments (pre-ItemMeta) exist Object raw = args.get("meta"); if (raw instanceof ItemMeta) { ((ItemMeta) raw).setVersion(version); result.setItemMeta((ItemMeta) raw); } } if (version < 0) { // Set damage again incase meta overwrote it if (args.containsKey("damage")) { result.setDurability(damage); } } return result; } /** * Get a copy of this ItemStack's {@link ItemMeta}. * * @return a copy of the current ItemStack's ItemData */ @Nullable public ItemMeta getItemMeta() { return this.meta == null ? Bukkit.getItemFactory().getItemMeta(this.type) : this.meta.clone(); } /** * Checks to see if any meta data has been defined. 
* * @return Returns true if some meta data has been set for this item */ public boolean hasItemMeta() { return !Bukkit.getItemFactory().equals(meta, null); } /** * Set the ItemMeta of this ItemStack. * * @param itemMeta new ItemMeta, or null to indicate meta data be cleared. * @return True if successfully applied ItemMeta, see {@link * ItemFactory#isApplicable(ItemMeta, ItemStack)} * @throws IllegalArgumentException if the item meta was not created by * the {@link ItemFactory} */ public boolean setItemMeta(@Nullable ItemMeta itemMeta) { return setItemMeta0(itemMeta, type); } /* * Cannot be overridden, so it's safe for constructor call */ private boolean setItemMeta0(@Nullable ItemMeta itemMeta, @NotNull Material material) { if (itemMeta == null) { this.meta = null; return true; } if (!Bukkit.getItemFactory().isApplicable(itemMeta, material)) { return false; } this.meta = Bukkit.getItemFactory().asMetaFor(itemMeta, material); Material newType = Bukkit.getItemFactory().updateMaterial(meta, material); if (this.type != newType) { this.type = newType; } if (this.meta == itemMeta) { this.meta = itemMeta.clone(); } return true; } }
package com.team2.pojo.base; public class CountInfo { Long userCount; Long infoCount; public Long getUserCount() { return userCount; } public void setUserCount(Long userCount) { this.userCount = userCount; } public Long getInfoCount() { return infoCount; } public void setInfoCount(Long infoCount) { this.infoCount = infoCount; } }
package com.binance.dex.api.client; import com.binance.dex.api.client.domain.*; import com.binance.dex.api.client.domain.broadcast.*; import com.binance.dex.api.client.domain.broadcast.Burn; import com.binance.dex.api.client.domain.broadcast.CancelOrder; import com.binance.dex.api.client.domain.broadcast.CreateValidator; import com.binance.dex.api.client.domain.broadcast.RemoveValidator; import com.binance.dex.api.client.domain.broadcast.Deposit; import com.binance.dex.api.client.domain.broadcast.Issue; import com.binance.dex.api.client.domain.broadcast.Mint; import com.binance.dex.api.client.domain.broadcast.NewOrder; import com.binance.dex.api.client.domain.broadcast.SubmitProposal; import com.binance.dex.api.client.domain.broadcast.TokenFreeze; import com.binance.dex.api.client.domain.broadcast.TokenUnfreeze; import com.binance.dex.api.client.domain.broadcast.Transaction; import com.binance.dex.api.client.domain.broadcast.Vote; import com.binance.dex.api.client.encoding.Crypto; import com.binance.dex.api.client.encoding.message.InputOutput; import com.binance.dex.api.client.encoding.message.MessageType; import com.binance.dex.api.client.encoding.message.Token; import com.binance.dex.api.proto.*; import com.binance.dex.api.proto.TimeLock; import com.binance.dex.api.proto.TimeRelock; import com.binance.dex.api.proto.TimeUnlock; import com.google.protobuf.InvalidProtocolBufferException; import java.time.Instant; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; public class TransactionConverter { private String hrp; public TransactionConverter(String hrp){ this.hrp = hrp; } public List<Transaction> convert(com.binance.dex.api.client.domain.jsonrpc.BlockInfoResult.Transaction txMessage) { try { byte[] value = txMessage.getTx(); int startIndex = getStartIndex(value); byte[] array = new byte[value.length - startIndex]; System.arraycopy(value, startIndex, array, 0, array.length); StdTx stdTx = 
StdTx.parseFrom(array); StdSignature stdSignature = StdSignature.parseFrom(stdTx.getSignatures(0)); return stdTx.getMsgsList().stream() .map(byteString -> { byte[] bytes = byteString.toByteArray(); Transaction transaction = convert(bytes); if (null == transaction) { return null; } transaction.setHash(txMessage.getHash()); transaction.setHeight(txMessage.getHeight()); transaction.setCode(txMessage.getTx_result().getCode()); transaction.setLog(txMessage.getTx_result().getLog()); transaction.setTags(txMessage.getTx_result().getTags()); transaction.setMemo(stdTx.getMemo()); transaction.setResultData(txMessage.getTx_result().getData()); transaction.setSource(stdTx.getSource()); transaction.setSequence(stdSignature.getSequence()); return transaction; }).filter(Objects::nonNull).collect(Collectors.toList()); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } public int getStartIndex(byte[] bytes) { for (int i = 0; i < bytes.length; i++) { if (((int) bytes[i] & 0xff) < 0x80) { return i + 5; } } return -1; } public Transaction convert(byte[] bytes) { try { MessageType messageType = MessageType.getMessageType(bytes); if (null == messageType) { return null; } switch (messageType) { case Send: return convertTransfer(bytes); case NewOrder: return convertNewOrder(bytes); case CancelOrder: return convertCancelOrder(bytes); case TokenFreeze: return convertTokenFreeze(bytes); case TokenUnfreeze: return convertTokenUnfreeze(bytes); case Vote: return convertVote(bytes); case Issue: return convertIssue(bytes); case Burn: return convertBurn(bytes); case Mint: return convertMint(bytes); case SubmitProposal: return convertSubmitProposal(bytes); case Deposit: return convertDeposit(bytes); case CreateValidator: return convertCreateValidator(bytes); case RemoveValidator: return convertRemoveValidator(bytes); case Listing: return convertListing(bytes); case TimeLock: return convertTimeLock(bytes); case TimeUnlock: return convertTimeUnlock(bytes); case TimeRelock: 
return convertTimeRelock(bytes); } return null; } catch (Exception e) { throw new RuntimeException(e); } } private Transaction convertTimeRelock(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); TimeRelock timeRelock = TimeRelock.parseFrom(array); com.binance.dex.api.client.domain.broadcast.TimeRelock trl = new com.binance.dex.api.client.domain.broadcast.TimeRelock(); trl.setFromAddr(Crypto.encodeAddress(hrp,timeRelock.getFrom().toByteArray())); trl.setLockId(timeRelock.getTimeLockId()); trl.setLockTime(Date.from(Instant.ofEpochSecond(timeRelock.getLockTime()))); trl.setDescription(timeRelock.getDescription()); List<Token> amount = timeRelock.getAmountList().stream().map(token -> { Token msgToken = new Token(); msgToken.setAmount(token.getAmount()); msgToken.setDenom(token.getDenom()); return msgToken; }).collect(Collectors.toList()); trl.setAmount(amount); Transaction transaction = new Transaction(); transaction.setTxType(TxType.TimeRelock); transaction.setRealTx(trl); return transaction; } private Transaction convertTimeUnlock(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); TimeUnlock timeUnlock = TimeUnlock.parseFrom(array); com.binance.dex.api.client.domain.broadcast.TimeUnlock tul = new com.binance.dex.api.client.domain.broadcast.TimeUnlock(); tul.setFromAddr(Crypto.encodeAddress(hrp,timeUnlock.getFrom().toByteArray())); tul.setLockId(timeUnlock.getTimeLockId()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.TimeUnlock); transaction.setRealTx(tul); return transaction; } private Transaction convertTimeLock(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); TimeLock timeLock = TimeLock.parseFrom(array); 
com.binance.dex.api.client.domain.broadcast.TimeLock tl = new com.binance.dex.api.client.domain.broadcast.TimeLock(); tl.setFromAddr(Crypto.encodeAddress(hrp,timeLock.getFrom().toByteArray())); tl.setDescription(timeLock.getDescription()); tl.setLockTime(Date.from(Instant.ofEpochSecond(timeLock.getLockTime()))); List<Token> amount = timeLock.getAmountList().stream().map(token -> { Token msgToken = new Token(); msgToken.setAmount(token.getAmount()); msgToken.setDenom(token.getDenom()); return msgToken; }).collect(Collectors.toList()); tl.setAmount(amount); Transaction transaction = new Transaction(); transaction.setTxType(TxType.TimeLock); transaction.setRealTx(tl); return transaction; } protected Transaction convertTransfer(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); Send send = Send.parseFrom(array); TransferInfo transfer = new TransferInfo(); transfer.setInputs(send.getInputsList().stream().map(i -> { InputOutput input = new InputOutput(); input.setAddress(Crypto.encodeAddress(hrp, i.getAddress().toByteArray())); input.setCoins(i.getCoinsList().stream() .map(Token::of) .collect(Collectors.toList())); return input; }).collect(Collectors.toList())); transfer.setOutputs(send.getOutputsList().stream().map(o -> { InputOutput output = new InputOutput(); output.setAddress(Crypto.encodeAddress(hrp, o.getAddress().toByteArray())); output.setCoins(o.getCoinsList().stream() .map(Token::of) .collect(Collectors.toList())); return output; }).collect(Collectors.toList())); Transaction transaction = new Transaction(); transaction.setTxType(TxType.TRANSFER); transaction.setRealTx(transfer); return transaction; } protected Transaction convertNewOrder(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.NewOrder newOrderMessage = 
com.binance.dex.api.proto.NewOrder.parseFrom(array); NewOrder newOrder = new NewOrder(); newOrder.setSender(Crypto.encodeAddress(hrp, newOrderMessage.getSender().toByteArray())); newOrder.setSymbol(newOrderMessage.getSymbol()); newOrder.setOrderType(OrderType.fromValue(newOrderMessage.getOrdertype())); newOrder.setPrice("" + newOrderMessage.getPrice()); newOrder.setQuantity("" + newOrderMessage.getQuantity()); newOrder.setSide(OrderSide.fromValue(newOrderMessage.getSide())); newOrder.setTimeInForce(TimeInForce.fromValue(newOrderMessage.getTimeinforce())); newOrder.setOrderId(newOrderMessage.getId()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.NEW_ORDER); transaction.setRealTx(newOrder); return transaction; } protected Transaction convertCancelOrder(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.CancelOrder cancelOrderOrderMessage = com.binance.dex.api.proto.CancelOrder.parseFrom(array); CancelOrder cancelOrder = new CancelOrder(); cancelOrder.setSender(Crypto.encodeAddress(hrp, cancelOrderOrderMessage.getSender().toByteArray())); cancelOrder.setRefId(cancelOrderOrderMessage.getRefid()); cancelOrder.setSymbol(cancelOrderOrderMessage.getSymbol()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.CANCEL_ORDER); transaction.setRealTx(cancelOrder); return transaction; } protected Transaction convertTokenFreeze(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.TokenFreeze tokenFreezeMessage = com.binance.dex.api.proto.TokenFreeze.parseFrom(array); TokenFreeze tokenFreeze = new TokenFreeze(); tokenFreeze.setFrom(Crypto.encodeAddress(hrp, tokenFreezeMessage.getFrom().toByteArray())); tokenFreeze.setAmount("" + tokenFreezeMessage.getAmount()); 
tokenFreeze.setSymbol(tokenFreezeMessage.getSymbol()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.FREEZE_TOKEN); transaction.setRealTx(tokenFreeze); return transaction; } protected Transaction convertTokenUnfreeze(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.TokenUnfreeze tokenUnfreezeMessage = com.binance.dex.api.proto.TokenUnfreeze.parseFrom(array); TokenUnfreeze tokenUnfreeze = new TokenUnfreeze(); tokenUnfreeze.setFrom(Crypto.encodeAddress(hrp, tokenUnfreezeMessage.getFrom().toByteArray())); tokenUnfreeze.setSymbol(tokenUnfreezeMessage.getSymbol()); tokenUnfreeze.setAmount("" + tokenUnfreezeMessage.getAmount()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.UNFREEZE_TOKEN); transaction.setRealTx(tokenUnfreeze); return transaction; } protected Transaction convertVote(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Vote voteMessage = com.binance.dex.api.proto.Vote.parseFrom(array); Vote vote = new Vote(); vote.setVoter(Crypto.encodeAddress(hrp, voteMessage.getVoter().toByteArray())); vote.setOption((int) voteMessage.getOption()); vote.setProposalId(voteMessage.getProposalId()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.VOTE); transaction.setRealTx(vote); return transaction; } protected Transaction convertIssue(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Issue issueMessage = com.binance.dex.api.proto.Issue.parseFrom(array); Issue issue = new Issue(); issue.setFrom(Crypto.encodeAddress(hrp, issueMessage.getFrom().toByteArray())); issue.setName(issueMessage.getName()); 
issue.setSymbol(issueMessage.getSymbol()); issue.setTotalSupply(issueMessage.getTotalSupply()); issue.setMintable(issueMessage.getMintable()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.ISSUE); transaction.setRealTx(issue); return transaction; } protected Transaction convertBurn(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Burn burnMessage = com.binance.dex.api.proto.Burn.parseFrom(array); Burn burn = new Burn(); burn.setFrom(Crypto.encodeAddress(hrp, burnMessage.getFrom().toByteArray())); burn.setSymbol(burnMessage.getSymbol()); burn.setAmount(burnMessage.getAmount()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.BURN); transaction.setRealTx(burn); return transaction; } protected Transaction convertMint(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Mint mintMessage = com.binance.dex.api.proto.Mint.parseFrom(array); Mint mint = new Mint(); mint.setFrom(Crypto.encodeAddress(hrp, mintMessage.getFrom().toByteArray())); mint.setSymbol(mintMessage.getSymbol()); mint.setAmount(mintMessage.getAmount()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.MINT); transaction.setRealTx(mint); return transaction; } protected Transaction convertSubmitProposal(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.SubmitProposal proposalMessage = com.binance.dex.api.proto.SubmitProposal.parseFrom(array); SubmitProposal proposal = new SubmitProposal(); proposal.setTitle(proposalMessage.getTitle()); proposal.setDescription(proposalMessage.getDescription()); 
proposal.setProposalType(ProposalType.fromValue(proposalMessage.getProposalType())); proposal.setProposer(Crypto.encodeAddress(hrp, proposalMessage.getProposer().toByteArray())); if (null != proposalMessage.getInitialDepositList()) { proposal.setInitDeposit(proposalMessage.getInitialDepositList().stream() .map(com.binance.dex.api.client.encoding.message.Token::of).collect(Collectors.toList())); } proposal.setVotingPeriod(proposalMessage.getVotingPeriod()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.SUBMIT_PROPOSAL); transaction.setRealTx(proposal); return transaction; } private Transaction convertDeposit(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Deposit depositMessage = com.binance.dex.api.proto.Deposit.parseFrom(array); Deposit deposit = new Deposit(); deposit.setProposalId(depositMessage.getProposalId()); deposit.setDepositer(Crypto.encodeAddress(hrp,depositMessage.getDepositer().toByteArray())); if(null != depositMessage.getAmountList()){ deposit.setAmount(depositMessage.getAmountList().stream() .map(com.binance.dex.api.client.encoding.message.Token::of).collect(Collectors.toList())); } Transaction transaction = new Transaction(); transaction.setTxType(TxType.DEPOSIT); transaction.setRealTx(deposit); return transaction; } private Transaction convertCreateValidator(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); //com.binance.dex.api.proto.CreateValidator createValidatorMessage = com.binance.dex.api.proto.CreateValidator.parseFrom(array); RealCreateValidator realCreateValidator = RealCreateValidator.parseFrom(array); CreateValidator createValidator = new CreateValidator(); 
createValidator.setDelegatorAddress(Crypto.encodeAddress(hrp,realCreateValidator.getCreateValidator().getDelegatorAddress().toByteArray())); createValidator.setValidatorAddress(Crypto.encodeAddress(hrp,realCreateValidator.getCreateValidator().getValidatorAddress().toByteArray())); createValidator.setDelegation(com.binance.dex.api.client.encoding.message.Token.of(realCreateValidator.getCreateValidator().getDelegation())); createValidator.setProposalId(realCreateValidator.getProposalId()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.CREATE_VALIDATOR); transaction.setRealTx(createValidator); return transaction; } private Transaction convertRemoveValidator(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.RemoveValidator removeValidatorMessage = com.binance.dex.api.proto.RemoveValidator.parseFrom(array); RemoveValidator removeValidator = new RemoveValidator(); removeValidator.setLauncherAddr(Crypto.encodeAddress(hrp,removeValidatorMessage.getLauncherAddr().toByteArray())); removeValidator.setValAddr(Crypto.encodeAddress(hrp,removeValidatorMessage.getValAddr().toByteArray())); removeValidator.setValConsAddr(Crypto.encodeAddress(hrp,removeValidatorMessage.getValConsAddr().toByteArray())); removeValidator.setProposalId(removeValidatorMessage.getProposalId()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.REMOVE_VALIDATOR); transaction.setRealTx(removeValidator); return transaction; } private Transaction convertListing(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.List listMessage = com.binance.dex.api.proto.List.parseFrom(array); Listing listing = new Listing(); listing.setProposalId(listMessage.getProposalId()); 
listing.setBaseAssetSymbol(listMessage.getBaseAssetSymbol()); listing.setQuoteAssetSymbol(listMessage.getQuoteAssetSymbol()); listing.setInitPrice(listMessage.getInitPrice()); listing.setFromAddr(Crypto.encodeAddress(hrp,listMessage.getFrom().toByteArray())); Transaction transaction = new Transaction(); transaction.setTxType(TxType.LISTING); transaction.setRealTx(listing); return transaction; } }
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.api.internal.tasks.compile;

import org.gradle.api.internal.tasks.compile.processing.AnnotationProcessorDeclaration;

import java.io.File;
import java.util.List;
import java.util.Set;

/**
 * A compile specification for Java sources: bundles the compiler options,
 * the output directory, and the annotation-processor configuration.
 */
public interface JavaCompileSpec extends JvmLanguageCompileSpec {

    /** Returns the (minimal, serializable) compiler options for this compilation. */
    MinimalJavaCompileOptions getCompileOptions();

    /** Returns the directory class files are written to. */
    @Override
    File getDestinationDir();

    /**
     * The annotation processor path to use. When empty, no processing should be done. When not empty, processing should be done.
     */
    List<File> getAnnotationProcessorPath();

    /** Sets the annotation processor path; an empty list disables processing. */
    void setAnnotationProcessorPath(List<File> path);

    /**
     * Sets the processors that will actually run for this compilation
     * (presumably the subset of processors discovered on the processor path
     * that apply here — confirm against callers).
     */
    void setEffectiveAnnotationProcessors(Set<AnnotationProcessorDeclaration> annotationProcessors);

    /** Returns the processors that will actually run for this compilation. */
    Set<AnnotationProcessorDeclaration> getEffectiveAnnotationProcessors();
}
package com.huan.rabbitmq.advance.returnlistener; import com.rabbitmq.client.*; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** * RabbitMQ 消息消费者 * * @author huan.fu * @date 2018/8/13 - 15:39 */ public class RabbitConsumer { private static final String QUEUE_NAME = "queue_demo"; private static final String IP_ADDRESS = "39.104.169.209"; private static final int PORT = 5672; public static void main(String[] args) throws IOException, TimeoutException, InterruptedException { Address[] addresses = new Address[]{new Address(IP_ADDRESS, PORT)}; ConnectionFactory connectionFactory = new ConnectionFactory(); connectionFactory.setUsername("admin"); connectionFactory.setPassword("admin"); try ( // 注意此时获取连接的方式和生产者略有不同 Connection connection = connectionFactory.newConnection(addresses) ) { // 创建信道 Channel channel = connection.createChannel(); // 设置客户端最多接收未被ack的消息个数 channel.basicQos(64); Consumer consumer = new DefaultConsumer(channel) { @Override public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException { System.err.println("接收到消息:" + new String(body, StandardCharsets.UTF_8)); try { TimeUnit.SECONDS.sleep(5L); } catch (InterruptedException e) { e.printStackTrace(); Thread.currentThread().interrupt(); } System.err.println("deliveryTag:" + envelope.getDeliveryTag()); channel.basicAck(envelope.getDeliveryTag(), false); } }; channel.basicConsume(QUEUE_NAME,false, consumer); TimeUnit.SECONDS.sleep(10000000L); } } }
/*
 *
 *  Copyright (c) 2020. Liquidlabs Ltd <info@liquidlabs.com>
 *
 *  Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 *   See the License for the specific language governing permissions and limitations under the License.
 *
 */

package io.fluidity.search.agg.events;

import io.fluidity.search.Search;

import java.io.IOException;

/**
 * Processes a log stream against a {@link Search} and emits matching events.
 * Implementations hold resources (streams) and must be closed — the interface
 * extends {@link AutoCloseable} for use in try-with-resources.
 */
public interface EventCollector extends AutoCloseable {

    /**
     * Note: lines must be written to an .evt (event) outputstream using: timestamp:filepos:data
     *
     * @param isCompressed whether the underlying stream is compressed
     * @param search       the search/filter to apply to each line
     * @param fileFromTime start of the file's time window (presumably epoch millis — confirm with implementations)
     * @param fileToTime   end of the file's time window
     * @param length       length of the input, used for position/time interpolation — TODO confirm
     * @param timeFormat   format used to extract timestamps from lines
     * @return integer results of the scan (implementation-defined; e.g. counts/positions — confirm with implementations)
     * @throws IOException if reading or writing the streams fails
     */
    Integer[] process(boolean isCompressed, Search search, long fileFromTime, long fileToTime, long length, String timeFormat) throws IOException;
}
package io.smallrye.reactive.operators; import io.reactivex.Flowable; public class MultiNever<T> extends AbstractMulti<T> { public static final MultiNever<Object> INSTANCE = new MultiNever<>(); private MultiNever() { // avoid direct instantiation. } @Override protected Flowable<T> flowable() { return Flowable.never(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.catalina.util;

import java.util.Arrays;

import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.LifecycleListener;

/**
 * Support class to assist in firing LifecycleEvent notifications to
 * registered LifecycleListeners.
 *
 * <p>The listener array is treated as copy-on-write: mutators replace the
 * whole array under {@code listenersLock}, so {@link #fireLifecycleEvent}
 * can iterate a stable snapshot without locking.</p>
 *
 * @author Craig R. McClanahan
 */
public final class LifecycleSupport {

    // ----------------------------------------------------------- Constructors

    /**
     * Construct a new LifecycleSupport object associated with the specified
     * Lifecycle component.
     *
     * @param lifecycle The Lifecycle component that will be the source
     *  of events that we fire
     */
    public LifecycleSupport(Lifecycle lifecycle) {
        super();
        this.lifecycle = lifecycle;
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The source component for lifecycle events that we will fire.
     */
    private Lifecycle lifecycle = null;

    /**
     * The set of registered LifecycleListeners for event notifications.
     * Replaced wholesale on every add/remove (copy-on-write).
     */
    private LifecycleListener listeners[] = new LifecycleListener[0];

    /** Lock object guarding replacement of the {@link #listeners} array. */
    private final Object listenersLock = new Object(); // Lock object for changes to listeners

    // --------------------------------------------------------- Public Methods

    /**
     * Add a lifecycle event listener to this component.
     *
     * @param listener The listener to add
     */
    public void addLifecycleListener(LifecycleListener listener) {
        synchronized (listenersLock) {
            // Copy-on-write: grow into a fresh array, then publish it.
            LifecycleListener results[] = Arrays.copyOf(listeners, listeners.length + 1);
            results[listeners.length] = listener;
            listeners = results;
        }
    }

    /**
     * Get the lifecycle listeners associated with this lifecycle. If this
     * Lifecycle has no listeners registered, a zero-length array is returned.
     *
     * @return the current listener array; callers must not modify it, as the
     *  same instance may be returned to multiple callers (copy-on-write
     *  guarantees it is never mutated in place)
     */
    public LifecycleListener[] findLifecycleListeners() {
        return listeners;
    }

    /**
     * Notify all lifecycle event listeners that a particular event has
     * occurred for this Container.  The default implementation performs
     * this notification synchronously using the calling thread.
     *
     * @param type Event type
     * @param data Event data
     */
    public void fireLifecycleEvent(String type, Object data) {
        LifecycleEvent event = new LifecycleEvent(lifecycle, type, data);
        // Snapshot the array reference once; concurrent add/remove replaces
        // the array rather than mutating it, so iteration is safe unlocked.
        LifecycleListener interested[] = listeners;
        for (LifecycleListener listener : interested) {
            listener.lifecycleEvent(event);
        }
    }

    /**
     * Remove a lifecycle event listener from this component. If the listener
     * is not currently registered (by identity), this is a no-op; if it was
     * registered more than once, only the first occurrence is removed.
     *
     * @param listener The listener to remove
     */
    public void removeLifecycleListener(LifecycleListener listener) {
        synchronized (listenersLock) {
            int n = -1;
            for (int i = 0; i < listeners.length; i++) {
                if (listeners[i] == listener) {
                    n = i;
                    break;
                }
            }
            if (n < 0)
                return;
            // Copy-on-write: build a fresh array without element n.
            LifecycleListener results[] = new LifecycleListener[listeners.length - 1];
            System.arraycopy(listeners, 0, results, 0, n);
            System.arraycopy(listeners, n + 1, results, n, listeners.length - n - 1);
            listeners = results;
        }
    }
}
package com.rahul.locationalarm.dashboard.newalarms.geofence; import android.Manifest; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.app.ActivityCompat; import android.util.Log; import android.widget.Toast; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.location.Geofence; import com.google.android.gms.location.GeofencingRequest; import com.google.android.gms.location.LocationServices; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.rahul.locationalarm.Constants; import com.rahul.locationalarm.dashboard.alarms.AlarmModel; import java.util.ArrayList; import java.util.List; public class GeofenceHelper implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener { private static final String TAG = GeofenceHelper.class.getSimpleName(); private static final int NO_INITIAL_TRIGGER = 0; private PendingIntent mPendingIntent; private GoogleApiClient mGoogleApiClient; private Context mContext; private AlarmModel mAlarmModel; private boolean mIsRemoving; private void initHelper(@NonNull final Context context, @NonNull final AlarmModel alarmDetail) { this.mAlarmModel = alarmDetail; this.mContext = context; } /** * To re register notification. 
* @param context {@link Context} * @param alarmDetails List of alarms */ public void registerForGeofence(@NonNull final Context context, @NonNull final List<AlarmModel> alarmDetails) { mIsRemoving = false; for (AlarmModel alarmDetail : alarmDetails) { initHelper(context, alarmDetail); final GoogleApiClient apiClient = getGoogleApiClient(context); // if client is not connected only then connect if (!apiClient.isConnected() && !apiClient.isConnecting()) { apiClient.connect(); } else { addGeofence(context, alarmDetail); } } } /** * To register notification. * @param context {@link Context} * @param alarmDetail Alarms */ public void registerForGeofence(@NonNull final Context context, @NonNull final AlarmModel alarmDetail) { mIsRemoving = false; initHelper(context, alarmDetail); final GoogleApiClient apiClient = getGoogleApiClient(context); // if client is not connected only then connect if (!apiClient.isConnected() && !apiClient.isConnecting()) { apiClient.connect(); } else { addGeofence(context, alarmDetail); } } public void removeGeofence(@NonNull final Context context, @NonNull final AlarmModel alarmDetail) { mIsRemoving = true; initHelper(context, alarmDetail); final GoogleApiClient apiClient = getGoogleApiClient(context); // if client is not connected only then connect if (!apiClient.isConnected() && !apiClient.isConnecting()) { apiClient.connect(); } else { removeGeofenceFromApiClient(); } } private void addGeofence(@NonNull final Context context, @NonNull final AlarmModel alarmDetail) { final Geofence geofence = new Geofence.Builder() .setRequestId(String.valueOf(alarmDetail.getId())) .setCircularRegion(alarmDetail.getLatitude(), alarmDetail.getLongitude(), Constants.GEOFENCE_RADIUS_IN_METERS) .setExpirationDuration(Geofence.NEVER_EXPIRE) .setTransitionTypes(Geofence.GEOFENCE_TRANSITION_ENTER | Geofence.GEOFENCE_TRANSITION_EXIT) .build(); final GeofencingRequest request = new GeofencingRequest.Builder() .addGeofence(geofence) 
.setInitialTrigger(NO_INITIAL_TRIGGER) .build(); if (ActivityCompat.checkSelfPermission(context, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) { return; } LocationServices.getGeofencingClient(context).addGeofences(request, getPendingIntent(context)) .addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { Log.d(TAG, "Failed to register Geofence"); Toast.makeText(context, "Failed to register Geofence", Toast.LENGTH_SHORT).show(); } }) .addOnSuccessListener(new OnSuccessListener<Void>() { @Override public void onSuccess(Void aVoid) { Log.d(TAG, "Successfully registered Geofence"); Toast.makeText(context, "Successfully registered Geofence", Toast.LENGTH_SHORT).show(); } }); } private void removeGeofenceFromApiClient() { final List<String> geofencesToRemove = new ArrayList<>(); geofencesToRemove.add(String.valueOf(mAlarmModel.getId())); LocationServices.getGeofencingClient(mContext).removeGeofences(geofencesToRemove) .addOnSuccessListener(new OnSuccessListener<Void>() { @Override public void onSuccess(Void aVoid) { Log.d(TAG, "Removed Geofence"); Toast.makeText(mContext, "Removed Geofence", Toast.LENGTH_SHORT).show(); } }).addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { Log.d(TAG, "Failed to Remove Geofence"); Toast.makeText(mContext, "Failed to Remove Geofence", Toast.LENGTH_SHORT).show(); } }); } private GoogleApiClient getGoogleApiClient(@NonNull Context context) { if (mGoogleApiClient == null) { mGoogleApiClient = new GoogleApiClient.Builder(context) .addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .addApi(LocationServices.API) .build(); } return mGoogleApiClient; } private PendingIntent getPendingIntent(@NonNull Context context) { if (mPendingIntent == null) { final Intent serviceIntent = new Intent(context, GeofenceTransitionsIntentService.class); mPendingIntent = PendingIntent.getService(context, 0, 
serviceIntent, 0); } return mPendingIntent; } @Override public void onConnected(@Nullable Bundle bundle) { if (mIsRemoving) { removeGeofenceFromApiClient(); } else { addGeofence(mContext, mAlarmModel); } } @Override public void onConnectionSuspended(int i) { } @Override public void onConnectionFailed(@NonNull ConnectionResult connectionResult) { } public void disconnectApiClient() { if (mGoogleApiClient != null) { mGoogleApiClient.disconnect(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator; import org.apache.hadoop.hive.ql.exec.DummyStoreOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.OperatorUtils; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import 
org.apache.hadoop.hive.ql.exec.UnionOperator; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.GenTezUtils; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.SemiJoinBranchInfo; import org.apache.hadoop.hive.ql.plan.DynamicPruningEventDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils; import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicValueDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.stats.StatsUtils; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInBloomFilter; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Multiset; import com.google.common.collect.Sets; import com.google.common.collect.TreeMultiset; import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.AUTOPARALLEL; import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.FIXED; import static 
org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNIFORM; import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNSET; /** * Shared computation optimizer. * * <p>Originally, this rule would find scan operators over the same table * in the query plan and merge them if they met some preconditions. * * TS TS TS * | | -&gt; / \ * Op Op Op Op * * <p>Now the rule has been extended to find opportunities to other operators * downstream, not only a single table scan. * * TS1 TS2 TS1 TS2 TS1 TS2 * | | | | | | * | RS | RS | RS * \ / \ / -&gt; \ / * MapJoin MapJoin MapJoin * | | / \ * Op Op Op Op * * <p>If the extended version of the optimizer is enabled, it can go beyond * a work boundary to find reutilization opportunities. * * <p>The optimization only works with the Tez execution engine. */ public class SharedWorkOptimizer extends Transform { private final static Logger LOG = LoggerFactory.getLogger(SharedWorkOptimizer.class); @Override public ParseContext transform(ParseContext pctx) throws SemanticException { final Map<String, TableScanOperator> topOps = pctx.getTopOps(); if (topOps.size() < 2) { // Nothing to do, bail out return pctx; } if (LOG.isDebugEnabled()) { LOG.debug("Before SharedWorkOptimizer:\n" + Operator.toString(pctx.getTopOps().values())); } // Cache to use during optimization SharedWorkOptimizerCache optimizerCache = new SharedWorkOptimizerCache(); // Gather information about the DPP table scans and store it in the cache gatherDPPTableScanOps(pctx, optimizerCache); // Map of dbName.TblName -> TSOperator ArrayListMultimap<String, TableScanOperator> tableNameToOps = splitTableScanOpsByTable(pctx); // We enforce a certain order when we do the reutilization. // In particular, we use size of table x number of reads to // rank the tables. 
List<Entry<String, Long>> sortedTables = rankTablesByAccumulatedSize(pctx); LOG.debug("Sorted tables by size: {}", sortedTables); // Execute shared work optimization sharedWorkOptimization(pctx, optimizerCache, tableNameToOps, sortedTables, false); if (LOG.isDebugEnabled()) { LOG.debug("After SharedWorkOptimizer:\n" + Operator.toString(pctx.getTopOps().values())); } if (pctx.getConf().getBoolVar(ConfVars.HIVE_SHARED_WORK_EXTENDED_OPTIMIZATION)) { // Execute extended shared work optimization sharedWorkExtendedOptimization(pctx, optimizerCache); if (LOG.isDebugEnabled()) { LOG.debug("After SharedWorkExtendedOptimizer:\n" + Operator.toString(pctx.getTopOps().values())); } } if (pctx.getConf().getBoolVar(ConfVars.HIVE_SHARED_WORK_SEMIJOIN_OPTIMIZATION)) { // Map of dbName.TblName -> TSOperator tableNameToOps = splitTableScanOpsByTable(pctx); // We rank by size of table x number of reads sortedTables = rankTablesByAccumulatedSize(pctx); // Execute shared work optimization with semijoin removal boolean optimized = sharedWorkOptimization(pctx, optimizerCache, tableNameToOps, sortedTables, true); if (optimized && pctx.getConf().getBoolVar(ConfVars.HIVE_SHARED_WORK_EXTENDED_OPTIMIZATION)) { // If it was further optimized, execute a second round of extended shared work optimizer sharedWorkExtendedOptimization(pctx, optimizerCache); } if (LOG.isDebugEnabled()) { LOG.debug("After SharedWorkSJOptimizer:\n" + Operator.toString(pctx.getTopOps().values())); } } if(pctx.getConf().getBoolVar(ConfVars.HIVE_SHARED_WORK_REUSE_MAPJOIN_CACHE)) { // Try to reuse cache for broadcast side in mapjoin operators that // share same input. // First we group together all the mapjoin operators that share same // reduce sink operator. 
final Multimap<Operator<?>, MapJoinOperator> parentToMapJoinOperators = ArrayListMultimap.create(); final Set<Operator<?>> visitedOperators = new HashSet<>(); for (Entry<Operator<?>, Collection<Operator<?>>> e : optimizerCache.operatorToWorkOperators.asMap().entrySet()) { if (visitedOperators.contains(e.getKey())) { // Already visited this work, we move on continue; } for (Operator<?> op : e.getValue()) { if (op instanceof MapJoinOperator) { MapJoinOperator mapJoinOp = (MapJoinOperator) op; // Only allowed for mapjoin operator if (!mapJoinOp.getConf().isBucketMapJoin() && !mapJoinOp.getConf().isDynamicPartitionHashJoin()) { parentToMapJoinOperators.put( obtainBroadcastInput(mapJoinOp).getParentOperators().get(0), mapJoinOp); } } visitedOperators.add(op); } } // For each group, set the cache key accordingly if there is more than one operator // and input RS operator are equal for (Collection<MapJoinOperator> c : parentToMapJoinOperators.asMap().values()) { Map<ReduceSinkOperator, String> rsOpToCacheKey = new HashMap<>(); for (MapJoinOperator mapJoinOp : c) { ReduceSinkOperator rsOp = obtainBroadcastInput(mapJoinOp); String cacheKey = null; for (Entry<ReduceSinkOperator, String> e: rsOpToCacheKey.entrySet()) { if (compareOperator(pctx, rsOp, e.getKey())) { cacheKey = e.getValue(); break; } } if (cacheKey == null) { // Either it is the first map join operator or there was no equivalent RS, // hence generate cache key cacheKey = MapJoinDesc.generateCacheKey(mapJoinOp.getOperatorId()); rsOpToCacheKey.put(rsOp, cacheKey); } // Set in the conf of the map join operator mapJoinOp.getConf().setCacheKey(cacheKey); } } } // If we are running tests, we are going to verify that the contents of the cache // correspond with the contents of the plan, and otherwise we fail. // This check always run when we are running in test mode, independently on whether // we use the basic or the extended version of the optimizer. 
if (pctx.getConf().getBoolVar(ConfVars.HIVE_IN_TEST)) { Set<Operator<?>> visited = new HashSet<>(); Iterator<Entry<String, TableScanOperator>> it = topOps.entrySet().iterator(); while (it.hasNext()) { Entry<String, TableScanOperator> e = it.next(); for (Operator<?> op : OperatorUtils.findOperators(e.getValue(), Operator.class)) { if (!visited.contains(op)) { Set<Operator<?>> workCachedOps = findWorkOperators(optimizerCache, op); Set<Operator<?>> workPlanOps = findWorkOperators(op, new HashSet<>()); if (!workCachedOps.equals(workPlanOps)) { throw new SemanticException("Error in shared work optimizer: operator cache contents " + "and actual plan differ\nIn cache: " + workCachedOps + "\nIn plan: " + workPlanOps); } visited.add(op); } } } } return pctx; } private static boolean sharedWorkOptimization(ParseContext pctx, SharedWorkOptimizerCache optimizerCache, ArrayListMultimap<String, TableScanOperator> tableNameToOps, List<Entry<String, Long>> sortedTables, boolean removeSemijoin) throws SemanticException { // Boolean to keep track of whether this method actually merged any TS operators boolean mergedExecuted = false; Multimap<String, TableScanOperator> existingOps = ArrayListMultimap.create(); Set<Operator<?>> removedOps = new HashSet<>(); for (Entry<String, Long> tablePair : sortedTables) { String tableName = tablePair.getKey(); for (TableScanOperator discardableTsOp : tableNameToOps.get(tableName)) { if (removedOps.contains(discardableTsOp)) { LOG.debug("Skip {} as it has already been removed", discardableTsOp); continue; } Collection<TableScanOperator> prevTsOps = existingOps.get(tableName); for (TableScanOperator retainableTsOp : prevTsOps) { if (removedOps.contains(retainableTsOp)) { LOG.debug("Skip {} as it has already been removed", retainableTsOp); continue; } SharedResult sr; if (removeSemijoin) { // We check if the two table scan operators can actually be merged modulo SJs. 
// Hence, two conditions should be met: // (i) the TS ops should be mergeable excluding any kind of DPP, and // (ii) the DPP branches (excluding SJs) should be the same boolean mergeable = areMergeable(pctx, optimizerCache, retainableTsOp, discardableTsOp); if (!mergeable) { // Skip LOG.debug("{} and {} cannot be merged", retainableTsOp, discardableTsOp); continue; } boolean validMerge = areMergeableExcludeSemijoinsExtendedCheck( pctx, optimizerCache, retainableTsOp, discardableTsOp); if (!validMerge) { // Skip LOG.debug("{} and {} do not meet preconditions", retainableTsOp, discardableTsOp); continue; } // If tests pass, we create the shared work optimizer additional information // about the part of the tree that can be merged. We need to regenerate the // cache because semijoin operators have been removed sr = extractSharedOptimizationInfoForRoot( pctx, optimizerCache, retainableTsOp, discardableTsOp); } else { // First we quickly check if the two table scan operators can actually be merged if (!areMergeable(pctx, optimizerCache, retainableTsOp, discardableTsOp) || !areMergeableExtendedCheck(pctx, optimizerCache, retainableTsOp, discardableTsOp)) { // Skip LOG.debug("{} and {} cannot be merged", retainableTsOp, discardableTsOp); continue; } // Secondly, we extract information about the part of the tree that can be merged // as well as some structural information (memory consumption) that needs to be // used to determined whether the merge can happen sr = extractSharedOptimizationInfoForRoot( pctx, optimizerCache, retainableTsOp, discardableTsOp); // It seems these two operators can be merged. // Check that plan meets some preconditions before doing it. // In particular, in the presence of map joins in the upstream plan: // - we cannot exceed the noconditional task size, and // - if we already merged the big table, we cannot merge the broadcast // tables. 
if (!validPreConditions(pctx, optimizerCache, sr)) { // Skip LOG.debug("{} and {} do not meet preconditions", retainableTsOp, discardableTsOp); continue; } } // We can merge mergedExecuted = true; if (sr.retainableOps.size() > 1) { // More than TS operator Operator<?> lastRetainableOp = sr.retainableOps.get(sr.retainableOps.size() - 1); Operator<?> lastDiscardableOp = sr.discardableOps.get(sr.discardableOps.size() - 1); if (lastDiscardableOp.getNumChild() != 0) { List<Operator<? extends OperatorDesc>> allChildren = Lists.newArrayList(lastDiscardableOp.getChildOperators()); for (Operator<? extends OperatorDesc> op : allChildren) { lastDiscardableOp.getChildOperators().remove(op); op.replaceParent(lastDiscardableOp, lastRetainableOp); lastRetainableOp.getChildOperators().add(op); } } LOG.debug("Merging subtree starting at {} into subtree starting at {}", discardableTsOp, retainableTsOp); } else { if (retainableTsOp.getConf().getFilterExpr() != null) { // Push filter on top of children for retainable pushFilterToTopOfTableScan(optimizerCache, retainableTsOp); } if (discardableTsOp.getConf().getFilterExpr() != null) { // Push filter on top of children for discardable pushFilterToTopOfTableScan(optimizerCache, discardableTsOp); } // Obtain filter for shared TS operator ExprNodeGenericFuncDesc exprNode = null; if (retainableTsOp.getConf().getFilterExpr() != null && discardableTsOp.getConf().getFilterExpr() != null) { // Combine exprNode = retainableTsOp.getConf().getFilterExpr(); ExprNodeGenericFuncDesc tsExprNode = discardableTsOp.getConf().getFilterExpr(); if (!exprNode.isSame(tsExprNode)) { // We merge filters from previous scan by ORing with filters from current scan if (exprNode.getGenericUDF() instanceof GenericUDFOPOr) { List<ExprNodeDesc> newChildren = new ArrayList<>(exprNode.getChildren().size() + 1); for (ExprNodeDesc childExprNode : exprNode.getChildren()) { if (childExprNode.isSame(tsExprNode)) { // We do not need to do anything, it is in the OR expression 
break; } newChildren.add(childExprNode); } if (exprNode.getChildren().size() == newChildren.size()) { newChildren.add(tsExprNode); exprNode = ExprNodeGenericFuncDesc.newInstance( new GenericUDFOPOr(), newChildren); } } else { exprNode = ExprNodeGenericFuncDesc.newInstance( new GenericUDFOPOr(), Arrays.<ExprNodeDesc>asList(exprNode, tsExprNode)); } } } // Replace filter retainableTsOp.getConf().setFilterExpr(exprNode); // Replace table scan operator List<Operator<? extends OperatorDesc>> allChildren = Lists.newArrayList(discardableTsOp.getChildOperators()); for (Operator<? extends OperatorDesc> op : allChildren) { discardableTsOp.getChildOperators().remove(op); op.replaceParent(discardableTsOp, retainableTsOp); retainableTsOp.getChildOperators().add(op); } LOG.debug("Merging {} into {}", discardableTsOp, retainableTsOp); } // First we remove the input operators of the expression that // we are going to eliminate for (Operator<?> op : sr.discardableInputOps) { OperatorUtils.removeOperator(op); optimizerCache.removeOp(op); removedOps.add(op); // Remove DPP predicates if (op instanceof ReduceSinkOperator) { SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op); if (sjbi != null && !sr.discardableOps.contains(sjbi.getTsOp()) && !sr.discardableInputOps.contains(sjbi.getTsOp())) { GenTezUtils.removeSemiJoinOperator( pctx, (ReduceSinkOperator) op, sjbi.getTsOp()); optimizerCache.tableScanToDPPSource.remove(sjbi.getTsOp(), op); } } else if (op instanceof AppMasterEventOperator) { DynamicPruningEventDesc dped = (DynamicPruningEventDesc) op.getConf(); if (!sr.discardableOps.contains(dped.getTableScan()) && !sr.discardableInputOps.contains(dped.getTableScan())) { GenTezUtils.removeSemiJoinOperator( pctx, (AppMasterEventOperator) op, dped.getTableScan()); optimizerCache.tableScanToDPPSource.remove(dped.getTableScan(), op); } } LOG.debug("Input operator removed: {}", op); } // Then we merge the operators of the works we are going to merge 
optimizerCache.removeOpAndCombineWork(discardableTsOp, retainableTsOp); removedOps.add(discardableTsOp); // Finally we remove the expression from the tree for (Operator<?> op : sr.discardableOps) { OperatorUtils.removeOperator(op); optimizerCache.removeOp(op); removedOps.add(op); if (sr.discardableOps.size() == 1) { // If there is a single discardable operator, it is a TableScanOperator // and it means that we have merged filter expressions for it. Thus, we // might need to remove DPP predicates from the retainable TableScanOperator Collection<Operator<?>> c = optimizerCache.tableScanToDPPSource.get((TableScanOperator) op); for (Operator<?> dppSource : c) { if (dppSource instanceof ReduceSinkOperator) { GenTezUtils.removeSemiJoinOperator(pctx, (ReduceSinkOperator) dppSource, (TableScanOperator) sr.retainableOps.get(0)); optimizerCache.tableScanToDPPSource.remove(sr.retainableOps.get(0), op); } else if (dppSource instanceof AppMasterEventOperator) { GenTezUtils.removeSemiJoinOperator(pctx, (AppMasterEventOperator) dppSource, (TableScanOperator) sr.retainableOps.get(0)); optimizerCache.tableScanToDPPSource.remove(sr.retainableOps.get(0), op); } } } LOG.debug("Operator removed: {}", op); } break; } if (removedOps.contains(discardableTsOp)) { // This operator has been removed, remove it from the list of existing operators existingOps.remove(tableName, discardableTsOp); } else { // This operator has not been removed, include it in the list of existing operators existingOps.put(tableName, discardableTsOp); } } } // Remove unused table scan operators pctx.getTopOps().entrySet().removeIf( (Entry<String, TableScanOperator> e) -> e.getValue().getNumChild() == 0); return mergedExecuted; } private static void sharedWorkExtendedOptimization(ParseContext pctx, SharedWorkOptimizerCache optimizerCache) throws SemanticException { // Gather RS operators that 1) belong to root works, i.e., works containing TS operators, // and 2) share the same input operator. 
// These will be the first target for extended shared work optimization
Multimap<Operator<?>, ReduceSinkOperator> parentToRsOps = ArrayListMultimap.create();
Set<Operator<?>> visited = new HashSet<>();
for (Entry<String, TableScanOperator> e : pctx.getTopOps().entrySet()) {
  gatherReduceSinkOpsByInput(parentToRsOps, visited,
      findWorkOperators(optimizerCache, e.getValue()));
}
Set<Operator<?>> removedOps = new HashSet<>();
// Fixpoint loop: merging RS subtrees may expose new mergeable RS groups, so we
// iterate until no candidate groups remain.
while (!parentToRsOps.isEmpty()) {
  // As above, we enforce a certain order when we do the reutilization.
  // In particular, we use size of data in RS x number of uses.
  List<Entry<Operator<?>, Long>> sortedRSGroups = rankOpsByAccumulatedSize(parentToRsOps.keySet());
  LOG.debug("Sorted operators by size: {}", sortedRSGroups);
  // Execute extended optimization
  // For each RS, check whether other RS in same work could be merge into this one.
  // If they are merged, RS operators in the resulting work will be considered
  // mergeable in next loop iteration.
  Multimap<Operator<?>, ReduceSinkOperator> existingRsOps = ArrayListMultimap.create();
  for (Entry<Operator<?>, Long> rsGroupInfo : sortedRSGroups) {
    Operator<?> rsParent = rsGroupInfo.getKey();
    for (ReduceSinkOperator discardableRsOp : parentToRsOps.get(rsParent)) {
      if (removedOps.contains(discardableRsOp)) {
        LOG.debug("Skip {} as it has already been removed", discardableRsOp);
        continue;
      }
      Collection<ReduceSinkOperator> otherRsOps = existingRsOps.get(rsParent);
      for (ReduceSinkOperator retainableRsOp : otherRsOps) {
        if (removedOps.contains(retainableRsOp)) {
          LOG.debug("Skip {} as it has already been removed", retainableRsOp);
          continue;
        }
        // First we quickly check if the two RS operators can actually be merged.
        // We already know that these two RS operators have the same parent, but
        // we need to check whether both RS are actually equal. Further, we check
        // whether their child is also equal. If any of these conditions are not
        // met, we are not going to try to merge.
        boolean mergeable = compareOperator(pctx, retainableRsOp, discardableRsOp) &&
            compareOperator(pctx, retainableRsOp.getChildOperators().get(0),
                discardableRsOp.getChildOperators().get(0));
        if (!mergeable) {
          // Skip
          LOG.debug("{} and {} cannot be merged", retainableRsOp, discardableRsOp);
          continue;
        }
        LOG.debug("Checking additional conditions for merging subtree starting at {}"
            + " into subtree starting at {}", discardableRsOp, retainableRsOp);
        // Secondly, we extract information about the part of the tree that can be merged
        // as well as some structural information (memory consumption) that needs to be
        // used to determined whether the merge can happen
        Operator<?> retainableRsOpChild = retainableRsOp.getChildOperators().get(0);
        Operator<?> discardableRsOpChild = discardableRsOp.getChildOperators().get(0);
        SharedResult sr = extractSharedOptimizationInfo(
            pctx, optimizerCache, retainableRsOp, discardableRsOp,
            retainableRsOpChild, discardableRsOpChild);
        // It seems these two operators can be merged.
        // Check that plan meets some preconditions before doing it.
        // In particular, in the presence of map joins in the upstream plan:
        // - we cannot exceed the noconditional task size, and
        // - if we already merged the big table, we cannot merge the broadcast
        // tables.
        if (sr.retainableOps.isEmpty() || !validPreConditions(pctx, optimizerCache, sr)) {
          // Skip
          LOG.debug("{} and {} do not meet preconditions", retainableRsOp, discardableRsOp);
          continue;
        }
        deduplicateReduceTraits(retainableRsOp.getConf(), discardableRsOp.getConf());
        // We can merge
        Operator<?> lastRetainableOp = sr.retainableOps.get(sr.retainableOps.size() - 1);
        Operator<?> lastDiscardableOp = sr.discardableOps.get(sr.discardableOps.size() - 1);
        if (lastDiscardableOp.getNumChild() != 0) {
          // Re-hang the children of the last discardable operator under the
          // last retainable operator (copy the list before mutating it).
          List<Operator<? extends OperatorDesc>> allChildren =
              Lists.newArrayList(lastDiscardableOp.getChildOperators());
          for (Operator<? extends OperatorDesc> op : allChildren) {
            lastDiscardableOp.getChildOperators().remove(op);
            op.replaceParent(lastDiscardableOp, lastRetainableOp);
            lastRetainableOp.getChildOperators().add(op);
          }
        }
        LOG.debug("Merging subtree starting at {} into subtree starting at {}",
            discardableRsOp, retainableRsOp);
        // First we remove the input operators of the expression that
        // we are going to eliminate
        for (Operator<?> op : sr.discardableInputOps) {
          OperatorUtils.removeOperator(op);
          optimizerCache.removeOp(op);
          removedOps.add(op);
          // Remove DPP predicates
          if (op instanceof ReduceSinkOperator) {
            SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op);
            if (sjbi != null && !sr.discardableOps.contains(sjbi.getTsOp()) &&
                !sr.discardableInputOps.contains(sjbi.getTsOp())) {
              GenTezUtils.removeSemiJoinOperator(
                  pctx, (ReduceSinkOperator) op, sjbi.getTsOp());
              optimizerCache.tableScanToDPPSource.remove(sjbi.getTsOp(), op);
            }
          } else if (op instanceof AppMasterEventOperator) {
            DynamicPruningEventDesc dped = (DynamicPruningEventDesc) op.getConf();
            if (!sr.discardableOps.contains(dped.getTableScan()) &&
                !sr.discardableInputOps.contains(dped.getTableScan())) {
              GenTezUtils.removeSemiJoinOperator(
                  pctx, (AppMasterEventOperator) op, dped.getTableScan());
              optimizerCache.tableScanToDPPSource.remove(dped.getTableScan(), op);
            }
          }
          LOG.debug("Input operator removed: {}", op);
        }
        // We remove the discardable RS operator
        OperatorUtils.removeOperator(discardableRsOp);
        optimizerCache.removeOp(discardableRsOp);
        removedOps.add(discardableRsOp);
        LOG.debug("Operator removed: {}", discardableRsOp);
        // Then we merge the operators of the works we are going to merge
        optimizerCache.removeOpAndCombineWork(discardableRsOpChild, retainableRsOpChild);
        // Finally we remove the rest of the expression from the tree
        for (Operator<?> op : sr.discardableOps) {
          OperatorUtils.removeOperator(op);
          optimizerCache.removeOp(op);
          removedOps.add(op);
          LOG.debug("Operator removed: {}", op);
        }
        break;
      }
      if (removedOps.contains(discardableRsOp)) {
        // This operator has been removed, remove it from the list of existing operators
        existingRsOps.remove(rsParent, discardableRsOp);
      } else {
        // This operator has not been removed, include it in the list of existing operators
        existingRsOps.put(rsParent, discardableRsOp);
      }
    }
  }
  // We gather the operators that will be used for next iteration of extended optimization
  // (if any)
  parentToRsOps = ArrayListMultimap.create();
  visited = new HashSet<>();
  for (Entry<Operator<?>, ReduceSinkOperator> e : existingRsOps.entries()) {
    if (removedOps.contains(e.getValue()) || e.getValue().getNumChild() < 1) {
      // If 1) RS has been removed, or 2) it does not have a child (for instance, it is a
      // semijoin RS), we can quickly skip this one
      continue;
    }
    gatherReduceSinkOpsByInput(parentToRsOps, visited,
        findWorkOperators(optimizerCache, e.getValue().getChildOperators().get(0)));
  }
}
// Remove unused table scan operators
pctx.getTopOps().entrySet().removeIf(
    (Entry<String, TableScanOperator> e) -> e.getValue().getNumChild() == 0);
}

/**
 * Obtain the RS input for a mapjoin operator.
 */
private static ReduceSinkOperator obtainBroadcastInput(MapJoinOperator mapJoinOp) {
  // A mapjoin has exactly one broadcast (RS) parent; it is whichever of the
  // first two parents is a ReduceSinkOperator.
  return mapJoinOp.getParentOperators().get(0) instanceof ReduceSinkOperator ?
      (ReduceSinkOperator) mapJoinOp.getParentOperators().get(0) :
      (ReduceSinkOperator) mapJoinOp.getParentOperators().get(1);
}

/**
 * This method gathers the TS operators with DPP from the context and
 * stores them into the input optimization cache.
 */
private static void gatherDPPTableScanOps(
    ParseContext pctx, SharedWorkOptimizerCache optimizerCache) throws SemanticException {
  // Find TS operators with partition pruning enabled in plan
  // because these TS may potentially read different data for
  // different pipeline.
  // These can be:
  // 1) TS with DPP.
  // 2) TS with semijoin DPP.
  Map<String, TableScanOperator> topOps = pctx.getTopOps();
  Collection<Operator<?
extends OperatorDesc>> tableScanOps =
    Lists.<Operator<?>>newArrayList(topOps.values());
// Event-based DPP sources: AppMasterEvent operators carrying a pruning descriptor.
Set<AppMasterEventOperator> s =
    OperatorUtils.findOperators(tableScanOps, AppMasterEventOperator.class);
for (AppMasterEventOperator a : s) {
  if (a.getConf() instanceof DynamicPruningEventDesc) {
    DynamicPruningEventDesc dped = (DynamicPruningEventDesc) a.getConf();
    optimizerCache.tableScanToDPPSource.put(dped.getTableScan(), a);
  }
}
// Semijoin-based DPP sources: RS operators registered in the semijoin branch info.
for (Entry<ReduceSinkOperator, SemiJoinBranchInfo> e : pctx.getRsToSemiJoinBranchInfo().entrySet()) {
  optimizerCache.tableScanToDPPSource.put(e.getValue().getTsOp(), e.getKey());
}
LOG.debug("DPP information stored in the cache: {}", optimizerCache.tableScanToDPPSource);
}

/**
 * Groups the TS operators of the plan by fully qualified table name
 * ("db.table") so that scans over the same table can be considered for merging.
 */
private static ArrayListMultimap<String, TableScanOperator> splitTableScanOpsByTable(
    ParseContext pctx) {
  ArrayListMultimap<String, TableScanOperator> tableNameToOps = ArrayListMultimap.create();
  // Sort by operator ID so we get deterministic results
  Map<String, TableScanOperator> sortedTopOps = new TreeMap<>(pctx.getTopOps());
  for (Entry<String, TableScanOperator> e : sortedTopOps.entrySet()) {
    TableScanOperator tsOp = e.getValue();
    tableNameToOps.put(
        tsOp.getConf().getTableMetadata().getDbName() + "."
            + tsOp.getConf().getTableMetadata().getTableName(), tsOp);
  }
  return tableNameToOps;
}

/**
 * Ranks tables by the accumulated statistics data size of all their scans,
 * largest first, so bigger tables are optimized first.
 */
private static List<Entry<String, Long>> rankTablesByAccumulatedSize(ParseContext pctx) {
  Map<String, Long> tableToTotalSize = new HashMap<>();
  for (Entry<String, TableScanOperator> e : pctx.getTopOps().entrySet()) {
    TableScanOperator tsOp = e.getValue();
    String tableName = tsOp.getConf().getTableMetadata().getDbName() + "."
        + tsOp.getConf().getTableMetadata().getTableName();
    // Missing statistics are treated as size 0.
    long tableSize = tsOp.getStatistics() != null ?
        tsOp.getStatistics().getDataSize() : 0L;
    Long totalSize = tableToTotalSize.get(tableName);
    if (totalSize != null) {
      // safeAdd guards against overflow when accumulating sizes.
      tableToTotalSize.put(tableName,
          StatsUtils.safeAdd(totalSize, tableSize));
    } else {
      tableToTotalSize.put(tableName, tableSize);
    }
  }
  List<Entry<String, Long>> sortedTables =
      new ArrayList<>(tableToTotalSize.entrySet());
  Collections.sort(sortedTables, Collections.reverseOrder(
      new Comparator<Map.Entry<String, Long>>() {
        @Override
        public int compare(Map.Entry<String, Long> o1, Map.Entry<String, Long> o2) {
          return (o1.getValue()).compareTo(o2.getValue());
        }
      }));
  return sortedTables;
}

/**
 * Collects groups of sibling ReduceSink operators (same single parent) from
 * the given operator set; only groups with more than one RS are candidates.
 */
private static void gatherReduceSinkOpsByInput(Multimap<Operator<?>, ReduceSinkOperator> parentToRsOps,
    Set<Operator<?>> visited, Set<Operator<?>> ops) {
  for (Operator<?> op : ops) {
    // If the RS has other RS siblings, we will add it to be considered in next iteration
    if (op instanceof ReduceSinkOperator && !visited.contains(op)) {
      Operator<?> parent = op.getParentOperators().get(0);
      Set<ReduceSinkOperator> s = new LinkedHashSet<>();
      for (Operator<?> c : parent.getChildOperators()) {
        if (c instanceof ReduceSinkOperator) {
          s.add((ReduceSinkOperator) c);
          visited.add(c);
        }
      }
      if (s.size() > 1) {
        parentToRsOps.putAll(parent, s);
      }
    }
  }
}

/**
 * Ranks operators by statistics data size multiplied by the number of
 * children (i.e., uses), largest first; ties broken by operator toString for
 * deterministic ordering.
 */
private static List<Entry<Operator<?>, Long>> rankOpsByAccumulatedSize(Set<Operator<?>> opsSet) {
  Map<Operator<?>, Long> opToTotalSize = new HashMap<>();
  for (Operator<?> op : opsSet) {
    // Missing statistics are treated as size 0.
    long size = op.getStatistics() != null ?
        op.getStatistics().getDataSize() : 0L;
    opToTotalSize.put(op,
        StatsUtils.safeMult(op.getChildOperators().size(), size));
  }
  List<Entry<Operator<?>, Long>> sortedOps =
      new ArrayList<>(opToTotalSize.entrySet());
  Collections.sort(sortedOps, Collections.reverseOrder(
      new Comparator<Map.Entry<Operator<?>, Long>>() {
        @Override
        public int compare(Map.Entry<Operator<?>, Long> o1, Map.Entry<Operator<?>, Long> o2) {
          int valCmp = o1.getValue().compareTo(o2.getValue());
          if (valCmp == 0) {
            return o1.getKey().toString().compareTo(o2.getKey().toString());
          }
          return valCmp;
        }
      }));
  return sortedOps;
}

// FIXME: probably this should also be integrated with isSame() logics
/**
 * Basic mergeability check for two TableScan operators over the same table:
 * needed columns, row limit, operator properties and pruned partitions must
 * all match.
 */
private static boolean areMergeable(ParseContext pctx, SharedWorkOptimizerCache optimizerCache,
    TableScanOperator tsOp1, TableScanOperator tsOp2) throws SemanticException {
  // First we check if the two table scan operators can actually be merged
  // If schemas do not match, we currently do not merge
  List<String> prevTsOpNeededColumns = tsOp1.getNeededColumns();
  List<String> tsOpNeededColumns = tsOp2.getNeededColumns();
  if (prevTsOpNeededColumns.size() != tsOpNeededColumns.size()) {
    return false;
  }
  boolean notEqual = false;
  for (int i = 0; i < prevTsOpNeededColumns.size(); i++) {
    if (!prevTsOpNeededColumns.get(i).equals(tsOpNeededColumns.get(i))) {
      notEqual = true;
      break;
    }
  }
  if (notEqual) {
    return false;
  }
  // If row limit does not match, we currently do not merge
  if (tsOp1.getConf().getRowLimit() != tsOp2.getConf().getRowLimit()) {
    return false;
  }
  // If table properties do not match, we currently do not merge
  if (!Objects.equals(tsOp1.getConf().getOpProps(), tsOp2.getConf().getOpProps())) {
    return false;
  }
  // If partitions do not match, we currently do not merge
  PrunedPartitionList prevTsOpPPList = pctx.getPrunedPartitions(tsOp1);
  PrunedPartitionList tsOpPPList = pctx.getPrunedPartitions(tsOp2);
  if (!prevTsOpPPList.getPartitions().equals(tsOpPPList.getPartitions())) {
    return false;
  }
  return true;
}

private static
// (continuation of the declaration started on the previous line)
// Extended mergeability check: both TS must carry equivalent DPP sources, and
// no DPP source of one TS may be generated downstream of the other TS
// (which would create a cycle after merging).
boolean areMergeableExtendedCheck(ParseContext pctx, SharedWorkOptimizerCache optimizerCache,
    TableScanOperator tsOp1, TableScanOperator tsOp2) throws SemanticException {
  // If is a DPP, check if actually it refers to same target, column, etc.
  // Further, the DPP value needs to be generated from same subtree
  List<Operator<?>> dppsOp1 = new ArrayList<>(optimizerCache.tableScanToDPPSource.get(tsOp1));
  List<Operator<?>> dppsOp2 = new ArrayList<>(optimizerCache.tableScanToDPPSource.get(tsOp2));
  if (dppsOp1.isEmpty() && dppsOp2.isEmpty()) {
    return true;
  }
  // Reject if a semijoin DPP source of tsOp1 is computed from tsOp2 (cycle risk).
  for (int i = 0; i < dppsOp1.size(); i++) {
    Operator<?> op = dppsOp1.get(i);
    if (op instanceof ReduceSinkOperator) {
      Set<Operator<?>> ascendants =
          findAscendantWorkOperators(pctx, optimizerCache, op);
      if (ascendants.contains(tsOp2)) {
        // This should not happen, we cannot merge
        return false;
      }
    }
  }
  // Symmetric check: a semijoin DPP source of tsOp2 computed from tsOp1.
  for (int i = 0; i < dppsOp2.size(); i++) {
    Operator<?> op = dppsOp2.get(i);
    if (op instanceof ReduceSinkOperator) {
      Set<Operator<?>> ascendants =
          findAscendantWorkOperators(pctx, optimizerCache, op);
      if (ascendants.contains(tsOp1)) {
        // This should not happen, we cannot merge
        return false;
      }
    }
  }
  if (dppsOp1.size() != dppsOp2.size()) {
    // Only first or second operator contains DPP pruning
    return false;
  }
  // Check if DPP branches are equal
  // Greedy bipartite matching: each source of tsOp1 must match a distinct,
  // not-yet-matched source of tsOp2 (matched positions tracked in the BitSet).
  BitSet bs = new BitSet();
  for (int i = 0; i < dppsOp1.size(); i++) {
    Operator<?> dppOp1 = dppsOp1.get(i);
    for (int j = 0; j < dppsOp2.size(); j++) {
      if (!bs.get(j)) {
        // If not visited yet
        Operator<?> dppOp2 = dppsOp2.get(j);
        if (compareAndGatherOps(pctx, dppOp1, dppOp2) != null) {
          // The DPP operator/branch are equal
          bs.set(j);
          break;
        }
      }
    }
    if (bs.cardinality() < i + 1) {
      return false;
    }
  }
  return true;
}

/**
 * Mergeability check that sets RS-based semijoin DPP sources aside: the
 * semijoins of the (single) targeted TS are temporarily unregistered, the
 * merge preconditions are evaluated, and the semijoins are either removed for
 * good (merge is valid) or restored (merge is not valid).
 */
private static boolean areMergeableExcludeSemijoinsExtendedCheck(ParseContext pctx,
    SharedWorkOptimizerCache optimizerCache,
    TableScanOperator tsOp1, TableScanOperator tsOp2) throws SemanticException {
  // We remove RS-based SJs from consideration, then we compare
  List<Operator<?>> dppsOp1 =
      new ArrayList<>(optimizerCache.tableScanToDPPSource.get(tsOp1));
  boolean removedDppOp1 = false;
  List<ReduceSinkOperator> rsOpsSemijoin1 = new ArrayList<>();
  List<Operator<?>> dppsOp2 = new ArrayList<>(optimizerCache.tableScanToDPPSource.get(tsOp2));
  boolean removedDppOp2 = false;
  List<ReduceSinkOperator> rsOpsSemijoin2 = new ArrayList<>();
  // NOTE(review): removing at index i inside a forward index loop skips the
  // element that slides into position i; if two semijoin RS sources are
  // adjacent in the list, the second one is not collected/removed — confirm
  // whether this is intended or the index should be decremented after remove.
  for (int i = 0; i < dppsOp1.size(); i++) {
    Operator<?> op = dppsOp1.get(i);
    if (op instanceof ReduceSinkOperator) {
      ReduceSinkOperator semijoinRSOp = (ReduceSinkOperator) op;
      if (pctx.getRsToSemiJoinBranchInfo().get(semijoinRSOp).getIsHint()) {
        // This is a hint, we should keep it, hence we bail out
        return false;
      }
      rsOpsSemijoin1.add(semijoinRSOp);
      dppsOp1.remove(i);
      removedDppOp1 = true;
    }
  }
  // Same collection pass for tsOp2 (same NOTE as above applies).
  for (int i = 0; i < dppsOp2.size(); i++) {
    Operator<?> op = dppsOp2.get(i);
    if (op instanceof ReduceSinkOperator) {
      ReduceSinkOperator semijoinRSOp = (ReduceSinkOperator) op;
      if (pctx.getRsToSemiJoinBranchInfo().get(semijoinRSOp).getIsHint()) {
        // This is a hint, we should keep it, hence we bail out
        return false;
      }
      rsOpsSemijoin2.add(semijoinRSOp);
      dppsOp2.remove(i);
      removedDppOp2 = true;
    }
  }
  if (removedDppOp1 && removedDppOp2) {
    // TODO: We do not merge, since currently we only merge when one of the TS operators
    // are not targetted by a SJ edge
    return false;
  }
  if (!removedDppOp1 && !removedDppOp2) {
    // None of them are targetted by a SJ, we skip them
    return false;
  }
  if (dppsOp1.size() != dppsOp2.size()) {
    // We cannot merge, we move to the next couple
    return false;
  }
  // Check if DPP branches are equal
  boolean equalBranches = true;
  BitSet bs = new BitSet();
  for (int i = 0; i < dppsOp1.size(); i++) {
    Operator<?> dppOp1 = dppsOp1.get(i);
    for (int j = 0; j < dppsOp2.size(); j++) {
      if (!bs.get(j)) {
        // If not visited yet
        Operator<?> dppOp2 = dppsOp2.get(j);
        if (compareAndGatherOps(pctx, dppOp1, dppOp2) != null) {
          // The DPP operator/branch are equal
          bs.set(j);
          break;
        }
      }
    }
    if (bs.cardinality() < i + 1) {
      // We cannot merge, we move to the next group
      equalBranches = false;
      break;
    }
  }
  if (!equalBranches) {
    // Skip
    return false;
  }
  // We reached here, other DPP is the same, these two could potentially be merged.
  // Hence, we perform the last check. To do this, we remove the SJ operators,
  // but we remember their position in the plan. After that, we will reintroduce
  // the SJ operator. If the checks were valid, we will merge and remove the semijoin.
  // If the rest of tests to merge do not pass, we will abort the shared scan optimization
  // and we are done
  TableScanOperator targetTSOp;
  List<ReduceSinkOperator> semijoinRsOps;
  List<SemiJoinBranchInfo> sjBranches = new ArrayList<>();
  if (removedDppOp1) {
    targetTSOp = tsOp1;
    semijoinRsOps = rsOpsSemijoin1;
  } else {
    targetTSOp = tsOp2;
    semijoinRsOps = rsOpsSemijoin2;
  }
  // Temporarily unregister the semijoins from the caches.
  optimizerCache.tableScanToDPPSource.get(targetTSOp).removeAll(semijoinRsOps);
  for (ReduceSinkOperator rsOp : semijoinRsOps) {
    sjBranches.add(pctx.getRsToSemiJoinBranchInfo().remove(rsOp));
  }
  boolean validMerge = validPreConditions(pctx, optimizerCache,
      extractSharedOptimizationInfoForRoot(pctx, optimizerCache, tsOp1, tsOp2));
  if (validMerge) {
    // We are going to merge, hence we remove the semijoins completely
    for (ReduceSinkOperator semijoinRsOp : semijoinRsOps) {
      Operator<?> branchOp = GenTezUtils.removeBranch(semijoinRsOp);
      // Walk down the removed branch and purge every operator from the cache.
      while (branchOp != null) {
        optimizerCache.removeOp(branchOp);
        branchOp = branchOp.getNumChild() > 0 ?
            branchOp.getChildOperators().get(0) : null;
      }
      GenTezUtils.removeSemiJoinOperator(pctx, semijoinRsOp, targetTSOp);
    }
  } else {
    // Otherwise, the put the semijoins back in the auxiliary data structures
    optimizerCache.tableScanToDPPSource.get(targetTSOp).addAll(semijoinRsOps);
    for (int i = 0; i < semijoinRsOps.size(); i++) {
      pctx.getRsToSemiJoinBranchInfo().put(semijoinRsOps.get(i), sjBranches.get(i));
    }
  }
  return validMerge;
}

/**
 * Computes the shared-optimization information for two root TableScan
 * operators: which operators can be retained/discarded and the memory
 * footprint of the merged work. Handles the special case of equivalent
 * Filter children whose predicates differ only in DPP conjuncts.
 */
private static SharedResult extractSharedOptimizationInfoForRoot(ParseContext pctx,
    SharedWorkOptimizerCache optimizerCache,
    TableScanOperator retainableTsOp, TableScanOperator discardableTsOp) throws SemanticException {
  LinkedHashSet<Operator<?>> retainableOps = new LinkedHashSet<>();
  LinkedHashSet<Operator<?>> discardableOps = new LinkedHashSet<>();
  Set<Operator<?>> discardableInputOps = new HashSet<>();
  long dataSize = 0L;
  long maxDataSize = 0L;
  retainableOps.add(retainableTsOp);
  discardableOps.add(discardableTsOp);
  Operator<?> equalOp1 = retainableTsOp;
  Operator<?> equalOp2 = discardableTsOp;
  if (equalOp1.getNumChild() > 1 || equalOp2.getNumChild() > 1) {
    // TODO: Support checking multiple child operators to merge further.
// (fragment) Multiple children: stop here and return what was gathered so far.
discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache, discardableOps));
return new SharedResult(retainableOps, discardableOps, discardableInputOps,
    dataSize, maxDataSize);
}
Operator<?> currentOp1 = retainableTsOp.getChildOperators().get(0);
Operator<?> currentOp2 = discardableTsOp.getChildOperators().get(0);
// Special treatment for Filter operator that ignores the DPP predicates
if (currentOp1 instanceof FilterOperator && currentOp2 instanceof FilterOperator) {
  boolean equalFilters = false;
  FilterDesc op1Conf = ((FilterOperator) currentOp1).getConf();
  FilterDesc op2Conf = ((FilterOperator) currentOp2).getConf();
  if (op1Conf.getIsSamplingPred() == op2Conf.getIsSamplingPred() &&
      StringUtils.equals(op1Conf.getSampleDescExpr(), op2Conf.getSampleDescExpr())) {
    // Compare the conjunct multisets with DPP-generated predicates stripped out.
    Multiset<String> conjsOp1String = extractConjsIgnoringDPPPreds(op1Conf.getPredicate());
    Multiset<String> conjsOp2String = extractConjsIgnoringDPPPreds(op2Conf.getPredicate());
    if (conjsOp1String.equals(conjsOp2String)) {
      equalFilters = true;
    }
  }
  if (equalFilters) {
    equalOp1 = currentOp1;
    equalOp2 = currentOp2;
    retainableOps.add(equalOp1);
    discardableOps.add(equalOp2);
    if (currentOp1.getChildOperators().size() > 1 ||
        currentOp2.getChildOperators().size() > 1) {
      // TODO: Support checking multiple child operators to merge further.
      discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache, discardableOps));
      discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache, retainableOps,
          discardableInputOps));
      return new SharedResult(retainableOps, discardableOps, discardableInputOps,
          dataSize, maxDataSize);
    }
    currentOp1 = currentOp1.getChildOperators().get(0);
    currentOp2 = currentOp2.getChildOperators().get(0);
  } else {
    // Bail out
    discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache, discardableOps));
    discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache, retainableOps,
        discardableInputOps));
    return new SharedResult(retainableOps, discardableOps, discardableInputOps,
        dataSize, maxDataSize);
  }
}
// Delegate to the generic zipper to keep walking down both branches.
return extractSharedOptimizationInfo(pctx, optimizerCache, equalOp1, equalOp2,
    currentOp1, currentOp2, retainableOps, discardableOps, discardableInputOps);
}

/**
 * Convenience overload: starts the shared-optimization extraction with empty
 * retainable/discardable/input sets.
 */
private static SharedResult extractSharedOptimizationInfo(ParseContext pctx,
    SharedWorkOptimizerCache optimizerCache,
    Operator<?> retainableOpEqualParent,
    Operator<?> discardableOpEqualParent,
    Operator<?> retainableOp,
    Operator<?> discardableOp) throws SemanticException {
  return extractSharedOptimizationInfo(pctx, optimizerCache,
      retainableOpEqualParent, discardableOpEqualParent,
      retainableOp, discardableOp,
      new LinkedHashSet<>(), new LinkedHashSet<>(), new HashSet<>());
}

/**
 * Walks the retainable and discardable branches in lockstep ("zipper"),
 * extending the mergeable prefix while operators compare equal, and
 * accumulating the in-memory data size of MapJoin operators encountered so the
 * caller can verify memory preconditions.
 */
private static SharedResult extractSharedOptimizationInfo(ParseContext pctx,
    SharedWorkOptimizerCache optimizerCache,
    Operator<?> retainableOpEqualParent,
    Operator<?> discardableOpEqualParent,
    Operator<?> retainableOp,
    Operator<?> discardableOp,
    LinkedHashSet<Operator<?>> retainableOps,
    LinkedHashSet<Operator<?>> discardableOps,
    Set<Operator<?>> discardableInputOps) throws SemanticException {
  Operator<?> equalOp1 = retainableOpEqualParent;
  Operator<?> equalOp2 = discardableOpEqualParent;
  Operator<?> currentOp1 = retainableOp;
  Operator<?> currentOp2 = discardableOp;
  long dataSize = 0L;
  long maxDataSize = 0L;
  // Try to merge rest of operators
  while (!(currentOp1 instanceof ReduceSinkOperator)) {
    // Check whether current operators are equal
    if (!compareOperator(pctx, currentOp1, currentOp2)) {
      // If they are not equal, we could zip up till here
      break;
    }
    if (currentOp1.getParentOperators().size() !=
        currentOp2.getParentOperators().size()) {
      // If they are not equal, we could zip up till here
      break;
    }
    if (currentOp1.getParentOperators().size() > 1) {
      // Multi-input operator (e.g. a join): every other input branch must be
      // pairwise equal too, and its operators become discardable inputs.
      List<Operator<?>> discardableOpsForCurrentOp = new ArrayList<>();
      int idx = 0;
      for (; idx < currentOp1.getParentOperators().size(); idx++) {
        Operator<?> parentOp1 = currentOp1.getParentOperators().get(idx);
        Operator<?> parentOp2 = currentOp2.getParentOperators().get(idx);
        if (parentOp1 == equalOp1 && parentOp2 == equalOp2) {
          continue;
        }
        if ((parentOp1 == equalOp1 && parentOp2 != equalOp2) ||
            (parentOp1 != equalOp1 && parentOp2 == equalOp2)) {
          // Input operator is not in the same position
          break;
        }
        // Compare input
        List<Operator<?>> removeOpsForCurrentInput =
            compareAndGatherOps(pctx, parentOp1, parentOp2);
        if (removeOpsForCurrentInput == null) {
          // Inputs are not the same, bail out
          break;
        }
        // Add inputs to ops to remove
        discardableOpsForCurrentOp.addAll(removeOpsForCurrentInput);
      }
      if (idx != currentOp1.getParentOperators().size()) {
        // If inputs are not equal, we could zip up till here
        break;
      }
      discardableInputOps.addAll(discardableOpsForCurrentOp);
    }
    equalOp1 = currentOp1;
    equalOp2 = currentOp2;
    retainableOps.add(equalOp1);
    discardableOps.add(equalOp2);
    if (equalOp1 instanceof MapJoinOperator) {
      // Account for the hash table that the retained MapJoin keeps in memory.
      MapJoinOperator mop = (MapJoinOperator) equalOp1;
      dataSize = StatsUtils.safeAdd(dataSize, mop.getConf().getInMemoryDataSize());
      maxDataSize = mop.getConf().getMemoryMonitorInfo().getAdjustedNoConditionalTaskSize();
    }
    if (currentOp1.getChildOperators().size() > 1 ||
        currentOp2.getChildOperators().size() > 1) {
      // TODO: Support checking multiple child operators to merge further.
      break;
    }
    // Update for next iteration
    currentOp1 = currentOp1.getChildOperators().get(0);
    currentOp2 = currentOp2.getChildOperators().get(0);
  }
  // Add the rest to the memory consumption
  Set<Operator<?>> opsWork1 = findWorkOperators(optimizerCache, currentOp1);
  for (Operator<?> op : opsWork1) {
    if (op instanceof MapJoinOperator && !retainableOps.contains(op)) {
      MapJoinOperator mop = (MapJoinOperator) op;
      dataSize = StatsUtils.safeAdd(dataSize, mop.getConf().getInMemoryDataSize());
      maxDataSize = mop.getConf().getMemoryMonitorInfo().getAdjustedNoConditionalTaskSize();
    }
  }
  Set<Operator<?>> opsWork2 = findWorkOperators(optimizerCache, currentOp2);
  for (Operator<?> op : opsWork2) {
    if (op instanceof MapJoinOperator && !discardableOps.contains(op)) {
      MapJoinOperator mop = (MapJoinOperator) op;
      dataSize = StatsUtils.safeAdd(dataSize, mop.getConf().getInMemoryDataSize());
      maxDataSize = mop.getConf().getMemoryMonitorInfo().getAdjustedNoConditionalTaskSize();
    }
  }
  // DPP branches that feed only discarded operators become discardable inputs too.
  discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache,
      Sets.union(discardableInputOps, discardableOps)));
  discardableInputOps.addAll(gatherDPPBranchOps(pctx, optimizerCache, retainableOps,
      discardableInputOps));
  return new SharedResult(retainableOps, discardableOps, discardableInputOps,
      dataSize, maxDataSize);
}

/**
 * Splits a predicate into its conjuncts and returns them as a multiset of
 * strings, skipping DPP-generated conjuncts (bloom-filter checks, BETWEEN on
 * dynamic values, dynamic lists) so that two filters that differ only in DPP
 * predicates compare equal.
 */
private static Multiset<String> extractConjsIgnoringDPPPreds(ExprNodeDesc predicate) {
  List<ExprNodeDesc> conjsOp = ExprNodeDescUtils.split(predicate);
  Multiset<String> conjsOpString = TreeMultiset.create();
  for (int i = 0; i < conjsOp.size(); i++) {
    if (conjsOp.get(i) instanceof ExprNodeGenericFuncDesc) {
      ExprNodeGenericFuncDesc func = (ExprNodeGenericFuncDesc) conjsOp.get(i);
      if (GenericUDFInBloomFilter.class == func.getGenericUDF().getClass()) {
        continue;
      } else if (GenericUDFBetween.class == func.getGenericUDF().getClass() &&
          (func.getChildren().get(2) instanceof ExprNodeDynamicValueDesc ||
              func.getChildren().get(3) instanceof ExprNodeDynamicValueDesc)) {
        continue;
      }
    } else if (conjsOp.get(i)
instanceof ExprNodeDynamicListDesc) {
      // Dynamic-list conjunct: also DPP-generated, skip it.
      continue;
    }
    conjsOpString.add(conjsOp.get(i).toString());
  }
  return conjsOpString;
}

/**
 * Returns the full DPP source branches feeding the given TableScan operators
 * (used when the whole set of ops is being discarded).
 */
private static Set<Operator<?>> gatherDPPBranchOps(ParseContext pctx,
    SharedWorkOptimizerCache optimizerCache, Set<Operator<?>> ops) {
  Set<Operator<?>> dppBranches = new HashSet<>();
  for (Operator<?> op : ops) {
    if (op instanceof TableScanOperator) {
      Collection<Operator<?>> c = optimizerCache.tableScanToDPPSource
          .get((TableScanOperator) op);
      for (Operator<?> dppSource : c) {
        // Remove the branches
        removeBranch(dppSource, dppBranches, ops);
      }
    }
  }
  return dppBranches;
}

/**
 * Variant that only gathers DPP branches whose generating work overlaps the
 * set of already-discarded operators (their values would no longer be produced).
 */
private static Set<Operator<?>> gatherDPPBranchOps(ParseContext pctx,
    SharedWorkOptimizerCache optimizerCache, Set<Operator<?>> ops,
    Set<Operator<?>> discardedOps) {
  Set<Operator<?>> dppBranches = new HashSet<>();
  for (Operator<?> op : ops) {
    if (op instanceof TableScanOperator) {
      Collection<Operator<?>> c = optimizerCache.tableScanToDPPSource
          .get((TableScanOperator) op);
      for (Operator<?> dppSource : c) {
        Set<Operator<?>> ascendants =
            findAscendantWorkOperators(pctx, optimizerCache, dppSource);
        if (!Collections.disjoint(ascendants, discardedOps)) {
          // Remove branch
          removeBranch(dppSource, dppBranches, ops);
        }
      }
    }
  }
  return dppBranches;
}

/**
 * Recursively marks a DPP branch for removal, walking from the given operator
 * up through its parents. Stops (without marking) if the operator has another
 * child that is neither already marked nor discardable, i.e., the branch is
 * still needed by someone else.
 */
private static void removeBranch(Operator<?> currentOp, Set<Operator<?>> branchesOps,
    Set<Operator<?>> discardableOps) {
  if (currentOp.getNumChild() > 1) {
    for (Operator<?> childOp : currentOp.getChildOperators()) {
      if (!branchesOps.contains(childOp) &&
          !discardableOps.contains(childOp)) {
        return;
      }
    }
  }
  branchesOps.add(currentOp);
  if (currentOp.getParentOperators() != null) {
    for (Operator<?> parentOp : currentOp.getParentOperators()) {
      removeBranch(parentOp, branchesOps, discardableOps);
    }
  }
}

/**
 * Compares two operator subtrees (up through their parents). Returns the list
 * of operators from the second subtree that can be discarded if the subtrees
 * are equal, or null if they are not mergeable.
 */
private static List<Operator<?>> compareAndGatherOps(ParseContext pctx,
    Operator<?> op1, Operator<?> op2) throws SemanticException {
  List<Operator<?>> result = new ArrayList<>();
  boolean mergeable = compareAndGatherOps(pctx, op1, op2, result, true);
  if (!mergeable) {
    return null;
  }
  return result;
}

// Recursive worker for the comparison above; 'gather' turns off collection
// once an operator with multiple children is found (that part of the tree is
// shared and must not be discarded).
private static boolean compareAndGatherOps(ParseContext pctx, Operator<?> op1, Operator<?> op2,
    List<Operator<?>> result, boolean gather) throws SemanticException {
  if (!compareOperator(pctx, op1, op2)) {
    LOG.debug("Operators not equal: {} and {}", op1, op2);
    return false;
  }
  if (gather && op2.getChildOperators().size() > 1) {
    // If the second operator has more than one child, we stop gathering
    gather = false;
  }
  if (gather) {
    result.add(op2);
  }
  List<Operator<? extends OperatorDesc>> op1ParentOperators = op1.getParentOperators();
  List<Operator<? extends OperatorDesc>> op2ParentOperators = op2.getParentOperators();
  if (op1ParentOperators != null && op2ParentOperators != null) {
    if (op1ParentOperators.size() != op2ParentOperators.size()) {
      return false;
    }
    for (int i = 0; i < op1ParentOperators.size(); i++) {
      Operator<?> op1ParentOp = op1ParentOperators.get(i);
      Operator<?> op2ParentOp = op2ParentOperators.get(i);
      boolean mergeable =
          compareAndGatherOps(pctx, op1ParentOp, op2ParentOp, result, gather);
      if (!mergeable) {
        return false;
      }
    }
  } else if (op1ParentOperators != null || op2ParentOperators != null) {
    return false;
  }
  return true;
}

/**
 * Structural equality of two operators of the same class, with special
 * handling for ReduceSink and TableScan (table alias is ignored for those);
 * everything else falls back to {@code Operator.logicalEquals}.
 */
private static boolean compareOperator(ParseContext pctx, Operator<?> op1, Operator<?> op2)
    throws SemanticException {
  if (!op1.getClass().getName().equals(op2.getClass().getName())) {
    return false;
  }
  // We handle ReduceSinkOperator here as we can safely ignore table alias
  // and the current comparator implementation does not.
  // We can ignore table alias since when we compare ReduceSinkOperator, all
  // its ancestors need to match (down to table scan), thus we make sure that
  // both plans are the same.
  // TODO: move this to logicalEquals
  if (op1 instanceof ReduceSinkOperator) {
    ReduceSinkDesc op1Conf = ((ReduceSinkOperator) op1).getConf();
    ReduceSinkDesc op2Conf = ((ReduceSinkOperator) op2).getConf();
    if (StringUtils.equals(op1Conf.getKeyColString(), op2Conf.getKeyColString()) &&
        StringUtils.equals(op1Conf.getValueColsString(), op2Conf.getValueColsString()) &&
        StringUtils.equals(op1Conf.getParitionColsString(), op2Conf.getParitionColsString()) &&
        op1Conf.getTag() == op2Conf.getTag() &&
        StringUtils.equals(op1Conf.getOrder(), op2Conf.getOrder()) &&
        op1Conf.getTopN() == op2Conf.getTopN() &&
        canDeduplicateReduceTraits(op1Conf, op2Conf)) {
      return true;
    } else {
      return false;
    }
  }
  // We handle TableScanOperator here as we can safely ignore table alias
  // and the current comparator implementation does not.
  // TODO: move this to logicalEquals
  if (op1 instanceof TableScanOperator) {
    TableScanOperator tsOp1 = (TableScanOperator) op1;
    TableScanOperator tsOp2 = (TableScanOperator) op2;
    TableScanDesc op1Conf = tsOp1.getConf();
    TableScanDesc op2Conf = tsOp2.getConf();
    Table tableMeta1 = op1Conf.getTableMetadata();
    Table tableMeta2 = op2Conf.getTableMetadata();
    if (StringUtils.equals(tableMeta1.getFullyQualifiedName(), tableMeta2.getFullyQualifiedName()) &&
        op1Conf.getNeededColumns().equals(op2Conf.getNeededColumns()) &&
        StringUtils.equals(op1Conf.getFilterExprString(), op2Conf.getFilterExprString()) &&
        pctx.getPrunedPartitions(tsOp1).getPartitions().equals(
            pctx.getPrunedPartitions(tsOp2).getPartitions()) &&
        op1Conf.getRowLimit() == op2Conf.getRowLimit() &&
        Objects.equals(op1Conf.getOpProps(), op2Conf.getOpProps())) {
      return true;
    } else {
      return false;
    }
  }
  return op1.logicalEquals(op2);
}

/**
 * Verifies the structural preconditions under which two works can be merged
 * (memory budget, no Union, at most one DummyStore, and no parallel edges or
 * cycles in the resulting Tez DAG).
 */
private static boolean validPreConditions(ParseContext pctx, SharedWorkOptimizerCache optimizerCache,
    SharedResult sr) {
  // We check whether merging the works would cause the size of
  // the data in memory grow too large.
// TODO: Currently ignores GBY and PTF which may also buffer data in memory. if (sr.dataSize > sr.maxDataSize) { // Size surpasses limit, we cannot convert LOG.debug("accumulated data size: {} / max size: {}", sr.dataSize, sr.maxDataSize); return false; } Operator<?> op1 = sr.retainableOps.get(0); Operator<?> op2 = sr.discardableOps.get(0); // 1) The set of operators in the works that we are merging need to meet // some requirements. In particular: // 1.1. None of the works that we are merging can contain a Union // operator. This is not supported yet as we might end up with cycles in // the Tez DAG. // 1.2. There cannot be more than one DummyStore operator in the new resulting // work when the operators are merged. This is due to an assumption in // MergeJoinProc that needs to be further explored. // If any of these conditions are not met, we cannot merge. // TODO: Extend rule so it can be applied for these cases. final Set<Operator<?>> workOps1 = findWorkOperators(optimizerCache, op1); final Set<Operator<?>> workOps2 = findWorkOperators(optimizerCache, op2); boolean foundDummyStoreOp = false; for (Operator<?> op : workOps1) { if (op instanceof UnionOperator) { // We cannot merge (1.1) return false; } if (op instanceof DummyStoreOperator) { foundDummyStoreOp = true; } } for (Operator<?> op : workOps2) { if (op instanceof UnionOperator) { // We cannot merge (1.1) return false; } if (foundDummyStoreOp && op instanceof DummyStoreOperator) { // We cannot merge (1.2) return false; } } // 2) We check whether output works when we merge the operators will collide. // // Work1 Work2 (merge TS in W1 & W2) Work1 // \ / -> | | X // Work3 Work3 // // If we do, we cannot merge. The reason is that Tez currently does // not support parallel edges, i.e., multiple edges from same work x // into same work y. 
final Set<Operator<?>> outputWorksOps1 = findChildWorkOperators(pctx, optimizerCache, op1); final Set<Operator<?>> outputWorksOps2 = findChildWorkOperators(pctx, optimizerCache, op2); if (!Collections.disjoint(outputWorksOps1, outputWorksOps2)) { // We cannot merge return false; } // 3) We check whether we will end up with same operators inputing on same work. // // Work1 (merge TS in W2 & W3) Work1 // / \ -> | | X // Work2 Work3 Work2 // // If we do, we cannot merge. The reason is the same as above, currently // Tez does not support parallel edges. // // In the check, we exclude the inputs to the root operator that we are trying // to merge (only useful for extended merging as TS do not have inputs). final Set<Operator<?>> excludeOps1 = sr.retainableOps.get(0).getNumParent() > 0 ? ImmutableSet.copyOf(sr.retainableOps.get(0).getParentOperators()) : ImmutableSet.of(); final Set<Operator<?>> inputWorksOps1 = findParentWorkOperators(pctx, optimizerCache, op1, excludeOps1); final Set<Operator<?>> excludeOps2 = sr.discardableOps.get(0).getNumParent() > 0 ? Sets.union(ImmutableSet.copyOf(sr.discardableOps.get(0).getParentOperators()), sr.discardableInputOps) : sr.discardableInputOps; final Set<Operator<?>> inputWorksOps2 = findParentWorkOperators(pctx, optimizerCache, op2, excludeOps2); if (!Collections.disjoint(inputWorksOps1, inputWorksOps2)) { // We cannot merge return false; } // 4) We check whether one of the operators is part of a work that is an input for // the work of the other operator. // // Work1 (merge TS in W1 & W3) Work1 // | -> | X // Work2 Work2 // | | // Work3 Work1 // // If we do, we cannot merge, as we would end up with a cycle in the DAG. 
final Set<Operator<?>> descendantWorksOps1 = findDescendantWorkOperators(pctx, optimizerCache, op1, sr.discardableInputOps); final Set<Operator<?>> descendantWorksOps2 = findDescendantWorkOperators(pctx, optimizerCache, op2, sr.discardableInputOps); if (!Collections.disjoint(descendantWorksOps1, workOps2) || !Collections.disjoint(workOps1, descendantWorksOps2)) { return false; } return true; } private static Set<Operator<?>> findParentWorkOperators(ParseContext pctx, SharedWorkOptimizerCache optimizerCache, Operator<?> start) { return findParentWorkOperators(pctx, optimizerCache, start, ImmutableSet.of()); } private static Set<Operator<?>> findParentWorkOperators(ParseContext pctx, SharedWorkOptimizerCache optimizerCache, Operator<?> start, Set<Operator<?>> excludeOps) { // Find operators in work Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start); // Gather input works operators Set<Operator<?>> set = new HashSet<Operator<?>>(); for (Operator<?> op : workOps) { if (op.getParentOperators() != null) { for (Operator<?> parent : op.getParentOperators()) { if (parent instanceof ReduceSinkOperator && !excludeOps.contains(parent)) { set.addAll(findWorkOperators(optimizerCache, parent)); } } } else if (op instanceof TableScanOperator) { // Check for DPP and semijoin DPP for (Operator<?> parent : optimizerCache.tableScanToDPPSource.get((TableScanOperator) op)) { if (!excludeOps.contains(parent)) { set.addAll(findWorkOperators(optimizerCache, parent)); } } } } return set; } private static Set<Operator<?>> findAscendantWorkOperators(ParseContext pctx, SharedWorkOptimizerCache optimizerCache, Operator<?> start) { // Find operators in work Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start); // Gather input works operators Set<Operator<?>> result = new HashSet<Operator<?>>(); Set<Operator<?>> set; while (!workOps.isEmpty()) { set = new HashSet<Operator<?>>(); for (Operator<?> op : workOps) { if (op.getParentOperators() != null) { for 
(Operator<?> parent : op.getParentOperators()) { if (parent instanceof ReduceSinkOperator) { set.addAll(findWorkOperators(optimizerCache, parent)); } } } else if (op instanceof TableScanOperator) { // Check for DPP and semijoin DPP for (Operator<?> parent : optimizerCache.tableScanToDPPSource.get((TableScanOperator) op)) { set.addAll(findWorkOperators(optimizerCache, parent)); } } } workOps = set; result.addAll(set); } return result; } private static Set<Operator<?>> findChildWorkOperators(ParseContext pctx, SharedWorkOptimizerCache optimizerCache, Operator<?> start) { // Find operators in work Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start); // Gather output works operators Set<Operator<?>> set = new HashSet<Operator<?>>(); for (Operator<?> op : workOps) { if (op instanceof ReduceSinkOperator) { if (op.getChildOperators() != null) { // All children of RS are descendants for (Operator<?> child : op.getChildOperators()) { set.addAll(findWorkOperators(optimizerCache, child)); } } // Semijoin DPP work is considered a child because work needs // to finish for it to execute SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op); if (sjbi != null) { set.addAll(findWorkOperators(optimizerCache, sjbi.getTsOp())); } } else if(op.getConf() instanceof DynamicPruningEventDesc) { // DPP work is considered a child because work needs // to finish for it to execute set.addAll(findWorkOperators( optimizerCache, ((DynamicPruningEventDesc) op.getConf()).getTableScan())); } } return set; } private static Set<Operator<?>> findDescendantWorkOperators(ParseContext pctx, SharedWorkOptimizerCache optimizerCache, Operator<?> start, Set<Operator<?>> excludeOps) { // Find operators in work Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start); // Gather output works operators Set<Operator<?>> result = new HashSet<Operator<?>>(); Set<Operator<?>> set; while (!workOps.isEmpty()) { set = new HashSet<Operator<?>>(); for (Operator<?> op : workOps) { if 
(excludeOps.contains(op)) { continue; } if (op instanceof ReduceSinkOperator) { if (op.getChildOperators() != null) { // All children of RS are descendants for (Operator<?> child : op.getChildOperators()) { set.addAll(findWorkOperators(optimizerCache, child)); } } // Semijoin DPP work is considered a descendant because work needs // to finish for it to execute SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op); if (sjbi != null) { set.addAll(findWorkOperators(optimizerCache, sjbi.getTsOp())); } } else if(op.getConf() instanceof DynamicPruningEventDesc) { // DPP work is considered a descendant because work needs // to finish for it to execute set.addAll(findWorkOperators( optimizerCache, ((DynamicPruningEventDesc) op.getConf()).getTableScan())); } } workOps = set; result.addAll(set); } return result; } // Stores result in cache private static Set<Operator<?>> findWorkOperators( SharedWorkOptimizerCache optimizerCache, Operator<?> start) { Set<Operator<?>> c = optimizerCache.operatorToWorkOperators.get(start); if (!c.isEmpty()) { return c; } c = findWorkOperators(start, new HashSet<Operator<?>>()); for (Operator<?> op : c) { optimizerCache.operatorToWorkOperators.putAll(op, c); } return c; } private static Set<Operator<?>> findWorkOperators(Operator<?> start, Set<Operator<?>> found) { found.add(start); if (start.getParentOperators() != null) { for (Operator<?> parent : start.getParentOperators()) { if (parent instanceof ReduceSinkOperator) { continue; } if (!found.contains(parent)) { findWorkOperators(parent, found); } } } if (start instanceof ReduceSinkOperator) { return found; } if (start.getChildOperators() != null) { for (Operator<?> child : start.getChildOperators()) { if (!found.contains(child)) { findWorkOperators(child, found); } } } return found; } private static void pushFilterToTopOfTableScan( SharedWorkOptimizerCache optimizerCache, TableScanOperator tsOp) throws UDFArgumentException { ExprNodeGenericFuncDesc tableScanExprNode = 
tsOp.getConf().getFilterExpr(); List<Operator<? extends OperatorDesc>> allChildren = Lists.newArrayList(tsOp.getChildOperators()); for (Operator<? extends OperatorDesc> op : allChildren) { if (op instanceof FilterOperator) { FilterOperator filterOp = (FilterOperator) op; ExprNodeDesc filterExprNode = filterOp.getConf().getPredicate(); if (tableScanExprNode.isSame(filterExprNode)) { // We do not need to do anything return; } if (tableScanExprNode.getGenericUDF() instanceof GenericUDFOPOr) { for (ExprNodeDesc childExprNode : tableScanExprNode.getChildren()) { if (childExprNode.isSame(filterExprNode)) { // We do not need to do anything, it is in the OR expression // so probably we pushed previously return; } } } ExprNodeGenericFuncDesc newPred = ExprNodeGenericFuncDesc.newInstance( new GenericUDFOPAnd(), Arrays.<ExprNodeDesc>asList(tableScanExprNode.clone(), filterExprNode)); filterOp.getConf().setPredicate(newPred); } else { Operator<FilterDesc> newOp = OperatorFactory.get(tsOp.getCompilationOpContext(), new FilterDesc(tableScanExprNode.clone(), false), new RowSchema(tsOp.getSchema().getSignature())); tsOp.replaceChild(op, newOp); newOp.getParentOperators().add(tsOp); op.replaceParent(tsOp, newOp); newOp.getChildOperators().add(op); // Add to cache (same group as tsOp) optimizerCache.putIfWorkExists(newOp, tsOp); } } } static boolean canDeduplicateReduceTraits(ReduceSinkDesc retainable, ReduceSinkDesc discardable) { return deduplicateReduceTraits(retainable, discardable, false); } static boolean deduplicateReduceTraits(ReduceSinkDesc retainable, ReduceSinkDesc discardable) { return deduplicateReduceTraits(retainable, discardable, true); } private static boolean deduplicateReduceTraits(ReduceSinkDesc retainable, ReduceSinkDesc discardable, boolean apply) { final EnumSet<ReduceSinkDesc.ReducerTraits> retainableTraits = retainable.getReducerTraits(); final EnumSet<ReduceSinkDesc.ReducerTraits> discardableTraits = discardable.getReducerTraits(); final boolean x1 = 
retainableTraits.contains(UNSET); final boolean f1 = retainableTraits.contains(FIXED); final boolean u1 = retainableTraits.contains(UNIFORM); final boolean a1 = retainableTraits.contains(AUTOPARALLEL); final int n1 = retainable.getNumReducers(); final boolean x2 = discardableTraits.contains(UNSET); final boolean f2 = discardableTraits.contains(FIXED); final boolean u2 = discardableTraits.contains(UNIFORM); final boolean a2 = discardableTraits.contains(AUTOPARALLEL); final int n2 = discardable.getNumReducers(); boolean dedup = false; boolean x3 = false; boolean f3 = false; boolean u3 = false; boolean a3 = false; int n3 = n1; // NOTE: UNSET is exclusive from other traits, so FIXED is. if (x1 || x2) { // UNSET + X = X dedup = true; n3 = Math.max(n1, n2); x3 = x1 && x2; f3 = f1 || f2; u3 = u1 || u2; a3 = a1 || a2; } else if (f1 || f2) { if (f1 && f2) { // FIXED(x) + FIXED(x) = FIXED(x) // FIXED(x) + FIXED(y) = no deduplication (where x != y) if (n1 == n2) { dedup = true; f3 = true; } } else { // FIXED(x) + others = FIXED(x) dedup = true; f3 = true; if (f1) { n3 = n1; } else { n3 = n2; } } } else { if (u1 && u2) { // UNIFORM(x) + UNIFORM(y) = UNIFORM(max(x, y)) dedup = true; u3 = true; n3 = Math.max(n1, n2); } if (a1 && a2) { // AUTOPARALLEL(x) + AUTOPARALLEL(y) = AUTOPARALLEL(max(x, y)) dedup = true; a3 = true; n3 = Math.max(n1, n2); } } // Gether the results into the retainable object if (apply && dedup) { retainable.setNumReducers(n3); if (x3) { retainableTraits.add(UNSET); } else { retainableTraits.remove(UNSET); } if (f3) { retainableTraits.add(FIXED); } else { retainableTraits.remove(FIXED); } if (u3) { retainableTraits.add(UNIFORM); } else { retainableTraits.remove(UNIFORM); } if (a3) { retainableTraits.add(AUTOPARALLEL); } else { retainableTraits.remove(AUTOPARALLEL); } } return dedup; } private static class SharedResult { final List<Operator<?>> retainableOps; final List<Operator<?>> discardableOps; final Set<Operator<?>> discardableInputOps; final long 
dataSize; final long maxDataSize; private SharedResult(Collection<Operator<?>> retainableOps, Collection<Operator<?>> discardableOps, Set<Operator<?>> discardableInputOps, long dataSize, long maxDataSize) { this.retainableOps = ImmutableList.copyOf(retainableOps); this.discardableOps = ImmutableList.copyOf(discardableOps); this.discardableInputOps = ImmutableSet.copyOf(discardableInputOps); this.dataSize = dataSize; this.maxDataSize = maxDataSize; } @Override public String toString() { return "SharedResult { " + this.retainableOps + "; " + this.discardableOps + "; " + this.discardableInputOps + "};"; } } /** Cache to accelerate optimization */ private static class SharedWorkOptimizerCache { // Operators that belong to each work final HashMultimap<Operator<?>, Operator<?>> operatorToWorkOperators = HashMultimap.<Operator<?>, Operator<?>>create(); // Table scan operators to DPP sources final Multimap<TableScanOperator, Operator<?>> tableScanToDPPSource = HashMultimap.<TableScanOperator, Operator<?>>create(); // Add new operator to cache work group of existing operator (if group exists) void putIfWorkExists(Operator<?> opToAdd, Operator<?> existingOp) { List<Operator<?>> c = ImmutableList.copyOf(operatorToWorkOperators.get(existingOp)); if (!c.isEmpty()) { for (Operator<?> op : c) { operatorToWorkOperators.get(op).add(opToAdd); } operatorToWorkOperators.putAll(opToAdd, c); operatorToWorkOperators.put(opToAdd, opToAdd); } } // Remove operator void removeOp(Operator<?> opToRemove) { Set<Operator<?>> s = operatorToWorkOperators.get(opToRemove); s.remove(opToRemove); List<Operator<?>> c1 = ImmutableList.copyOf(s); if (!c1.isEmpty()) { for (Operator<?> op1 : c1) { operatorToWorkOperators.remove(op1, opToRemove); // Remove operator } operatorToWorkOperators.removeAll(opToRemove); // Remove entry for operator } } // Remove operator and combine void removeOpAndCombineWork(Operator<?> opToRemove, Operator<?> replacementOp) { Set<Operator<?>> s = 
operatorToWorkOperators.get(opToRemove); s.remove(opToRemove); List<Operator<?>> c1 = ImmutableList.copyOf(s); List<Operator<?>> c2 = ImmutableList.copyOf(operatorToWorkOperators.get(replacementOp)); if (!c1.isEmpty() && !c2.isEmpty()) { for (Operator<?> op1 : c1) { operatorToWorkOperators.remove(op1, opToRemove); // Remove operator operatorToWorkOperators.putAll(op1, c2); // Add ops of new collection } operatorToWorkOperators.removeAll(opToRemove); // Remove entry for operator for (Operator<?> op2 : c2) { operatorToWorkOperators.putAll(op2, c1); // Add ops to existing collection } } } @Override public String toString() { return "SharedWorkOptimizerCache { \n" + operatorToWorkOperators.toString() + "\n };"; } } }
/* * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.ballerinalang.logging.handlers; import java.io.IOException; import java.util.logging.FileHandler; /** * A custom file handler for handling Ballerina user level log file * * @since 0.89 */ public class BallerinaLogFileHandler extends FileHandler { public BallerinaLogFileHandler() throws IOException, SecurityException { } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.runtime.evaluators.functions.temporal; import java.io.DataOutput; import org.apache.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer; import org.apache.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer; import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider; import org.apache.asterix.om.base.ADateTime; import org.apache.asterix.om.base.AMutableDateTime; import org.apache.asterix.om.base.temporal.GregorianCalendarSystem; import org.apache.asterix.runtime.exceptions.TypeMismatchException; import org.apache.asterix.om.functions.BuiltinFunctions; import org.apache.asterix.om.functions.IFunctionDescriptor; import org.apache.asterix.om.functions.IFunctionDescriptorFactory; import org.apache.asterix.om.types.ATypeTag; import org.apache.asterix.om.types.BuiltinType; import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor; import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier; import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator; import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory; import 
org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

/**
 * Descriptor for the {@code datetime-from-date-time} builtin: combines a DATE
 * argument and a TIME argument into an ADATETIME value by adding the time-of-day
 * chronon to the date's day chronon.
 */
public class DatetimeFromDateAndTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {

    private static final long serialVersionUID = 1L;
    public final static FunctionIdentifier FID = BuiltinFunctions.DATETIME_FROM_DATE_TIME;
    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
        @Override
        public IFunctionDescriptor createFunctionDescriptor() {
            return new DatetimeFromDateAndTimeDescriptor();
        }
    };

    /* (non-Javadoc)
     * @see org.apache.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
     */
    @Override
    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
        return new IScalarEvaluatorFactory() {
            private static final long serialVersionUID = 1L;

            @Override
            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx)
                    throws HyracksDataException {
                return new IScalarEvaluator() {

                    // Reusable buffers/evaluators; this evaluator is stateful and
                    // reset per tuple via resultStorage.reset().
                    private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                    private DataOutput out = resultStorage.getDataOutput();
                    private IPointable argPtr0 = new VoidPointable();
                    private IPointable argPtr1 = new VoidPointable();
                    private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
                    private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);

                    // possible returning types
                    @SuppressWarnings("unchecked")
                    private ISerializerDeserializer<ADateTime> datetimeSerde =
                            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
                    private AMutableDateTime aDateTime = new AMutableDateTime(0);

                    /**
                     * Evaluates (date, time) -> datetime. The first serialized byte
                     * of each argument is its type tag; a mismatch raises
                     * TypeMismatchException with the offending argument index.
                     */
                    @Override
                    public void evaluate(IFrameTupleReference tuple, IPointable result)
                            throws HyracksDataException {
                        resultStorage.reset();
                        eval0.evaluate(tuple, argPtr0);
                        eval1.evaluate(tuple, argPtr1);
                        byte[] bytes0 = argPtr0.getByteArray();
                        int offset0 = argPtr0.getStartOffset();
                        byte[] bytes1 = argPtr1.getByteArray();
                        int offset1 = argPtr1.getStartOffset();
                        if (bytes0[offset0] != ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
                            throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0],
                                    ATypeTag.SERIALIZED_DATE_TYPE_TAG);
                        }
                        if (bytes1[offset1] != ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
                            throw new TypeMismatchException(getIdentifier(), 1, bytes1[offset1],
                                    ATypeTag.SERIALIZED_TIME_TYPE_TAG);
                        }
                        // Payload starts at offset+1 (after the type tag). The date
                        // chronon counts days, scaled to milliseconds by
                        // CHRONON_OF_DAY, then the time-of-day chronon is added.
                        long datetimeChronon = ADateSerializerDeserializer.getChronon(bytes0, offset0 + 1)
                                * GregorianCalendarSystem.CHRONON_OF_DAY
                                + ATimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
                        aDateTime.setValue(datetimeChronon);
                        datetimeSerde.serialize(aDateTime, out);
                        result.set(resultStorage);
                    }
                };
            }
        };
    }

    /* (non-Javadoc)
     * @see org.apache.asterix.om.functions.IFunctionDescriptor#getIdentifier()
     */
    @Override
    public FunctionIdentifier getIdentifier() {
        return FID;
    }
}
// Copyright 2016 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////////// package com.google.pubsub.kafka.sink; import com.google.pubsub.kafka.common.ConnectorUtils; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; import org.apache.kafka.common.utils.AppInfoParser; import org.apache.kafka.connect.connector.Task; import org.apache.kafka.connect.sink.SinkConnector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A {@link SinkConnector} that writes messages to a specified topic in <a * href="https://cloud.google.com/pubsub">Google Cloud Pub/Sub</a>. 
*/
public class CloudPubSubSinkConnector extends SinkConnector {

  private static final Logger log = LoggerFactory.getLogger(CloudPubSubSinkConnector.class);

  // Config keys (user-facing; must not change without a migration).
  public static final String MAX_BUFFER_SIZE_CONFIG = "maxBufferSize";
  public static final String MAX_BUFFER_BYTES_CONFIG = "maxBufferBytes";
  public static final String MAX_DELAY_THRESHOLD_MS = "delayThresholdMs";
  public static final String MAX_REQUEST_TIMEOUT_MS = "maxRequestTimeoutMs";
  public static final String MAX_TOTAL_TIMEOUT_MS = "maxTotalTimeoutMs";
  public static final String MAX_SHUTDOWN_TIMEOUT_MS = "maxShutdownTimeoutMs";
  // Defaults for the batching/timeout settings above.
  public static final int DEFAULT_MAX_BUFFER_SIZE = 100;
  public static final long DEFAULT_MAX_BUFFER_BYTES = 10000000L;
  public static final int DEFAULT_DELAY_THRESHOLD_MS = 100;
  public static final int DEFAULT_REQUEST_TIMEOUT_MS = 10000;
  public static final int DEFAULT_TOTAL_TIMEOUT_MS = 60000;
  public static final int DEFAULT_SHUTDOWN_TIMEOUT_MS = 60000;
  public static final String CPS_MESSAGE_BODY_NAME = "messageBodyName";
  public static final String DEFAULT_MESSAGE_BODY_NAME = "cps_message_body";
  public static final String PUBLISH_KAFKA_METADATA = "metadata.publish";

  // Raw connector properties, captured in start() and handed to every task.
  private Map<String, String> props;

  @Override
  public String version() {
    return AppInfoParser.getVersion();
  }

  @Override
  public void start(Map<String, String> props) {
    this.props = props;
    log.info("Started the CloudPubSubSinkConnector.");
  }

  @Override
  public Class<? extends Task> taskClass() {
    return CloudPubSubSinkTask.class;
  }

  @Override
  public List<Map<String, String>> taskConfigs(int maxTasks) {
    // Each task will get the exact same configuration. Delegate all config validation to the task.
    ArrayList<Map<String, String>> configs = new ArrayList<>();
    for (int i = 0; i < maxTasks; i++) {
      Map<String, String> config = new HashMap<>(props);
      configs.add(config);
    }
    return configs;
  }

  /**
   * Declares the connector's configuration surface. Documentation strings here
   * are shown verbatim to users by Kafka Connect.
   */
  @Override
  public ConfigDef config() {
    return new ConfigDef()
        .define(
            ConnectorUtils.CPS_PROJECT_CONFIG,
            Type.STRING,
            Importance.HIGH,
            "The project containing the topic to which to publish.")
        .define(
            ConnectorUtils.CPS_TOPIC_CONFIG,
            Type.STRING,
            Importance.HIGH,
            "The topic to which to publish.")
        .define(
            MAX_BUFFER_SIZE_CONFIG,
            Type.INT,
            DEFAULT_MAX_BUFFER_SIZE,
            ConfigDef.Range.between(1, Integer.MAX_VALUE),
            Importance.MEDIUM,
            "The maximum number of messages that can be received for the messages on a topic "
                + "partition before publishing them to Cloud Pub/Sub.")
        .define(
            MAX_BUFFER_BYTES_CONFIG,
            Type.LONG,
            DEFAULT_MAX_BUFFER_BYTES,
            ConfigDef.Range.between(1, DEFAULT_MAX_BUFFER_BYTES),
            Importance.MEDIUM,
            "The maximum number of bytes that can be received for the messages on a topic "
                + "partition before publishing the messages to Cloud Pub/Sub.")
        .define(
            MAX_DELAY_THRESHOLD_MS,
            Type.INT,
            DEFAULT_DELAY_THRESHOLD_MS,
            ConfigDef.Range.between(1, Integer.MAX_VALUE),
            Importance.MEDIUM,
            "The maximum amount of time to wait after receiving the first message in a batch for a "
                + "before publishing the messages to Cloud Pub/Sub.")
        .define(
            MAX_REQUEST_TIMEOUT_MS,
            Type.INT,
            DEFAULT_REQUEST_TIMEOUT_MS,
            ConfigDef.Range.between(10000, Integer.MAX_VALUE),
            Importance.MEDIUM,
            "The maximum amount of time to wait for a single publish request to Cloud Pub/Sub.")
        .define(
            MAX_TOTAL_TIMEOUT_MS,
            Type.INT,
            DEFAULT_TOTAL_TIMEOUT_MS,
            ConfigDef.Range.between(10000, Integer.MAX_VALUE),
            Importance.MEDIUM,
            "The maximum amount of time to wait for a publish to complete (including retries) in "
                + "Cloud Pub/Sub.")
        .define(
            MAX_SHUTDOWN_TIMEOUT_MS,
            Type.INT,
            DEFAULT_SHUTDOWN_TIMEOUT_MS,
            ConfigDef.Range.between(10000, Integer.MAX_VALUE),
            Importance.MEDIUM,
            "The maximum amount of time to wait for a publisher to shutdown when stopping task "
                + "in Kafka Connect.")
        .define(
            PUBLISH_KAFKA_METADATA,
            Type.BOOLEAN,
            false,
            Importance.MEDIUM,
            "When true, include the Kafka topic, partition, offset, and timestamp as message "
                + "attributes when a message is published to Cloud Pub/Sub.")
        .define(CPS_MESSAGE_BODY_NAME,
            Type.STRING,
            DEFAULT_MESSAGE_BODY_NAME,
            Importance.MEDIUM,
            "When using a struct or map value schema, this field or key name indicates that the "
                + "corresponding value will go into the Pub/Sub message body.")
        .define(ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG,
            Type.STRING,
            null,
            Importance.HIGH,
            "The path to the GCP credentials file")
        .define(ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG,
            Type.STRING,
            null,
            Importance.HIGH,
            "GCP JSON credentials");
  }

  // No connector-level resources to release; tasks clean up after themselves.
  @Override
  public void stop() {}
}
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

//----------------------------------------------------
// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
//----------------------------------------------------

package co.elastic.clients.elasticsearch.watcher;

import co.elastic.clients.json.JsonpDeserializable;
import co.elastic.clients.json.JsonpDeserializer;
import co.elastic.clients.json.JsonpMapper;
import co.elastic.clients.json.JsonpSerializable;
import co.elastic.clients.json.ObjectBuilderDeserializer;
import co.elastic.clients.json.ObjectDeserializer;
import co.elastic.clients.util.ApiTypeHelper;
import co.elastic.clients.util.ObjectBuilder;
import co.elastic.clients.util.WithJsonObjectBuilderBase;
import jakarta.json.stream.JsonGenerator;
import java.lang.String;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import javax.annotation.Nullable;

// typedef: watcher._types.SlackMessage

/**
 * Immutable model of a Watcher Slack message. Instances are created through
 * {@link Builder}; required fields are enforced at build time by
 * {@code ApiTypeHelper}. Generated code — do not edit by hand.
 *
 * @see <a href="../doc-files/api-spec.html#watcher._types.SlackMessage">API
 *      specification</a>
 */
@JsonpDeserializable
public class SlackMessage implements JsonpSerializable {
    // Required; made unmodifiable by the constructor.
    private final List<SlackAttachment> attachments;

    // Optional; null when absent.
    @Nullable
    private final SlackDynamicAttachment dynamicAttachments;

    private final String from;

    // Optional; null when absent.
    @Nullable
    private final String icon;

    private final String text;

    // Required; made unmodifiable by the constructor.
    private final List<String> to;

    // ---------------------------------------------------------------------------------------------

    private SlackMessage(Builder builder) {

        // Required fields throw MissingRequiredPropertyException-style errors via
        // ApiTypeHelper when left unset on the builder.
        this.attachments = ApiTypeHelper.unmodifiableRequired(builder.attachments, this, "attachments");
        this.dynamicAttachments = builder.dynamicAttachments;
        this.from = ApiTypeHelper.requireNonNull(builder.from, this, "from");
        this.icon = builder.icon;
        this.text = ApiTypeHelper.requireNonNull(builder.text, this, "text");
        this.to = ApiTypeHelper.unmodifiableRequired(builder.to, this, "to");

    }

    public static SlackMessage of(Function<Builder, ObjectBuilder<SlackMessage>> fn) {
        return fn.apply(new Builder()).build();
    }

    /**
     * Required - API name: {@code attachments}
     */
    public final List<SlackAttachment> attachments() {
        return this.attachments;
    }

    /**
     * API name: {@code dynamic_attachments}
     */
    @Nullable
    public final SlackDynamicAttachment dynamicAttachments() {
        return this.dynamicAttachments;
    }

    /**
     * Required - API name: {@code from}
     */
    public final String from() {
        return this.from;
    }

    /**
     * API name: {@code icon}
     */
    @Nullable
    public final String icon() {
        return this.icon;
    }

    /**
     * Required - API name: {@code text}
     */
    public final String text() {
        return this.text;
    }

    /**
     * Required - API name: {@code to}
     */
    public final List<String> to() {
        return this.to;
    }

    /**
     * Serialize this object to JSON.
     */
    public void serialize(JsonGenerator generator, JsonpMapper mapper) {
        generator.writeStartObject();
        serializeInternal(generator, mapper);
        generator.writeEnd();
    }

    // Writes the field content; optional fields are omitted from the JSON
    // output when null/undefined, required fields are always written.
    protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
        if (ApiTypeHelper.isDefined(this.attachments)) {
            generator.writeKey("attachments");
            generator.writeStartArray();
            for (SlackAttachment item0 : this.attachments) {
                item0.serialize(generator, mapper);

            }
            generator.writeEnd();

        }
        if (this.dynamicAttachments != null) {
            generator.writeKey("dynamic_attachments");
            this.dynamicAttachments.serialize(generator, mapper);

        }
        generator.writeKey("from");
        generator.write(this.from);

        if (this.icon != null) {
            generator.writeKey("icon");
            generator.write(this.icon);

        }
        generator.writeKey("text");
        generator.write(this.text);

        if (ApiTypeHelper.isDefined(this.to)) {
            generator.writeKey("to");
            generator.writeStartArray();
            for (String item0 : this.to) {
                generator.write(item0);

            }
            generator.writeEnd();

        }

    }

    // ---------------------------------------------------------------------------------------------

    /**
     * Builder for {@link SlackMessage}.
     */
    public static class Builder extends WithJsonObjectBuilderBase<Builder> implements ObjectBuilder<SlackMessage> {
        private List<SlackAttachment> attachments;

        @Nullable
        private SlackDynamicAttachment dynamicAttachments;

        private String from;

        @Nullable
        private String icon;

        private String text;

        private List<String> to;

        /**
         * Required - API name: {@code attachments}
         * <p>
         * Adds all elements of <code>list</code> to <code>attachments</code>.
         */
        public final Builder attachments(List<SlackAttachment> list) {
            this.attachments = _listAddAll(this.attachments, list);
            return this;
        }

        /**
         * Required - API name: {@code attachments}
         * <p>
         * Adds one or more values to <code>attachments</code>.
         */
        public final Builder attachments(SlackAttachment value, SlackAttachment... values) {
            this.attachments = _listAdd(this.attachments, value, values);
            return this;
        }

        /**
         * Required - API name: {@code attachments}
         * <p>
         * Adds a value to <code>attachments</code> using a builder lambda.
         */
        public final Builder attachments(Function<SlackAttachment.Builder, ObjectBuilder<SlackAttachment>> fn) {
            return attachments(fn.apply(new SlackAttachment.Builder()).build());
        }

        /**
         * API name: {@code dynamic_attachments}
         */
        public final Builder dynamicAttachments(@Nullable SlackDynamicAttachment value) {
            this.dynamicAttachments = value;
            return this;
        }

        /**
         * API name: {@code dynamic_attachments}
         */
        public final Builder dynamicAttachments(
                Function<SlackDynamicAttachment.Builder, ObjectBuilder<SlackDynamicAttachment>> fn) {
            return this.dynamicAttachments(fn.apply(new SlackDynamicAttachment.Builder()).build());
        }

        /**
         * Required - API name: {@code from}
         */
        public final Builder from(String value) {
            this.from = value;
            return this;
        }

        /**
         * API name: {@code icon}
         */
        public final Builder icon(@Nullable String value) {
            this.icon = value;
            return this;
        }

        /**
         * Required - API name: {@code text}
         */
        public final Builder text(String value) {
            this.text = value;
            return this;
        }

        /**
         * Required - API name: {@code to}
         * <p>
         * Adds all elements of <code>list</code> to <code>to</code>.
         */
        public final Builder to(List<String> list) {
            this.to = _listAddAll(this.to, list);
            return this;
        }

        /**
         * Required - API name: {@code to}
         * <p>
         * Adds one or more values to <code>to</code>.
         */
        public final Builder to(String value, String... values) {
            this.to = _listAdd(this.to, value, values);
            return this;
        }

        @Override
        protected Builder self() {
            return this;
        }

        /**
         * Builds a {@link SlackMessage}.
         *
         * @throws NullPointerException
         *             if some of the required fields are null.
         */
        public SlackMessage build() {
            _checkSingleUse();

            return new SlackMessage(this);
        }
    }

    // ---------------------------------------------------------------------------------------------

    /**
     * Json deserializer for {@link SlackMessage}
     */
    public static final JsonpDeserializer<SlackMessage> _DESERIALIZER = ObjectBuilderDeserializer.lazy(Builder::new,
            SlackMessage::setupSlackMessageDeserializer);

    // Registers one field mapping per JSON key for the lazy deserializer above.
    protected static void setupSlackMessageDeserializer(ObjectDeserializer<SlackMessage.Builder> op) {

        op.add(Builder::attachments, JsonpDeserializer.arrayDeserializer(SlackAttachment._DESERIALIZER),
                "attachments");
        op.add(Builder::dynamicAttachments, SlackDynamicAttachment._DESERIALIZER, "dynamic_attachments");
        op.add(Builder::from, JsonpDeserializer.stringDeserializer(), "from");
        op.add(Builder::icon, JsonpDeserializer.stringDeserializer(), "icon");
        op.add(Builder::text, JsonpDeserializer.stringDeserializer(), "text");
        op.add(Builder::to, JsonpDeserializer.arrayDeserializer(JsonpDeserializer.stringDeserializer()), "to");

    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.tuscany.sca.host.http;

import java.net.URL;

import javax.servlet.RequestDispatcher;
import javax.servlet.Servlet;
import javax.servlet.ServletContext;

/**
 * Interface implemented by host environments that allow Servlets to be
 * registered.
 * <p/>
 * This interface allows a system service to register a Servlet
 * to handle inbound requests.
 *
 * @version $Rev$ $Date$
 * @tuscany.spi.extension.inheritfrom
 */
public interface ServletHost {

    /**
     * Returns the application ServletContext of this host.
     *
     * @return the ServletContext
     */
    ServletContext getServletContext();

    /**
     * Sets the default port for the server.
     *
     * @param port the default port
     */
    void setDefaultPort(int port);

    /**
     * Returns the default port for the server.
     *
     * @return the default port
     */
    int getDefaultPort();

    /**
     * Add a mapping for an instance of a Servlet. This requests that the
     * Servlet container direct all requests to the designated mapping to the
     * supplied Servlet instance.
     *
     * @param uri the URI-mapping for the Servlet
     * @param servlet the Servlet that should be invoked
     * @return The deployed URI
     * @throws ServletMappingException if the mapping cannot be registered
     */
    String addServletMapping(String uri, Servlet servlet) throws ServletMappingException;

    /**
     * Add a mapping for an instance of a Servlet. This requests that the
     * Servlet container direct all requests to the designated mapping to the
     * supplied Servlet instance. SecurityContext can be passed to enable
     * QoS services such as Confidentiality (SSL) and Authentication/Authorization
     *
     * @param uri the URI-mapping for the Servlet
     * @param servlet the Servlet that should be invoked
     * @param securityContext the SecurityContext to enable QoS services
     * @return The deployed URI
     * @throws ServletMappingException if the mapping cannot be registered
     */
    String addServletMapping(String uri, Servlet servlet, SecurityContext securityContext) throws ServletMappingException;

    /**
     * Remove a Servlet mapping. This directs the Servlet container not to direct
     * any more requests to a previously registered Servlet.
     *
     * @param uri the URI-mapping for the Servlet
     * @return the Servlet that was registered to the mapping, null if nothing
     *         was registered to the mapping
     * @throws ServletMappingException if the mapping cannot be removed
     */
    Servlet removeServletMapping(String uri) throws ServletMappingException;

    /**
     * Returns the Servlet mapped to the given URI.
     *
     * @param uri the URI-mapping for the Servlet
     * @return the Servlet registered with the mapping
     * @throws ServletMappingException if the mapping cannot be resolved
     */
    Servlet getServletMapping(String uri) throws ServletMappingException;

    /**
     * Returns a Servlet request dispatcher for the Servlet mapped to the specified URI.
     *
     * @param uri the URI mapped to a Servlet
     * @return a RequestDispatcher that can be used to dispatch requests to
     *         that Servlet
     * @throws ServletMappingException if no dispatcher can be created
     */
    RequestDispatcher getRequestDispatcher(String uri) throws ServletMappingException;

    /**
     * Returns the portion of the request URI that indicates the context of the request
     *
     * @return a String specifying the portion of the request URI that indicates the context of the request
     */
    String getContextPath();

    /**
     * Sets the portion of the request URI that indicates the context of the request
     *
     * @param path the context path
     */
    void setContextPath(String path);

    /**
     * Returns the complete URL mapped to the specified URI.
     *
     * @param uri the URI to resolve
     * @param securityContext the SecurityContext used when building the URL
     *        (e.g. to select an https scheme/port); behavior for a null
     *        context is implementation-specific
     * @return the URL mapped to the specified URI
     */
    URL getURLMapping(String uri, SecurityContext securityContext);

    /**
     * Set an attribute in the application ServletContext
     *
     * @param name the name of the attribute
     * @param value the attribute value
     */
    void setAttribute(String name, Object value);

    /**
     * Returns the name that identify the server type (e.g jetty)
     *
     * @return the server type name
     */
    String getName();

}
/*****************************************************************
 *   Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 ****************************************************************/

package org.apache.cayenne.configuration.xml;

import org.apache.cayenne.map.DataMap;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;

/**
 * SAX handler for the root of a DataMap XML document. Validates the
 * document version (via {@link VersionAwareHandler}) and delegates the
 * {@code <data-map>} element to a {@link DataMapHandler}.
 *
 * @since 4.1
 */
public class RootDataMapHandler extends VersionAwareHandler {

    private static final String DATA_MAP_TAG = "data-map";

    public RootDataMapHandler(LoaderContext loaderContext) {
        super(loaderContext, DATA_MAP_TAG);
        setTargetNamespace(DataMap.SCHEMA_XSD);
    }

    @Override
    protected ContentHandler createChildTagHandler(String namespaceURI, String localName,
                                                   String qName, Attributes attributes) {
        // Only a <data-map> element in our own namespace gets a dedicated handler;
        // anything else falls back to the superclass dispatch.
        boolean isRootDataMapElement =
                DATA_MAP_TAG.equals(localName) && targetNamespace.equals(namespaceURI);

        return isRootDataMapElement
                ? new DataMapHandler(this)
                : super.createChildTagHandler(namespaceURI, localName, qName, attributes);
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.nio_char.tests.java.nio.charset;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.MalformedInputException;
import java.nio.charset.UnmappableCharacterException;

import junit.framework.TestCase;

/**
 * Tests for the US-ASCII CharsetEncoder: encodability checks, the
 * 0x00-0x7F code-point mapping, and the encoder's reset/encode/flush
 * state machine (which throws IllegalStateException on illegal
 * transitions).
 */
public class ASCIICharsetEncoderTest extends TestCase {

    // charset for ascii
    private static final Charset cs = Charset.forName("ascii");
    private static final CharsetEncoder encoder = cs.newEncoder();
    // highest code point US-ASCII can represent
    private static final int MAXCODEPOINT = 0x7F;

    /*
     * @see CharsetEncoderTest#setUp()
     */
    protected void setUp() throws Exception {
    }

    /*
     * @see CharsetEncoderTest#tearDown()
     */
    protected void tearDown() throws Exception {
    }

    public void testCanEncodeCharSequence() {
        // normal case for ascCS
        assertTrue(encoder.canEncode("\u0077"));
        assertFalse(encoder.canEncode("\uc2a3"));
        assertFalse(encoder.canEncode("\ud800\udc00"));
        try {
            encoder.canEncode(null);
        } catch (NullPointerException e) {
            // NOTE(review): no fail() before this catch — the test passes
            // whether or not the NPE is thrown; confirm the intended contract.
        }
        assertTrue(encoder.canEncode(""));
    }

    public void testCanEncodeSurrogate () {
        // lone surrogates are never encodable, via both the char and the
        // CharSequence overloads
        assertFalse(encoder.canEncode('\ud800'));
        assertFalse(encoder.canEncode("\udc00"));
    }

    public void testCanEncodechar() throws CharacterCodingException {
        assertTrue(encoder.canEncode('\u0077'));
        assertFalse(encoder.canEncode('\uc2a3'));
    }

    public void testSpecificDefaultValue() {
        // ASCII is a single-byte encoding: average == max == 1 byte/char
        assertEquals(1.0, encoder.averageBytesPerChar(), 0.0);
        assertEquals(1.0, encoder.maxBytesPerChar(), 0.0);
    }

    public void testMultiStepEncode() throws CharacterCodingException {
        encoder.onMalformedInput(CodingErrorAction.REPORT);
        encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
        try {
            // a complete surrogate pair is well-formed but unmappable in ASCII
            encoder.encode(CharBuffer.wrap("\ud800\udc00"));
            fail("should unmappable");
        } catch (UnmappableCharacterException e) {
        }
        encoder.reset();
        ByteBuffer out = ByteBuffer.allocate(10);
        // a lone high surrogate with endOfInput=true is malformed
        assertTrue(encoder.encode(CharBuffer.wrap("\ud800"), out, true)
                .isMalformed());
        encoder.flush(out);
        encoder.reset();
        out = ByteBuffer.allocate(10);
        // with endOfInput=false the high surrogate may be the start of a pair,
        // so the encoder reports underflow and waits for more input
        assertSame(CoderResult.UNDERFLOW, encoder.encode(CharBuffer
                .wrap("\ud800"), out, false));
        // the low surrogate arriving in a separate buffer does not pair up
        assertTrue(encoder.encode(CharBuffer.wrap("\udc00"), out, true)
                .isMalformed());
    }

    public void testEncodeMapping() throws CharacterCodingException {
        encoder.reset();
        // every ASCII code point must encode to the identical byte value
        for (int i =0; i <= MAXCODEPOINT; i++) {
            char[] chars = Character.toChars(i);
            CharBuffer cb = CharBuffer.wrap(chars);
            ByteBuffer bb = encoder.encode(cb);
            assertEquals(i, bb.get(0));
        }

        CharBuffer cb = CharBuffer.wrap("\u0080");
        try {
            encoder.encode(cb);
            // NOTE(review): no fail() here — if encode() does not throw, this
            // branch silently passes; confirm whether that is intentional.
        } catch (UnmappableCharacterException e) {
            //expected
        }

        cb = CharBuffer.wrap("\ud800");
        try {
            encoder.encode(cb);
            // NOTE(review): same missing-fail() pattern as above.
        } catch (MalformedInputException e) {
            //expected
        }

        ByteBuffer bb = ByteBuffer.allocate(0x10);
        cb = CharBuffer.wrap("A");
        encoder.reset();
        encoder.encode(cb, bb, false);
        try {
            // NOTE(review): cb has been fully consumed above; encode(CharBuffer)
            // on an empty buffer may return early without touching encoder state,
            // in which case no IllegalStateException is thrown — confirm intent.
            encoder.encode(cb);
        } catch (IllegalStateException e) {
            //expected
        }
    }

    public void testInternalState() {
        CharBuffer in = CharBuffer.wrap("A");
        ByteBuffer out = ByteBuffer.allocate(0x10);

        //normal encoding process
        encoder.reset();
        encoder.encode(in, out, false);
        in = CharBuffer.wrap("B");
        encoder.encode(in, out, true);
        encoder.flush(out);
    }

    //reset could be called at any time
    public void testInternalState_Reset() {
        CharsetEncoder newEncoder = cs.newEncoder();
        //Init - > reset
        newEncoder.reset();
        //reset - > reset
        newEncoder.reset();
        //encoding - >reset
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, false);
            newEncoder.reset();
        }
        //encoding end -> reset
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.reset();
        }
        //flushed -> reset
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.flush(out);
            newEncoder.reset();
        }
    }

    public void testInternalState_Encoding() {
        CharsetEncoder newEncoder = cs.newEncoder();
        //Init - > encoding
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, false);
        }
        //reset - > encoding
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.reset();
            newEncoder.encode(in, out, false);
        }
        //reset - > encoding - > encoding
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, false);
            in = CharBuffer.wrap("BC");
            newEncoder.encode(in, out, false);
        }
        //encoding_end - > encoding: illegal transition, must throw
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            in = CharBuffer.wrap("BC");
            try {
                newEncoder.encode(in, out, false);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                //expected
            }
        }
        //flushed - > encoding: illegal transition, must throw
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.flush(out);
            in = CharBuffer.wrap("BC");
            try {
                newEncoder.encode(in, out, false);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                //expected
            }
        }
    }

    public void testInternalState_Encoding_END() {
        CharsetEncoder newEncoder = cs.newEncoder();
        //Init - >encoding_end
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
        }
        //Reset -> encoding_end
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.reset();
            newEncoder.encode(in, out, true);
        }
        //encoding -> encoding_end
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, false);
            in = CharBuffer.wrap("BC");
            newEncoder.encode(in, out, true);
        }
        //encoding_end -> encoding_end (re-invoking with endOfInput=true is legal)
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            in = CharBuffer.wrap("BC");
            newEncoder.encode(in, out, true);
        }
        //Flushed -> encoding_end: illegal transition, must throw
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.flush(out);
            in = CharBuffer.wrap("BC");
            try {
                newEncoder.encode(in, out, true);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                //expected
            }
        }
    }

    public void testInternalState_Flushed() {
        CharsetEncoder newEncoder = cs.newEncoder();
        //init -> flushed
        {
            // NOTE(review): flushing straight from the initial state is not
            // guarded by a try/catch here; the spec allows IllegalStateException
            // for this transition — confirm against the targeted runtime.
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.flush(out);
        }
        //reset - > flushed
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.reset();
            newEncoder.flush(out);
        }
        //encoding - > flushed: illegal transition, must throw
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, false);
            try {
                newEncoder.flush(out);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                // expected
            }
        }
        //encoding_end -> flushed
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.flush(out);
        }
        //flushed - > flushed: double flush must throw
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            newEncoder.flush(out);
            try {
                newEncoder.flush(out);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                // expected
            }
        }
    }

    public void testInternalState_Encode() throws CharacterCodingException {
        CharsetEncoder newEncoder = cs.newEncoder();
        //Init - > encode
        {
            CharBuffer in = CharBuffer.wrap("A");
            newEncoder.encode(in);
        }
        //Reset - > encode
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            newEncoder.encode(in);
        }
        //Encoding -> encode
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, false);
            in = CharBuffer.wrap("BC");
            newEncoder.encode(in);
        }
        //Encoding_end -> encode
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            in = CharBuffer.wrap("BC");
            newEncoder.encode(in);
        }
        //Flushed -> encode
        {
            newEncoder.reset();
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = ByteBuffer.allocate(0x10);
            newEncoder.encode(in, out, true);
            in = CharBuffer.wrap("BC");
            newEncoder.flush(out);
            out = newEncoder.encode(in);
        }
    }

    public void testInternalState_from_Encode() throws CharacterCodingException {
        CharsetEncoder newEncoder = cs.newEncoder();
        //Encode -> Reset
        {
            CharBuffer in = CharBuffer.wrap("A");
            newEncoder.encode(in);
            newEncoder.reset();
        }
        // Encode -> encoding: encode() leaves the encoder flushed, so a
        // subsequent three-arg encode must throw
        {
            CharBuffer in = CharBuffer.wrap("A");
            newEncoder.encode(in);
            ByteBuffer out = ByteBuffer.allocate(0x10);
            try {
                newEncoder.encode(in, out, false);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                // expected
            }
        }
        //Encode -> Encoding_end
        {
            CharBuffer in = CharBuffer.wrap("A");
            newEncoder.encode(in);
            ByteBuffer out = ByteBuffer.allocate(0x10);
            try {
                newEncoder.encode(in, out, true);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                // expected
            }
        }
        //Encode -> Flushed
        {
            CharBuffer in = CharBuffer.wrap("A");
            ByteBuffer out = newEncoder.encode(in);
            try {
                newEncoder.flush(out);
                fail("Should throw IllegalStateException");
            } catch (IllegalStateException e) {
                // expected
            }
        }
        //Encode - > encode
        {
            CharBuffer in = CharBuffer.wrap("A");
            newEncoder.encode(in);
            in = CharBuffer.wrap("BC");
            newEncoder.encode(in);
        }
    }
}
package com.atguigu.gmall.sms.controller;

import java.util.List;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.atguigu.gmall.sms.entity.SeckillSkuEntity;
import com.atguigu.gmall.sms.service.SeckillSkuService;
import com.atguigu.gmall.common.bean.PageResultVo;
import com.atguigu.gmall.common.bean.ResponseVo;
import com.atguigu.gmall.common.bean.PageParamVo;

/**
 * REST endpoints for the seckill-activity / SKU association
 * (秒杀活动商品关联): paged listing, lookup by id, create, update and
 * batch delete. All endpoints wrap their payload in {@link ResponseVo}.
 *
 * @author chris
 * @email 543542806@qq.com
 * @date 2021-12-26 14:56:30
 */
@Api(tags = "秒杀活动商品关联 管理")
@RestController
@RequestMapping("sms/seckillsku")
public class SeckillSkuController {

    @Autowired
    private SeckillSkuService seckillSkuService;

    /**
     * 列表
     *
     * Paged query; paging/filter parameters are bound from the query string
     * into {@link PageParamVo}.
     */
    @GetMapping
    @ApiOperation("分页查询")
    public ResponseVo<PageResultVo> querySeckillSkuByPage(PageParamVo paramVo){
        PageResultVo pageResultVo = seckillSkuService.queryPage(paramVo);

        return ResponseVo.ok(pageResultVo);
    }

    /**
     * 信息
     *
     * Returns the entity for the given id; the payload is null when no row
     * matches (MyBatis-Plus getById returns null).
     */
    @GetMapping("{id}")
    @ApiOperation("详情查询")
    public ResponseVo<SeckillSkuEntity> querySeckillSkuById(@PathVariable("id") Long id){
        SeckillSkuEntity seckillSku = seckillSkuService.getById(id);

        return ResponseVo.ok(seckillSku);
    }

    /**
     * 保存
     */
    @PostMapping
    @ApiOperation("保存")
    public ResponseVo<Object> save(@RequestBody SeckillSkuEntity seckillSku){
        seckillSkuService.save(seckillSku);

        return ResponseVo.ok();
    }

    /**
     * 修改
     */
    // FIX: was a raw ResponseVo — parameterized for consistency with the
    // other endpoints in this controller and to avoid raw-type warnings.
    @PostMapping("/update")
    @ApiOperation("修改")
    public ResponseVo<Object> update(@RequestBody SeckillSkuEntity seckillSku){
        seckillSkuService.updateById(seckillSku);

        return ResponseVo.ok();
    }

    /**
     * 删除
     *
     * Batch delete by primary keys supplied in the JSON request body.
     */
    // FIX: was a raw ResponseVo — parameterized for consistency.
    @PostMapping("/delete")
    @ApiOperation("删除")
    public ResponseVo<Object> delete(@RequestBody List<Long> ids){
        seckillSkuService.removeByIds(ids);

        return ResponseVo.ok();
    }
}
package com.talhahasanzia.sampleapp.parsers;

import com.talhahasanzia.csv.core.ParseCallback;
import com.talhahasanzia.csv.core.Parser;
import com.talhahasanzia.csv.exceptions.ColumnsLengthException;
import com.talhahasanzia.csv.models.Row;
import com.talhahasanzia.sampleapp.models.SampleObject;

import java.util.LinkedList;
import java.util.List;

/**
 * Parses CSV text (first line = column titles) into {@link SampleObject}s.
 * Rows, titles and the column count are cached per parse() call; parse()
 * resets the caches so the instance can be reused.
 */
public class SampleObjectParser implements Parser<SampleObject[]> {

    private int totalColumns;
    private Row titleRow;
    private Row[] rows;
    // Splits on commas that are outside double-quoted sections
    // (lookahead: an even number of quotes remains after the comma).
    private final String splitterRegex = ",(?=([^\"]*\"[^\"]*\")*[^\"]*$)";

    /**
     * Parses the CSV data and reports either the resulting objects or the
     * first column-count mismatch through {@code parseResult}.
     * Expects each data row to have 4 columns: three strings and an int.
     */
    @Override
    public void parse(String data, ParseCallback<SampleObject[]> parseResult) {
        // clear cached instances on each time parse is called
        rows = null;
        titleRow = null;
        totalColumns = 0;

        Row[] parsedRows;
        try {
            parsedRows = getRows(data);
        } catch (ColumnsLengthException e) {
            parseResult.onParsingFailure(e);
            return;
        }

        SampleObject[] sampleObjects = new SampleObject[parsedRows.length];
        for (int i = 0; i < parsedRows.length; i++) {
            sampleObjects[i] = new SampleObject(
                    parsedRows[i].getEntryAtColumn(0),
                    parsedRows[i].getEntryAtColumn(1),
                    parsedRows[i].getEntryAtColumn(2),
                    // NOTE: throws NumberFormatException if column 3 is not an int
                    Integer.parseInt(parsedRows[i].getEntryAtColumn(3))
            );
        }

        parseResult.onParsingSuccess(sampleObjects);
    }

    /**
     * Splits the data into rows (title row excluded), validating each row's
     * column count against the title row. Results are cached until the next
     * parse() call.
     *
     * BUG FIX: the original scan advanced the index twice after each '\n'
     * (skipping the following character) and pre-incremented the start offset
     * for a final row without a trailing newline, which silently dropped the
     * first character of the last row (e.g. "c,d" was parsed as ",d").
     *
     * @throws ColumnsLengthException if any row's column count differs from
     *         the title row's
     */
    @Override
    public Row[] getRows(String data) throws ColumnsLengthException {
        if (rows == null) {
            List<Row> rowList = new LinkedList<>();
            int lineStart = 0;
            for (int i = 0; i < data.length(); i++) {
                if (data.charAt(i) == '\n') {
                    rowList.add(buildRow(data, data.substring(lineStart, i)));
                    lineStart = i + 1;
                }
            }
            if (lineStart < data.length()) {
                // final line had no trailing newline
                rowList.add(buildRow(data, data.substring(lineStart)));
            }
            rowList.remove(0); // remove title row from the data
            // cache rows
            rows = rowList.toArray(new Row[0]);
        }
        return rows;
    }

    /** Splits one CSV line and validates its column count against the titles. */
    private Row buildRow(String data, String line) throws ColumnsLengthException {
        String[] words = getWords(line);
        if (words.length != getTitles(data).getEntries().length) {
            throw new ColumnsLengthException(titleRow.getEntries().length, words.length);
        }
        return new Row(words);
    }

    /** Splits a single row into its cell values (quote-aware comma split). */
    @Override
    public String[] getWords(String rowAsString) {
        return rowAsString.split(splitterRegex);
    }

    /** Returns the (cached) title row parsed from the first line of the data. */
    @Override
    public Row getTitles(String data) {
        if (titleRow == null) {
            String firstLine = data.substring(0, data.indexOf('\n'));
            // cache parsed titles
            String[] splitted = firstLine.split(splitterRegex);
            this.titleRow = new Row(splitted);
        }
        return titleRow;
    }

    /** Returns the (cached) number of columns in the title row. */
    @Override
    public int getTotalColumns(Row titles) {
        if (totalColumns == 0) {
            // cache total columns
            this.totalColumns = titles.getEntries().length;
        }
        return totalColumns;
    }
}
package fact.it.www.control;

import fact.it.www.beans.RoomSize;
import java.io.IOException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Reads room dimensions (and an optional "ceiling" flag) from the request,
 * builds a RoomSize bean, stores it as the "renovation" request attribute
 * and forwards to paintVolume.jsp for rendering.
 *
 * @author Ali
 */
@WebServlet(name = "PaintServlet", urlPatterns = {"/PaintServlet"})
public class PaintServlet extends HttpServlet {

    /**
     * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
     * methods.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // NOTE: parameters are assumed to be present and numeric; a missing or
        // malformed value surfaces as a NumberFormatException from parseDouble.
        final double roomHeight = Double.parseDouble(request.getParameter("height"));
        final double roomLength = Double.parseDouble(request.getParameter("length"));
        final double roomWidth = Double.parseDouble(request.getParameter("width"));

        RoomSize renovation = new RoomSize(roomHeight, roomLength, roomWidth);
        // Checkbox-style parameter: its mere presence means "paint the ceiling".
        boolean includeCeiling = request.getParameter("ceiling") != null;
        if (includeCeiling) {
            renovation.setCeiling(true);
        }

        request.setAttribute("renovation", renovation);
        request.getRequestDispatcher("paintVolume.jsp").forward(request, response);
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP <code>GET</code> method by delegating to
     * {@link #processRequest}.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP <code>POST</code> method by delegating to
     * {@link #processRequest}.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>

}
package com.proximyst.spawnertax;

import com.google.common.base.Enums;
import lombok.Getter;
import lombok.val;
import net.milkbowl.vault.economy.Economy;
import org.bukkit.entity.EntityType;
import org.bukkit.event.HandlerList;
import org.bukkit.permissions.Permission;
import org.bukkit.permissions.PermissionDefault;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.java.JavaPlugin;

import java.util.HashMap;
import java.util.Map;

/**
 * Plugin entry point: hooks the Vault economy, loads per-entity spawner-break
 * taxes and discount permissions from the config, and registers the break
 * listener and reload command.
 */
public class Main extends JavaPlugin {
    public static final String PERMISSION_RELOAD = "spawnertax.reload";
    public static final String PERMISSION_MINE = "spawnertax.mine";
    public static final String PERMISSION_FREE = "spawnertax.free";

    // Vault economy provider; non-null once onEnable has completed successfully.
    @Getter
    private Economy economy;

    // Tax per entity type; the "_" config key is stored under a null key as
    // the catch-all default.
    @Getter
    private final Map<EntityType, Integer> taxes = new HashMap<>();

    // Discount permissions registered by reloadPermissions, keyed by the
    // discount percentage; used by onDisable to unregister them again.
    @Getter
    private final Map<Double, Permission> discounts = new HashMap<>();

    @Override
    public void onEnable() {
        {
            final Plugin vault = getServer().getPluginManager().getPlugin("Vault");
            if (vault == null || !vault.isEnabled()) {
                getLogger().warning("The plugin cannot enable without Vault!");
                setEnabled(false);
                return;
            }

            val economyService = getServer().getServicesManager().getRegistration(Economy.class);
            if (economyService == null || (economy = economyService.getProvider()) == null) {
                getLogger().warning("The plugin cannot enable without an economy plugin!");
                setEnabled(false);
                return;
            }
        }
        // economy is now non-null

        saveDefaultConfig();
        reloadTaxes();
        reloadPermissions();

        getServer().getPluginManager().registerEvents(
            new SpawnerBreakListener(this),
            this
        );
        getCommand("spawnertax-reload").setExecutor(new ReloadConfigCommand(this));
    }

    @Override
    public void onDisable() {
        HandlerList.unregisterAll(this);
        discounts.values().forEach(getServer().getPluginManager()::removePermission);
        discounts.clear();
        taxes.clear();
    }

    /** Re-reads the "discounts" config section and (re-)registers one permission per entry. */
    protected void reloadPermissions() {
        // Unregister anything left over from a previous load before re-reading.
        discounts.values().forEach(getServer().getPluginManager()::removePermission);
        discounts.clear();

        val discountSection = getConfig().getConfigurationSection("discounts");
        if (discountSection == null) {
            return;
        }

        for (String key : discountSection.getKeys(false)) {
            if (key == null) {
                continue;
            }
            // Normalise the key so it forms a valid permission node segment.
            key = key.toLowerCase().trim().replace(
                ' ',
                '_'
            );
            if (key.isEmpty()) {
                continue;
            }

            val discount = discountSection.getDouble(
                key,
                0d
            );
            if (discount <= 0) {
                continue;
            }

            val permission = new Permission(
                "spawnertax.discount." + key,
                "A " + discount + "% discount on breaking spawners",
                PermissionDefault.FALSE
            );
            getServer().getPluginManager().addPermission(permission);
            // BUG FIX: the permission was registered but never remembered,
            // leaving getDiscounts() permanently empty and onDisable() unable
            // to unregister the permissions it had created.
            discounts.put(discount, permission);
        }
    }

    /** Re-reads the "taxes" config section into {@link #taxes}; disables the plugin if absent. */
    protected void reloadTaxes() {
        taxes.clear();

        val taxSection = getConfig().getConfigurationSection("taxes");
        if (taxSection == null) {
            getLogger().warning("There are no taxes defined, therefore the plugin is useless and disabling.");
            setEnabled(false);
            return;
        }

        for (String key : taxSection.getKeys(false)) {
            if (key.equals("_")) {
                // "_" is the catch-all default tax, stored under a null key.
                taxes.put(
                    null,
                    taxSection.getInt(
                        key,
                        0
                    )
                );
                // BUG FIX: without this continue, "_" fell through to the
                // entity-type lookup below and logged a spurious
                // "Unknown entity type: _" warning on every reload.
                continue;
            }

            val type = Enums.getIfPresent(
                EntityType.class,
                key.toUpperCase()
            );
            if (!type.isPresent()) {
                getLogger().warning("Unknown entity type: " + key.toUpperCase());
                continue;
            }

            val price = taxSection.getInt(
                key,
                0
            );
            taxes.put(
                type.get(),
                price
            );
        }
    }
}
// NOTE(review): protoc-generated message class — do not hand-edit; regenerate from
// app/proxyman/command/command.proto with the protobuf compiler instead. This review
// pass only restores line breaks (the file had been collapsed, which left `//`
// comments swallowing the code after them) and adds comments; code tokens unchanged.
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: app/proxyman/command/command.proto
package com.xray.app.proxyman.command;
/** * Protobuf type {@code xray.app.proxyman.command.AlterInboundRequest} */
public final class AlterInboundRequest extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:xray.app.proxyman.command.AlterInboundRequest)
AlterInboundRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use AlterInboundRequest.newBuilder() to construct.
private AlterInboundRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
private AlterInboundRequest() { tag_ = ""; }
@java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire-format parsing constructor: reads fields tag (1, string) and operation (2, message),
// collecting anything unrecognised into unknownFields.
private AlterInboundRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); tag_ = s; break; } case 18: { com.xray.common.serial.TypedMessage.Builder subBuilder = null; if (operation_ != null) { subBuilder = operation_.toBuilder(); } operation_ = input.readMessage(com.xray.common.serial.TypedMessage.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(operation_); operation_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xray.app.proxyman.command.Command.internal_static_xray_app_proxyman_command_AlterInboundRequest_descriptor; }
@java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.xray.app.proxyman.command.Command.internal_static_xray_app_proxyman_command_AlterInboundRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.xray.app.proxyman.command.AlterInboundRequest.class, com.xray.app.proxyman.command.AlterInboundRequest.Builder.class); }
// Field 1: tag. Stored as Object so it can lazily hold either a String or a ByteString.
public static final int TAG_FIELD_NUMBER = 1;
private volatile java.lang.Object tag_;
/** * <code>string tag = 1;</code> */ public java.lang.String getTag() { java.lang.Object ref = tag_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tag_ = s; return s; } }
/** * <code>string tag = 1;</code> */ public com.google.protobuf.ByteString getTagBytes() { java.lang.Object ref = tag_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); tag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } }
// Field 2: operation (message). null means unset; accessors substitute the default instance.
public static final int OPERATION_FIELD_NUMBER = 2;
private com.xray.common.serial.TypedMessage operation_;
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public boolean hasOperation() { return operation_ != null; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public com.xray.common.serial.TypedMessage getOperation() { return operation_ == null ? com.xray.common.serial.TypedMessage.getDefaultInstance() : operation_; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public com.xray.common.serial.TypedMessageOrBuilder getOperationOrBuilder() { return getOperation(); }
private byte memoizedIsInitialized = -1;
@java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; }
@java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getTagBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tag_); } if (operation_ != null) { output.writeMessage(2, getOperation()); } unknownFields.writeTo(output); }
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getTagBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tag_); } if (operation_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getOperation()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.xray.app.proxyman.command.AlterInboundRequest)) { return super.equals(obj); } com.xray.app.proxyman.command.AlterInboundRequest other = (com.xray.app.proxyman.command.AlterInboundRequest) obj; if (!getTag() .equals(other.getTag())) return false; if (hasOperation() != other.hasOperation()) return false; if (hasOperation()) { if (!getOperation() .equals(other.getOperation())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; }
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TAG_FIELD_NUMBER; hash = (53 * hash) + getTag().hashCode(); if (hasOperation()) { hash = (37 * hash) + OPERATION_FIELD_NUMBER; hash = (53 * hash) + getOperation().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Standard generated parseFrom overloads, all delegating to PARSER.
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); }
public static com.xray.app.proxyman.command.AlterInboundRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); }
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
public static Builder newBuilder(com.xray.app.proxyman.command.AlterInboundRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
@java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * Protobuf type {@code xray.app.proxyman.command.AlterInboundRequest} */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:xray.app.proxyman.command.AlterInboundRequest)
com.xray.app.proxyman.command.AlterInboundRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xray.app.proxyman.command.Command.internal_static_xray_app_proxyman_command_AlterInboundRequest_descriptor; }
@java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.xray.app.proxyman.command.Command.internal_static_xray_app_proxyman_command_AlterInboundRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.xray.app.proxyman.command.AlterInboundRequest.class, com.xray.app.proxyman.command.AlterInboundRequest.Builder.class); }
// Construct using com.xray.app.proxyman.command.AlterInboundRequest.newBuilder()
private Builder() { maybeForceBuilderInitialization(); }
private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); }
private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } }
@java.lang.Override public Builder clear() { super.clear(); tag_ = ""; if (operationBuilder_ == null) { operation_ = null; } else { operation_ = null; operationBuilder_ = null; } return this; }
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.xray.app.proxyman.command.Command.internal_static_xray_app_proxyman_command_AlterInboundRequest_descriptor; }
@java.lang.Override public com.xray.app.proxyman.command.AlterInboundRequest getDefaultInstanceForType() { return com.xray.app.proxyman.command.AlterInboundRequest.getDefaultInstance(); }
@java.lang.Override public com.xray.app.proxyman.command.AlterInboundRequest build() { com.xray.app.proxyman.command.AlterInboundRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
@java.lang.Override public com.xray.app.proxyman.command.AlterInboundRequest buildPartial() { com.xray.app.proxyman.command.AlterInboundRequest result = new com.xray.app.proxyman.command.AlterInboundRequest(this); result.tag_ = tag_; if (operationBuilder_ == null) { result.operation_ = operation_; } else { result.operation_ = operationBuilder_.build(); } onBuilt(); return result; }
@java.lang.Override public Builder clone() { return super.clone(); }
@java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
@java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
@java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
@java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.xray.app.proxyman.command.AlterInboundRequest) { return mergeFrom((com.xray.app.proxyman.command.AlterInboundRequest)other); } else { super.mergeFrom(other); return this; } }
public Builder mergeFrom(com.xray.app.proxyman.command.AlterInboundRequest other) { if (other == com.xray.app.proxyman.command.AlterInboundRequest.getDefaultInstance()) return this; if (!other.getTag().isEmpty()) { tag_ = other.tag_; onChanged(); } if (other.hasOperation()) { mergeOperation(other.getOperation()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; }
@java.lang.Override public final boolean isInitialized() { return true; }
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.xray.app.proxyman.command.AlterInboundRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.xray.app.proxyman.command.AlterInboundRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; }
private java.lang.Object tag_ = "";
/** * <code>string tag = 1;</code> */ public java.lang.String getTag() { java.lang.Object ref = tag_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tag_ = s; return s; } else { return (java.lang.String) ref; } }
/** * <code>string tag = 1;</code> */ public com.google.protobuf.ByteString getTagBytes() { java.lang.Object ref = tag_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); tag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } }
/** * <code>string tag = 1;</code> */ public Builder setTag( java.lang.String value) { if (value == null) { throw new NullPointerException(); } tag_ = value; onChanged(); return this; }
/** * <code>string tag = 1;</code> */ public Builder clearTag() { tag_ = getDefaultInstance().getTag(); onChanged(); return this; }
/** * <code>string tag = 1;</code> */ public Builder setTagBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); tag_ = value; onChanged(); return this; }
private com.xray.common.serial.TypedMessage operation_;
private com.google.protobuf.SingleFieldBuilderV3< com.xray.common.serial.TypedMessage, com.xray.common.serial.TypedMessage.Builder, com.xray.common.serial.TypedMessageOrBuilder> operationBuilder_;
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public boolean hasOperation() { return operationBuilder_ != null || operation_ != null; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public com.xray.common.serial.TypedMessage getOperation() { if (operationBuilder_ == null) { return operation_ == null ? com.xray.common.serial.TypedMessage.getDefaultInstance() : operation_; } else { return operationBuilder_.getMessage(); } }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public Builder setOperation(com.xray.common.serial.TypedMessage value) { if (operationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { operationBuilder_.setMessage(value); } return this; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public Builder setOperation( com.xray.common.serial.TypedMessage.Builder builderForValue) { if (operationBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { operationBuilder_.setMessage(builderForValue.build()); } return this; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public Builder mergeOperation(com.xray.common.serial.TypedMessage value) { if (operationBuilder_ == null) { if (operation_ != null) { operation_ = com.xray.common.serial.TypedMessage.newBuilder(operation_).mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { operationBuilder_.mergeFrom(value); } return this; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public Builder clearOperation() { if (operationBuilder_ == null) { operation_ = null; onChanged(); } else { operation_ = null; operationBuilder_ = null; } return this; }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public com.xray.common.serial.TypedMessage.Builder getOperationBuilder() { onChanged(); return getOperationFieldBuilder().getBuilder(); }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ public com.xray.common.serial.TypedMessageOrBuilder getOperationOrBuilder() { if (operationBuilder_ != null) { return operationBuilder_.getMessageOrBuilder(); } else { return operation_ == null ? com.xray.common.serial.TypedMessage.getDefaultInstance() : operation_; } }
/** * <code>.xray.common.serial.TypedMessage operation = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.xray.common.serial.TypedMessage, com.xray.common.serial.TypedMessage.Builder, com.xray.common.serial.TypedMessageOrBuilder> getOperationFieldBuilder() { if (operationBuilder_ == null) { operationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.xray.common.serial.TypedMessage, com.xray.common.serial.TypedMessage.Builder, com.xray.common.serial.TypedMessageOrBuilder>( getOperation(), getParentForChildren(), isClean()); operation_ = null; } return operationBuilder_; }
@java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); }
@java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:xray.app.proxyman.command.AlterInboundRequest)
}
// @@protoc_insertion_point(class_scope:xray.app.proxyman.command.AlterInboundRequest)
private static final com.xray.app.proxyman.command.AlterInboundRequest DEFAULT_INSTANCE;
static { DEFAULT_INSTANCE = new com.xray.app.proxyman.command.AlterInboundRequest(); }
public static com.xray.app.proxyman.command.AlterInboundRequest getDefaultInstance() { return DEFAULT_INSTANCE; }
private static final com.google.protobuf.Parser<AlterInboundRequest> PARSER = new com.google.protobuf.AbstractParser<AlterInboundRequest>() { @java.lang.Override public AlterInboundRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AlterInboundRequest(input, extensionRegistry); } };
public static com.google.protobuf.Parser<AlterInboundRequest> parser() { return PARSER; }
@java.lang.Override public com.google.protobuf.Parser<AlterInboundRequest> getParserForType() { return PARSER; }
@java.lang.Override public com.xray.app.proxyman.command.AlterInboundRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.math.expr;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Assert;
import org.junit.Test;

import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Tests for {@link Parser}: parse-tree stringification, constant flattening,
 * identifier/scalar/array binding analysis, and unapplied-identifier
 * transformation into map/fold apply functions.
 */
public class ParserTest
{
  @Test
  public void testSimple()
  {
    String actual = Parser.parse("1", ExprMacroTable.nil()).toString();
    String expected = "1";
    Assert.assertEquals(expected, actual);
  }

  @Test
  public void testSimpleUnaryOps1()
  {
    String actual = Parser.parse("-x", ExprMacroTable.nil()).toString();
    String expected = "-x";
    Assert.assertEquals(expected, actual);

    actual = Parser.parse("!x", ExprMacroTable.nil()).toString();
    expected = "!x";
    Assert.assertEquals(expected, actual);
  }

  @Test
  public void testSimpleUnaryOps2()
  {
    validateFlatten("-1", "-1", "-1");
    validateFlatten("--1", "--1", "1");
    validateFlatten("-1+2", "(+ -1 2)", "1");
    validateFlatten("-1*2", "(* -1 2)", "-2");
    validateFlatten("-1^2", "(^ -1 2)", "1");
  }

  @Test
  public void testSimpleLogicalOps1()
  {
    validateParser("x>y", "(> x y)", ImmutableList.of("x", "y"));
    validateParser("x<y", "(< x y)", ImmutableList.of("x", "y"));
    validateParser("x<=y", "(<= x y)", ImmutableList.of("x", "y"));
    validateParser("x>=y", "(>= x y)", ImmutableList.of("x", "y"));
    validateParser("x==y", "(== x y)", ImmutableList.of("x", "y"));
    validateParser("x!=y", "(!= x y)", ImmutableList.of("x", "y"));
    validateParser("x && y", "(&& x y)", ImmutableList.of("x", "y"));
    validateParser("x || y", "(|| x y)", ImmutableList.of("x", "y"));
  }

  @Test
  public void testSimpleAdditivityOp1()
  {
    validateParser("x+y", "(+ x y)", ImmutableList.of("x", "y"));
    validateParser("x-y", "(- x y)", ImmutableList.of("x", "y"));
  }

  @Test
  public void testSimpleAdditivityOp2()
  {
    validateParser("x+y+z", "(+ (+ x y) z)", ImmutableList.of("x", "y", "z"));
    validateParser("x+y-z", "(- (+ x y) z)", ImmutableList.of("x", "y", "z"));
    validateParser("x-y+z", "(+ (- x y) z)", ImmutableList.of("x", "y", "z"));
    validateParser("x-y-z", "(- (- x y) z)", ImmutableList.of("x", "y", "z"));
    // repeated identifier gets uniquified as x_0 in the scalar set
    validateParser("x-y-x", "(- (- x y) x)", ImmutableList.of("x", "y"), ImmutableSet.of("x", "x_0", "y"));
  }

  @Test
  public void testSimpleMultiplicativeOp1()
  {
    validateParser("x*y", "(* x y)", ImmutableList.of("x", "y"));
    validateParser("x/y", "(/ x y)", ImmutableList.of("x", "y"));
    validateParser("x%y", "(% x y)", ImmutableList.of("x", "y"));
  }

  @Test
  public void testSimpleMultiplicativeOp2()
  {
    validateFlatten("1*2*3", "(* (* 1 2) 3)", "6");
    validateFlatten("1*2/3", "(/ (* 1 2) 3)", "0");
    validateFlatten("1/2*3", "(* (/ 1 2) 3)", "0");
    validateFlatten("1/2/3", "(/ (/ 1 2) 3)", "0");

    validateFlatten("1.0*2*3", "(* (* 1.0 2) 3)", "6.0");
    validateFlatten("1.0*2/3", "(/ (* 1.0 2) 3)", "0.6666666666666666");
    validateFlatten("1.0/2*3", "(* (/ 1.0 2) 3)", "1.5");
    validateFlatten("1.0/2/3", "(/ (/ 1.0 2) 3)", "0.16666666666666666");

    // partial
    validateFlatten("1.0*2*x", "(* (* 1.0 2) x)", "(* 2.0 x)");
    validateFlatten("1.0*2/x", "(/ (* 1.0 2) x)", "(/ 2.0 x)");
    validateFlatten("1.0/2*x", "(* (/ 1.0 2) x)", "(* 0.5 x)");
    validateFlatten("1.0/2/x", "(/ (/ 1.0 2) x)", "(/ 0.5 x)");

    // not working yet
    validateFlatten("1.0*x*3", "(* (* 1.0 x) 3)", "(* (* 1.0 x) 3)");
  }

  @Test
  public void testSimpleCarrot1()
  {
    validateFlatten("1^2", "(^ 1 2)", "1");
  }

  @Test
  public void testSimpleCarrot2()
  {
    validateFlatten("1^2^3", "(^ 1 (^ 2 3))", "1");
  }

  @Test
  public void testMixed()
  {
    validateFlatten("1+2*3", "(+ 1 (* 2 3))", "7");
    validateFlatten("1+(2*3)", "(+ 1 (* 2 3))", "7");
    validateFlatten("(1+2)*3", "(* (+ 1 2) 3)", "9");

    validateFlatten("1*2+3", "(+ (* 1 2) 3)", "5");
    validateFlatten("(1*2)+3", "(+ (* 1 2) 3)", "5");
    validateFlatten("1*(2+3)", "(* 1 (+ 2 3))", "5");

    validateFlatten("1+2^3", "(+ 1 (^ 2 3))", "9");
    validateFlatten("1+(2^3)", "(+ 1 (^ 2 3))", "9");
    validateFlatten("(1+2)^3", "(^ (+ 1 2) 3)", "27");

    validateFlatten("1^2+3", "(+ (^ 1 2) 3)", "4");
    validateFlatten("(1^2)+3", "(+ (^ 1 2) 3)", "4");
    validateFlatten("1^(2+3)", "(^ 1 (+ 2 3))", "1");

    validateFlatten("1^2*3+4", "(+ (* (^ 1 2) 3) 4)", "7");
    validateFlatten("-1^2*-3+-4", "(+ (* (^ -1 2) -3) -4)", "-7");

    validateFlatten("max(3, 4)", "(max [3, 4])", "4");
    validateFlatten("min(1, max(3, 4))", "(min [1, (max [3, 4])])", "1");
  }

  @Test
  public void testIdentifiers()
  {
    validateParser("foo", "foo", ImmutableList.of("foo"), ImmutableSet.of());
    validateParser("\"foo\"", "foo", ImmutableList.of("foo"), ImmutableSet.of());
    validateParser("\"foo bar\"", "foo bar", ImmutableList.of("foo bar"), ImmutableSet.of());
    validateParser("\"foo\\\"bar\"", "foo\"bar", ImmutableList.of("foo\"bar"), ImmutableSet.of());
  }

  @Test
  public void testLiterals()
  {
    validateConstantExpression("\'foo\'", "foo");
    validateConstantExpression("\'foo bar\'", "foo bar");
    validateConstantExpression("\'föo bar\'", "föo bar");
    validateConstantExpression("\'f\\u0040o bar\'", "f@o bar");
    validateConstantExpression("\'f\\u000Ao \\'b\\\\\\\"ar\'", "f\no 'b\\\"ar");
  }

  @Test
  public void testLiteralArrays()
  {
    validateConstantExpression("[1.0, 2.345]", new Double[]{1.0, 2.345});
    validateConstantExpression("[1, 3]", new Long[]{1L, 3L});
    validateConstantExpression("[\'hello\', \'world\']", new String[]{"hello", "world"});
  }

  @Test
  public void testFunctions()
  {
    validateParser("sqrt(x)", "(sqrt [x])", ImmutableList.of("x"));
    validateParser("if(cond,then,else)", "(if [cond, then, else])", ImmutableList.of("else", "then", "cond"));

    validateParser("cast(x, 'STRING')", "(cast [x, STRING])", ImmutableList.of("x"));
    validateParser("cast(x, 'LONG')", "(cast [x, LONG])", ImmutableList.of("x"));
    validateParser("cast(x, 'DOUBLE')", "(cast [x, DOUBLE])", ImmutableList.of("x"));
    validateParser(
        "cast(x, 'STRING_ARRAY')",
        "(cast [x, STRING_ARRAY])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "cast(x, 'LONG_ARRAY')",
        "(cast [x, LONG_ARRAY])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "cast(x, 'DOUBLE_ARRAY')",
        "(cast [x, DOUBLE_ARRAY])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "array_length(x)",
        "(array_length [x])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "array_concat(x, y)",
        "(array_concat [x, y])",
        ImmutableList.of("x", "y"),
        ImmutableSet.of(),
        ImmutableSet.of("x", "y")
    );
    validateParser(
        "array_append(x, y)",
        "(array_append [x, y])",
        ImmutableList.of("x", "y"),
        ImmutableSet.of("y"),
        ImmutableSet.of("x")
    );

    validateFlatten("sqrt(4)", "(sqrt [4])", "2.0");
    validateFlatten("array_concat([1, 2], [3, 4])", "(array_concat [[1, 2], [3, 4]])", "[1, 2, 3, 4]");
  }

  @Test
  public void testApplyFunctions()
  {
    validateParser(
        "map(() -> 1, x)",
        "(map ([] -> 1), [x])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "map((x) -> x + 1, x)",
        "(map ([x] -> (+ x 1)), [x])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "x + map((x) -> x + 1, y)",
        "(+ x (map ([x] -> (+ x 1)), [y]))",
        ImmutableList.of("x", "y"),
        ImmutableSet.of("x"),
        ImmutableSet.of("y")
    );
    validateParser(
        "x + map((x) -> x + 1, x)",
        "(+ x (map ([x] -> (+ x 1)), [x]))",
        ImmutableList.of("x"),
        ImmutableSet.of("x"),
        ImmutableSet.of("x_0")
    );
    validateParser(
        "map((x) -> concat(x, y), z)",
        "(map ([x] -> (concat [x, y])), [z])",
        ImmutableList.of("y", "z"),
        ImmutableSet.of("y"),
        ImmutableSet.of("z")
    );
    // 'y' is accumulator, and currently unknown
    validateParser(
        "fold((x, acc) -> acc + x, x, y)",
        "(fold ([x, acc] -> (+ acc x)), [x, y])",
        ImmutableList.of("x", "y"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "fold((x, acc) -> acc + x, map((x) -> x + 1, x), y)",
        "(fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])",
        ImmutableList.of("x", "y"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateParser(
        "array_append(z, fold((x, acc) -> acc + x, map((x) -> x + 1, x), y))",
        "(array_append [z, (fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])])",
        ImmutableList.of("x", "y", "z"),
        ImmutableSet.of(),
        ImmutableSet.of("x", "z")
    );
    validateParser(
        "map(z -> z + 1, array_append(z, fold((x, acc) -> acc + x, map((x) -> x + 1, x), y)))",
        "(map ([z] -> (+ z 1)), [(array_append [z, (fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])])])",
        ImmutableList.of("x", "y", "z"),
        ImmutableSet.of(),
        ImmutableSet.of("x", "z")
    );
    validateParser(
        "array_append(map(z -> z + 1, array_append(z, fold((x, acc) -> acc + x, map((x) -> x + 1, x), y))), a)",
        "(array_append [(map ([z] -> (+ z 1)), [(array_append [z, (fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])])]), a])",
        ImmutableList.of("a", "x", "y", "z"),
        ImmutableSet.of("a"),
        ImmutableSet.of("x", "z")
    );

    validateFlatten("map((x) -> x + 1, [1, 2, 3, 4])", "(map ([x] -> (+ x 1)), [[1, 2, 3, 4]])", "[2, 3, 4, 5]");
    validateFlatten(
        "map((x) -> x + z, [1, 2, 3, 4])",
        "(map ([x] -> (+ x z)), [[1, 2, 3, 4]])",
        "(map ([x] -> (+ x z)), [[1, 2, 3, 4]])"
    );
  }

  @Test
  public void testApplyUnapplied()
  {
    validateApplyUnapplied("x + 1", "(+ x 1)", "(+ x 1)", ImmutableList.of());
    validateApplyUnapplied("x + 1", "(+ x 1)", "(+ x 1)", ImmutableList.of("z"));
    validateApplyUnapplied("x + y", "(+ x y)", "(map ([x] -> (+ x y)), [x])", ImmutableList.of("x"));
    validateApplyUnapplied(
        "x + y",
        "(+ x y)",
        "(cartesian_map ([x, y] -> (+ x y)), [x, y])",
        ImmutableList.of("x", "y")
    );
    validateApplyUnapplied(
        "map(x -> x + y, x)",
        "(map ([x] -> (+ x y)), [x])",
        "(cartesian_map ([x, y] -> (+ x y)), [x, y])",
        ImmutableList.of("y")
    );
    validateApplyUnapplied(
        "map(x -> x + 1, x + 1)",
        "(map ([x] -> (+ x 1)), [(+ x 1)])",
        "(map ([x] -> (+ x 1)), [(map ([x] -> (+ x 1)), [x])])",
        ImmutableList.of("x")
    );
    validateApplyUnapplied(
        "fold((x, acc) -> acc + x + y, x, 0)",
        "(fold ([x, acc] -> (+ (+ acc x) y)), [x, 0])",
        "(cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0])",
        ImmutableList.of("y")
    );
    validateApplyUnapplied(
        "z + fold((x, acc) -> acc + x + y, x, 0)",
        "(+ z (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))",
        "(+ z (cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0]))",
        ImmutableList.of("y")
    );
    validateApplyUnapplied(
        "z + fold((x, acc) -> acc + x + y, x, 0)",
        "(+ z (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))",
        "(map ([z] -> (+ z (cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0]))), [z])",
        ImmutableList.of("y", "z")
    );
    validateApplyUnapplied(
        "array_to_string(concat(x, 'hello'), ',')",
        "(array_to_string [(concat [x, hello]), ,])",
        "(array_to_string [(map ([x] -> (concat [x, hello])), [x]), ,])",
        ImmutableList.of("x", "y")
    );
    validateApplyUnapplied(
        "cast(x, 'LONG')",
        "(cast [x, LONG])",
        "(map ([x] -> (cast [x, LONG])), [x])",
        ImmutableList.of("x")
    );
    validateApplyUnapplied(
        "cartesian_map((x,y) -> x + y, x, y)",
        "(cartesian_map ([x, y] -> (+ x y)), [x, y])",
        "(cartesian_map ([x, y] -> (+ x y)), [x, y])",
        ImmutableList.of("y")
    );
    validateApplyUnapplied(
        "cast(x, 'LONG_ARRAY')",
        "(cast [x, LONG_ARRAY])",
        "(cast [x, LONG_ARRAY])",
        ImmutableList.of("x")
    );
  }

  @Test
  public void testUniquify()
  {
    validateParser("x-x", "(- x x)", ImmutableList.of("x"), ImmutableSet.of("x", "x_0"));
    validateParser(
        "x - x + x",
        "(+ (- x x) x)",
        ImmutableList.of("x"),
        ImmutableSet.of("x", "x_0", "x_1")
    );
    validateParser(
        "map((x) -> x + x, x)",
        "(map ([x] -> (+ x x)), [x])",
        ImmutableList.of("x"),
        ImmutableSet.of(),
        ImmutableSet.of("x")
    );
    validateApplyUnapplied(
        "x + x",
        "(+ x x)",
        "(cartesian_map ([x, x_0] -> (+ x x_0)), [x, x])",
        ImmutableList.of("x")
    );
    validateApplyUnapplied(
        "x + x + x",
        "(+ (+ x x) x)",
        "(cartesian_map ([x, x_0, x_1] -> (+ (+ x x_0) x_1)), [x, x, x])",
        ImmutableList.of("x")
    );
    // heh
    validateApplyUnapplied(
        "x + x + x + y + y + y + y + z + z + z",
        "(+ (+ (+ (+ (+ (+ (+ (+ (+ x x) x) y) y) y) y) z) z) z)",
        "(cartesian_map ([x, x_0, x_1, y, y_2, y_3, y_4, z, z_5, z_6] -> (+ (+ (+ (+ (+ (+ (+ (+ (+ x x_0) x_1) y) y_2) y_3) y_4) z) z_5) z_6)), [x, x, x, y, y, y, y, z, z, z])",
        ImmutableList.of("x", "y", "z")
    );
  }

  /**
   * Asserts the parse tree of {@code expression} with and without constant flattening.
   */
  private void validateFlatten(String expression, String withoutFlatten, String withFlatten)
  {
    Assert.assertEquals(expression, withoutFlatten, Parser.parse(expression, ExprMacroTable.nil(), false).toString());
    Assert.assertEquals(expression, withFlatten, Parser.parse(expression, ExprMacroTable.nil(), true).toString());
  }

  private void validateParser(String expression, String expected, List<String> identifiers)
  {
    validateParser(expression, expected, identifiers, ImmutableSet.copyOf(identifiers), Collections.emptySet());
  }

  private void validateParser(String expression, String expected, List<String> identifiers, Set<String> scalars)
  {
    validateParser(expression, expected, identifiers, scalars, Collections.emptySet());
  }

  /**
   * Asserts the stringified parse tree plus the binding analysis: required
   * columns, scalar variables, and array variables.
   */
  private void validateParser(
      String expression,
      String expected,
      List<String> identifiers,
      Set<String> scalars,
      Set<String> arrays
  )
  {
    final Expr parsed = Parser.parse(expression, ExprMacroTable.nil());
    final Expr.BindingDetails deets = parsed.analyzeInputs();
    Assert.assertEquals(expression, expected, parsed.toString());
    Assert.assertEquals(expression, identifiers, deets.getRequiredColumnsList());
    Assert.assertEquals(expression, scalars, deets.getScalarVariables());
    Assert.assertEquals(expression, arrays, deets.getArrayVariables());
  }

  /**
   * Asserts that applying unapplied identifiers transforms {@code unapplied}
   * into {@code applied} without mutating the original parse tree.
   */
  private void validateApplyUnapplied(
      String expression,
      String unapplied,
      String applied,
      List<String> identifiers
  )
  {
    final Expr parsed = Parser.parse(expression, ExprMacroTable.nil());
    Expr.BindingDetails deets = parsed.analyzeInputs();
    Parser.validateExpr(parsed, deets);
    final Expr transformed = Parser.applyUnappliedIdentifiers(parsed, deets, identifiers);
    Assert.assertEquals(expression, unapplied, parsed.toString());
    // Fixed: the assertion message was accidentally `applied` (duplicating the
    // expected value); use `expression` so failures report the source
    // expression, consistent with every other helper in this class.
    Assert.assertEquals(expression, applied, transformed.toString());
  }

  private void validateConstantExpression(String expression, Object expected)
  {
    Assert.assertEquals(
        expression,
        expected,
        Parser.parse(expression, ExprMacroTable.nil()).eval(Parser.withMap(ImmutableMap.of())).value()
    );
  }

  private void validateConstantExpression(String expression, Object[] expected)
  {
    Assert.assertArrayEquals(
        expression,
        expected,
        (Object[]) Parser.parse(expression, ExprMacroTable.nil()).eval(Parser.withMap(ImmutableMap.of())).value()
    );
  }
}
/* * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazon.opendistroforelasticsearch.ad.transport; import com.amazon.opendistroforelasticsearch.ad.ml.ModelManager; import com.amazon.opendistroforelasticsearch.ad.ml.ThresholdingResult; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class ThresholdResultTransportAction extends HandledTransportAction<ThresholdResultRequest, ThresholdResultResponse> { private static final Logger LOG = LogManager.getLogger(ThresholdResultTransportAction.class); private ModelManager manager; @Inject public ThresholdResultTransportAction(ActionFilters actionFilters, TransportService transportService, ModelManager manager) { super(ThresholdResultAction.NAME, transportService, actionFilters, ThresholdResultRequest::new); this.manager = manager; } @Override protected void doExecute(Task task, ThresholdResultRequest request, ActionListener<ThresholdResultResponse> listener) { try { LOG.info("Serve threshold request for {}", request.getModelID()); ThresholdingResult result = manager.getThresholdingResult(request.getAdID(), request.getModelID(), request.getRCFScore()); 
listener.onResponse(new ThresholdResultResponse(result.getGrade(), result.getConfidence())); } catch (Exception e) { LOG.error(e); listener.onFailure(e); } } }
package m.co.rh.id.a_news_provider.app.ui.component.settings; import android.app.Activity; import android.os.Handler; import android.view.View; import android.view.ViewGroup; import android.widget.ProgressBar; import android.widget.ScrollView; import android.widget.TextView; import com.google.android.material.floatingactionbutton.FloatingActionButton; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.util.concurrent.ExecutorService; import io.reactivex.rxjava3.android.schedulers.AndroidSchedulers; import io.reactivex.rxjava3.schedulers.Schedulers; import io.reactivex.rxjava3.subjects.BehaviorSubject; import m.co.rh.id.a_news_provider.R; import m.co.rh.id.a_news_provider.app.provider.RxProviderModule; import m.co.rh.id.a_news_provider.app.rx.RxDisposer; import m.co.rh.id.a_news_provider.app.ui.component.AppBarSV; import m.co.rh.id.a_news_provider.app.util.UiUtils; import m.co.rh.id.a_news_provider.base.BaseApplication; import m.co.rh.id.a_news_provider.base.provider.FileHelper; import m.co.rh.id.alogger.ILogger; import m.co.rh.id.anavigator.StatefulView; import m.co.rh.id.anavigator.annotation.NavInject; import m.co.rh.id.aprovider.Provider; public class LogPage extends StatefulView<Activity> { private static final String TAG = LogPage.class.getName(); @NavInject private AppBarSV mAppBarSV; private transient Provider mSvProvider; public LogPage() { mAppBarSV = new AppBarSV(); } @Override protected View createView(Activity activity, ViewGroup container) { View view = activity.getLayoutInflater().inflate(R.layout.page_log, container, false); ViewGroup rootLayout = view.findViewById(R.id.root_layout); ViewGroup containerAppBar = view.findViewById(R.id.container_app_bar); mAppBarSV.setTitle(activity.getString(R.string.log_file)); containerAppBar.addView(mAppBarSV.buildView(activity, rootLayout)); ProgressBar progressBar = view.findViewById(R.id.progress_circular); View noRecord = view.findViewById(R.id.no_record); ScrollView 
scrollView = view.findViewById(R.id.scroll_view); TextView textView = view.findViewById(R.id.text_content); Provider provider = BaseApplication.of(activity).getProvider(); if (mSvProvider != null) { mSvProvider.dispose(); } mSvProvider = Provider.createProvider(activity.getApplicationContext(), new RxProviderModule()); FileHelper fileHelper = provider.get(FileHelper.class); File logFile = fileHelper.getLogFile(); FloatingActionButton fabClear = view.findViewById(R.id.fab_clear); FloatingActionButton fabShare = view.findViewById(R.id.fab_share); fabShare.setOnClickListener(v -> { try { UiUtils.shareFile(activity, logFile, activity.getString(R.string.share_log_file)); } catch (Throwable e) { provider.get(ILogger.class) .e(TAG, activity.getString(R.string.error_sharing_log_file), e); } }); BehaviorSubject<File> subject = BehaviorSubject.createDefault(logFile); fabClear.setOnClickListener(view1 -> { fileHelper.clearLogFile(); provider.get(ILogger.class).i(TAG, activity.getString(R.string.log_file_deleted)); provider.get(Handler.class) .post(() -> subject.onNext(logFile)); }); mSvProvider.get(RxDisposer.class).add("readLogFile", subject. 
observeOn(Schedulers.from(BaseApplication.of(activity) .getProvider().get(ExecutorService.class))) .map(file -> { if (!file.exists()) { return ""; } else { StringBuilder stringBuilder = new StringBuilder(); BufferedReader bufferedReader = new BufferedReader(new FileReader(file)); char[] buff = new char[2048]; int b = bufferedReader.read(buff); while (b != -1) { stringBuilder.append(buff); b = bufferedReader.read(buff); } return stringBuilder.toString(); } }) .observeOn(AndroidSchedulers.mainThread()) .subscribe(s -> { progressBar.setVisibility(View.GONE); textView.setText(s); if (s.isEmpty()) { noRecord.setVisibility(View.VISIBLE); scrollView.setVisibility(View.GONE); fabShare.setVisibility(View.GONE); fabClear.setVisibility(View.GONE); } else { noRecord.setVisibility(View.GONE); scrollView.setVisibility(View.VISIBLE); scrollView.post(() -> scrollView.fullScroll(View.FOCUS_DOWN)); fabShare.setVisibility(View.VISIBLE); fabClear.setVisibility(View.VISIBLE); } })); return view; } @Override public void dispose(Activity activity) { super.dispose(activity); if (mSvProvider != null) { mSvProvider.dispose(); mSvProvider = null; } } }
package org.wikipedia.analytics;

import java.lang.System;

// NOTE(review): this appears to be a compiler/IDE-generated Java stub of a Kotlin
// class (note the @kotlin.Metadata annotation and the placeholder method bodies
// that return 0/0L/null and empty void bodies). Do not edit by hand — regenerate
// from the Kotlin source instead. The @kotlin.Metadata payload strings below are
// machine-encoded Kotlin signature data and must be preserved byte-for-byte.
@kotlin.Metadata(mv = {1, 6, 0}, k = 1, d1 = {"\u0000F\n\u0002\u0018\u0002\n\u0002\u0010\u0000\n\u0000\n\u0002\u0010\b\n\u0000\n\u0002\u0010\t\n\u0002\b\f\n\u0002\u0018\u0002\n\u0002\b\u0007\n\u0002\u0018\u0002\n\u0002\b\u001c\n\u0002\u0010\u0002\n\u0002\b\u0003\n\u0002\u0018\u0002\n\u0002\b\u0005\n\u0002\u0018\u0002\n\u0000\n\u0002\u0018\u0002\n\u0002\b\u0003\b\u0007\u0018\u0000 E2\u00020\u0001:\u0002DEBy\b\u0017\u0012\u0006\u0010\u0002\u001a\u00020\u0003\u0012\u0006\u0010\u0004\u001a\u00020\u0005\u0012\u0006\u0010\u0006\u001a\u00020\u0005\u0012\u0006\u0010\u0007\u001a\u00020\u0003\u0012\u0006\u0010\b\u001a\u00020\u0003\u0012\u0006\u0010\t\u001a\u00020\u0003\u0012\u0006\u0010\n\u001a\u00020\u0003\u0012\u0006\u0010\u000b\u001a\u00020\u0003\u0012\u0006\u0010\f\u001a\u00020\u0003\u0012\u0006\u0010\r\u001a\u00020\u0003\u0012\u0006\u0010\u000e\u001a\u00020\u0003\u0012\u0006\u0010\u000f\u001a\u00020\u0003\u0012\u0006\u0010\u0010\u001a\u00020\u0003\u0012\b\u0010\u0011\u001a\u0004\u0018\u00010\u0012\u00a2\u0006\u0002\u0010\u0013B\u0005\u00a2\u0006\u0002\u0010\u0014J\u000e\u00106\u001a\u0002072\u0006\u0010\u0019\u001a\u00020\u0005J\u0006\u00108\u001a\u000207J\u000e\u00109\u001a\u0002072\u0006\u0010:\u001a\u00020;J\u0006\u0010<\u001a\u000207J\u0006\u0010=\u001a\u00020\u0005J!\u0010>\u001a\u0002072\u0006\u0010?\u001a\u00020\u00002\u0006\u0010@\u001a\u00020A2\u0006\u0010B\u001a\u00020CH\u00c7\u0001R\u001a\u0010\u0006\u001a\u00020\u0005X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b\u0015\u0010\u0016\"\u0004\b\u0017\u0010\u0018R\u001c\u0010\u0019\u001a\b\u0012\u0004\u0012\u00020\u00050\u001a8\u0002X\u0083\u0004\u00a2\u0006\b\n\u0000\u0012\u0004\b\u001b\u0010\u0014R\u001a\u0010\u000e\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b\u001c\u0010\u001d\"\u0004\b\u001e\u0010\u001fR\u001a\u0010\u000b\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b \u0010\u001d\"\u0004\b!\u0010\u001fR\u001a\u0010\f\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b\"\u0010\u001d\"\u0004\b#\u0010\u001fR\u001a\u0010\n\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b$\u0010\u001d\"\u0004\b%\u0010\u001fR\u001a\u0010\t\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b&\u0010\u001d\"\u0004\b\'\u0010\u001fR\u001a\u0010\b\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b(\u0010\u001d\"\u0004\b)\u0010\u001fR\u001a\u0010\r\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b*\u0010\u001d\"\u0004\b+\u0010\u001fR\u001a\u0010\u0007\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b,\u0010\u001d\"\u0004\b-\u0010\u001fR\u001a\u0010\u0010\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b.\u0010\u001d\"\u0004\b/\u0010\u001fR\u001a\u0010\u000f\u001a\u00020\u0003X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b0\u0010\u001d\"\u0004\b1\u0010\u001fR\u001a\u0010\u0004\u001a\u00020\u0005X\u0086\u000e\u00a2\u0006\u000e\n\u0000\u001a\u0004\b2\u0010\u0016\"\u0004\b3\u0010\u0018R\u0011\u00104\u001a\u00020\u00038F\u00a2\u0006\u0006\u001a\u0004\b5\u0010\u001d\u00a8\u0006F"}, d2 = {"Lorg/wikipedia/analytics/SessionData;", "", "seen1", "", "startTime", "", "lastTouchTime", "pagesFromSearch", "pagesFromRandom", "pagesFromLangLink", "pagesFromInternal", "pagesFromExternal", "pagesFromHistory", "pagesFromReadingList", "pagesFromBack", "pagesWithNoDescription", "pagesFromSuggestedEdits", "serializationConstructorMarker", "Lkotlinx/serialization/internal/SerializationConstructorMarker;", "(IJJIIIIIIIIIILkotlinx/serialization/internal/SerializationConstructorMarker;)V", "()V", "getLastTouchTime", "()J", "setLastTouchTime", "(J)V", "leadLatency", "Lorg/wikipedia/util/MathUtil$Averaged;", "getLeadLatency$annotations", "getPagesFromBack", "()I", "setPagesFromBack", "(I)V", "getPagesFromExternal", "setPagesFromExternal", "getPagesFromHistory", "setPagesFromHistory", "getPagesFromInternal", "setPagesFromInternal", "getPagesFromLangLink", "setPagesFromLangLink", "getPagesFromRandom", "setPagesFromRandom", "getPagesFromReadingList", "setPagesFromReadingList", "getPagesFromSearch", "setPagesFromSearch", "getPagesFromSuggestedEdits", "setPagesFromSuggestedEdits", "getPagesWithNoDescription", "setPagesWithNoDescription", "getStartTime", "setStartTime", "totalPages", "getTotalPages", "addLeadLatency", "", "addPageFromBack", "addPageViewed", "entry", "Lorg/wikipedia/history/HistoryEntry;", "addPageWithNoDescription", "getLeadLatency", "write$Self", "self", "output", "Lkotlinx/serialization/encoding/CompositeEncoder;", "serialDesc", "Lkotlinx/serialization/descriptors/SerialDescriptor;", "$serializer", "Companion", "app_alphaDebug"})
@kotlinx.serialization.Serializable()
public final class SessionData {
    @org.jetbrains.annotations.NotNull()
    public static final org.wikipedia.analytics.SessionData.Companion Companion = null;
    private final org.wikipedia.util.MathUtil.Averaged<java.lang.Long> leadLatency = null;
    // Counters for how a page was reached during a session; bodies below are stubs.
    private long startTime;
    private long lastTouchTime;
    private int pagesFromSearch = 0;
    private int pagesFromRandom = 0;
    private int pagesFromLangLink = 0;
    private int pagesFromInternal = 0;
    private int pagesFromExternal = 0;
    private int pagesFromHistory = 0;
    private int pagesFromReadingList = 0;
    private int pagesFromBack = 0;
    private int pagesWithNoDescription = 0;
    private int pagesFromSuggestedEdits = 0;

    public SessionData() {
        super();
    }

    // kotlinx.serialization-generated serialization hook (stub body here).
    @kotlin.jvm.JvmStatic()
    public static final void write$Self(@org.jetbrains.annotations.NotNull()
    org.wikipedia.analytics.SessionData self, @org.jetbrains.annotations.NotNull()
    kotlinx.serialization.encoding.CompositeEncoder output, @org.jetbrains.annotations.NotNull()
    kotlinx.serialization.descriptors.SerialDescriptor serialDesc) {
    }

    @kotlinx.serialization.Transient()
    @java.lang.Deprecated()
    private static void getLeadLatency$annotations() {
    }

    public final long getStartTime() {
        return 0L;
    }

    public final void setStartTime(long p0) {
    }

    public final long getLastTouchTime() {
        return 0L;
    }

    public final void setLastTouchTime(long p0) {
    }

    public final int getPagesFromSearch() {
        return 0;
    }

    public final void setPagesFromSearch(int p0) {
    }

    public final int getPagesFromRandom() {
        return 0;
    }

    public final void setPagesFromRandom(int p0) {
    }

    public final int getPagesFromLangLink() {
        return 0;
    }

    public final void setPagesFromLangLink(int p0) {
    }

    public final int getPagesFromInternal() {
        return 0;
    }

    public final void setPagesFromInternal(int p0) {
    }

    public final int getPagesFromExternal() {
        return 0;
    }

    public final void setPagesFromExternal(int p0) {
    }

    public final int getPagesFromHistory() {
        return 0;
    }

    public final void setPagesFromHistory(int p0) {
    }

    public final int getPagesFromReadingList() {
        return 0;
    }

    public final void setPagesFromReadingList(int p0) {
    }

    public final int getPagesFromBack() {
        return 0;
    }

    public final void setPagesFromBack(int p0) {
    }

    public final int getPagesWithNoDescription() {
        return 0;
    }

    public final void setPagesWithNoDescription(int p0) {
    }

    public final int getPagesFromSuggestedEdits() {
        return 0;
    }

    public final void setPagesFromSuggestedEdits(int p0) {
    }

    public final void addPageViewed(@org.jetbrains.annotations.NotNull()
    org.wikipedia.history.HistoryEntry entry) {
    }

    public final long getLeadLatency() {
        return 0L;
    }

    public final void addLeadLatency(long leadLatency) {
    }

    public final void addPageFromBack() {
    }

    public final void addPageWithNoDescription() {
    }

    public final int getTotalPages() {
        return 0;
    }

    // Generated companion exposing the kotlinx.serialization serializer.
    @kotlin.Metadata(mv = {1, 6, 0}, k = 1, d1 = {"\u0000\u0016\n\u0002\u0018\u0002\n\u0002\u0010\u0000\n\u0002\b\u0002\n\u0002\u0018\u0002\n\u0002\u0018\u0002\n\u0000\b\u0086\u0003\u0018\u00002\u00020\u0001B\u0007\b\u0002\u00a2\u0006\u0002\u0010\u0002J\u000f\u0010\u0003\u001a\b\u0012\u0004\u0012\u00020\u00050\u0004H\u00c6\u0001\u00a8\u0006\u0006"}, d2 = {"Lorg/wikipedia/analytics/SessionData$Companion;", "", "()V", "serializer", "Lkotlinx/serialization/KSerializer;", "Lorg/wikipedia/analytics/SessionData;", "app_alphaDebug"})
    public static final class Companion {

        private Companion() {
            super();
        }

        @org.jetbrains.annotations.NotNull()
        public final kotlinx.serialization.KSerializer<org.wikipedia.analytics.SessionData> serializer() {
            return null;
        }
    }

    // Generated kotlinx.serialization serializer implementation (stub bodies).
    @kotlin.Metadata(mv = {1, 6, 0}, k = 1, d1 = {"\u00006\n\u0000\n\u0002\u0018\u0002\n\u0002\u0018\u0002\n\u0002\b\u0002\n\u0002\u0018\u0002\n\u0002\b\u0003\n\u0002\u0010\u0011\n\u0002\u0018\u0002\n\u0002\b\u0003\n\u0002\u0018\u0002\n\u0000\n\u0002\u0010\u0002\n\u0000\n\u0002\u0018\u0002\n\u0002\b\u0002\b\u00c7\u0002\u0018\u00002\b\u0012\u0004\u0012\u00020\u00020\u0001B\u0007\b\u0002\u00a2\u0006\u0002\u0010\u0003J\u0018\u0010\b\u001a\f\u0012\b\u0012\u0006\u0012\u0002\b\u00030\n0\tH\u00d6\u0001\u00a2\u0006\u0002\u0010\u000bJ\u0011\u0010\f\u001a\u00020\u00022\u0006\u0010\r\u001a\u00020\u000eH\u00d6\u0001J\u0019\u0010\u000f\u001a\u00020\u00102\u0006\u0010\u0011\u001a\u00020\u00122\u0006\u0010\u0013\u001a\u00020\u0002H\u00d6\u0001R\u0014\u0010\u0004\u001a\u00020\u00058VX\u00d6\u0005\u00a2\u0006\u0006\u001a\u0004\b\u0006\u0010\u0007\u00a8\u0006\u0014"}, d2 = {"org/wikipedia/analytics/SessionData.$serializer", "Lkotlinx/serialization/internal/GeneratedSerializer;", "Lorg/wikipedia/analytics/SessionData;", "()V", "descriptor", "Lkotlinx/serialization/descriptors/SerialDescriptor;", "getDescriptor", "()Lkotlinx/serialization/descriptors/SerialDescriptor;", "childSerializers", "", "Lkotlinx/serialization/KSerializer;", "()[Lkotlinx/serialization/KSerializer;", "deserialize", "decoder", "Lkotlinx/serialization/encoding/Decoder;", "serialize", "", "encoder", "Lkotlinx/serialization/encoding/Encoder;", "value", "app_alphaDebug"})
    @java.lang.Deprecated()
    public static final class $serializer implements kotlinx.serialization.internal.GeneratedSerializer<org.wikipedia.analytics.SessionData> {
        @org.jetbrains.annotations.NotNull()
        public static final org.wikipedia.analytics.SessionData.$serializer INSTANCE = null;

        private $serializer() {
            super();
        }

        @org.jetbrains.annotations.NotNull()
        @java.lang.Override()
        public kotlinx.serialization.KSerializer<?>[] childSerializers() {
            return null;
        }

        @org.jetbrains.annotations.NotNull()
        @java.lang.Override()
        public org.wikipedia.analytics.SessionData deserialize(@org.jetbrains.annotations.NotNull()
        kotlinx.serialization.encoding.Decoder decoder) {
            return null;
        }

        @org.jetbrains.annotations.NotNull()
        @java.lang.Override()
        public kotlinx.serialization.descriptors.SerialDescriptor getDescriptor() {
            return null;
        }

        @java.lang.Override()
        public void serialize(@org.jetbrains.annotations.NotNull()
        kotlinx.serialization.encoding.Encoder encoder, @org.jetbrains.annotations.NotNull()
        org.wikipedia.analytics.SessionData value) {
        }

        @org.jetbrains.annotations.NotNull()
        public kotlinx.serialization.KSerializer<?>[] typeParametersSerializers() {
            return null;
        }
    }
}
package ghost.framework.web.module.interceptors;

import ghost.framework.beans.annotation.stereotype.Component;
import ghost.framework.context.bean.factory.AbstractBeanFactoryContainer;

/**
 * package: ghost.framework.web.module.interceptors
 *
 * HTTP interceptor container: a bean-factory container that holds
 * {@code IHttpInterceptor} instances (via {@code AbstractBeanFactoryContainer}).
 *
 * @Author: Guo Shucan {gsc-e590}
 * @link: Mobile: 13715848993, QQ 27048384
 * @Description: HTTP interceptor container
 * @Date: 2020/2/1:19:41
 */
@Component
public class HttpInterceptorContainer extends AbstractBeanFactoryContainer<IHttpInterceptor> implements IHttpInterceptorContainer<IHttpInterceptor> {
}
/*
 * Copyright (c) 2000 World Wide Web Consortium,
 * (Massachusetts Institute of Technology, Institut National de
 * Recherche en Informatique et en Automatique, Keio University). All
 * Rights Reserved. This program is distributed under the W3C's Software
 * Intellectual Property License. This program is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY; without even
 * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE.
 * See W3C License http://www.w3.org/Consortium/Legal/ for more details.
 *
 * $Id: ChildSelectorImpl.java,v 1.2 2000/07/27 21:32:26 plehegar Exp $
 */
package org.w3c.flute.parser.selectors;

import org.w3c.css.sac.DescendantSelector;
import org.w3c.css.sac.Selector;
import org.w3c.css.sac.SimpleSelector;

/**
 * A {@link DescendantSelector} whose type is {@link Selector#SAC_CHILD_SELECTOR},
 * pairing an ancestor selector with a simple child selector.
 *
 * @version $Revision: 1.2 $
 * @author Philippe Le Hegaret
 */
public class ChildSelectorImpl implements DescendantSelector {

    Selector parent;
    SimpleSelector child;

    /**
     * Creates a new ChildSelectorImpl.
     *
     * @param parent the ancestor (left-hand) selector
     * @param child  the simple selector on the right-hand side
     */
    public ChildSelectorImpl(Selector parent, SimpleSelector child) {
        this.parent = parent;
        this.child = child;
    }

    /** An integer indicating the type of <code>Selector</code>. */
    public short getSelectorType() {
        return Selector.SAC_CHILD_SELECTOR;
    }

    /** Returns the parent selector. */
    public Selector getAncestorSelector() {
        return parent;
    }

    /** Returns the simple selector. */
    public SimpleSelector getSimpleSelector() {
        return child;
    }
}
package kore.botssdk.view.tableview; import android.content.Context; import android.content.res.Resources; import android.util.Log; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.ArrayAdapter; import android.widget.LinearLayout; import android.widget.TextView; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import kore.botssdk.view.tableview.model.MiniTableModel; import kore.botssdk.view.tableview.model.TableColumnModel; import kore.botssdk.view.tableview.model.TableColumnWeightModel; import kore.botssdk.view.tableview.providers.TableDataRowBackgroundProvider; import static android.widget.LinearLayout.LayoutParams; /** * The abstract implementation of an adapter used to bring data to a {@link TableView}. * * @author ISchwarz */ public abstract class TableDataAdapter<T> extends ArrayAdapter<T> { private static final String LOG_TAG = TableDataAdapter.class.getName(); private final List<T> data; private TableColumnModel columnModel; private TableDataRowBackgroundProvider<? super T> rowBackgroundProvider; /** * Creates a new TableDataAdapter. * * @param context The context that shall be used. * @param data The data that shall be displayed. */ public TableDataAdapter(final Context context, final T[] data) { this(context, 0, new ArrayList<>(Arrays.asList(data))); } /** * Creates a new TableDataAdapter. * @param context The context that shall be used. * @param data The data that shall be displayed. */ public TableDataAdapter(final Context context, final List<MiniTableModel> data) { this(context, 0, (List<T>) data); } /** * Creates a new TableDataAdapter. (internally used) * * @param context The context that shall be used. * @param columnCount The number of columns. * @param data The data which shall be displayed in the table. 
*/ protected TableDataAdapter(final Context context, final int columnCount, final List<T> data) { this(context, new TableColumnWeightModel(columnCount), data); } /** * Creates a new TableDataAdapter. (internally used) * * @param context The context that shall be used. * @param columnModel The column model to be used. * @param data The data which shall be displayed in the table. */ protected TableDataAdapter(final Context context, final TableColumnModel columnModel, final List<T> data) { super(context, -1, data); this.columnModel = columnModel; this.data = data; } /** * Gives the data object that shall be displayed in the row with the given index. * * @param rowIndex The index of the row to get the data for. * @return The data that shall be displayed in the row with the given index. */ public T getRowData(final int rowIndex) { return getItem(rowIndex); } /** * Gives the data that is set to this adapter. * * @return The data this adapter is currently working with. */ public List<T> getData() { return data; } /** * Gives the {@link Context} of this adapter. (Hint: use this method in the {@code getHeaderView()}-method * to programmatically initialize new views.) * * @return The {@link Context} of this adapter. */ public Context getContext() { return super.getContext(); } /** * Gives the {@link LayoutInflater} of this adapter. (Hint: use this method in the * {@code getHeaderView()}-method to inflate xml-layout-files.) * * @return The {@link LayoutInflater} of the context of this adapter. */ public LayoutInflater getLayoutInflater() { return (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE); } /** * Gives the {@link Resources} of this adapter. (Hint: use this method in the * {@code getCellView()}-method to resolve resources.) * * @return The {@link Resources} of the context of this adapter. */ public Resources getResources() { return getContext().getResources(); } /** * Method that gives the cell views for the different table cells. 
* * @param rowIndex The index of the row to return the table cell view. * @param columnIndex The index of the column to return the table cell view. * @param parentView The view to which the returned view will be added. * @return The created header view for the given column. */ public abstract View getCellView(int rowIndex, int columnIndex, ViewGroup parentView); @Override public final View getView(final int rowIndex, final View convertView, final ViewGroup parent) { final LinearLayout rowView = new LinearLayout(getContext()); final AbsListView.LayoutParams rowLayoutParams = new AbsListView.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT); rowView.setLayoutParams(rowLayoutParams); rowView.setGravity(Gravity.CENTER_VERTICAL); T rowData = null; try { rowData = getItem(rowIndex); } catch (final IndexOutOfBoundsException e) { Log.w(LOG_TAG, "No row date available for row with index " + rowIndex + ". " + "Caught Exception: " + e.getMessage()); } if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) { rowView.setBackgroundDrawable(rowBackgroundProvider.getRowBackground(rowIndex, rowData)); } else { rowView.setBackground(rowBackgroundProvider.getRowBackground(rowIndex, rowData)); } final int tableWidth = parent.getWidth(); for (int columnIndex = 0; columnIndex < columnModel.getColumnCount(); columnIndex++) { View cellView = getCellView(rowIndex, columnIndex, rowView); if (cellView == null) { cellView = new TextView(getContext()); } final int cellWidth = columnModel.getColumnWidth(columnIndex, tableWidth); final LinearLayout.LayoutParams cellLayoutParams = new LinearLayout.LayoutParams(cellWidth, LinearLayout.LayoutParams.WRAP_CONTENT); cellView.setLayoutParams(cellLayoutParams); rowView.addView(cellView); } return rowView; } /** * Sets the {@link TableDataRowBackgroundProvider} that will be used to define the table data rows background. 
* * @param rowBackgroundProvider The {@link TableDataRowBackgroundProvider} that shall be used. */ protected void setRowBackgroundProvider(final TableDataRowBackgroundProvider<? super T> rowBackgroundProvider) { this.rowBackgroundProvider = rowBackgroundProvider; } /** * Gives the {@link TableColumnWeightModel} that is currently used to render the table headers. * * @return The {@link TableColumnModel} which is currently used.. */ protected TableColumnModel getColumnModel() { return columnModel; } /** * Sets the {@link TableColumnModel} that will be used to render the table cells. * * @param columnModel The {@link TableColumnModel} that should be set. */ protected void setColumnModel(final TableColumnModel columnModel) { this.columnModel = columnModel; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.cluster; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.helix.HelixAdmin; import org.apache.helix.HelixDataAccessor; import org.apache.helix.HelixException; import org.apache.helix.HelixManager; import org.apache.helix.PropertyKey; import org.apache.helix.manager.zk.ZKHelixManager; import org.apache.helix.model.HelixConfigScope; import org.apache.helix.model.InstanceConfig; import org.apache.helix.task.JobConfig; import org.apache.helix.task.TargetState; import org.apache.helix.task.TaskConfig; import org.apache.helix.task.TaskDriver; import org.apache.helix.task.TaskState; import org.apache.helix.task.TaskUtil; import org.apache.helix.task.Workflow; import org.apache.helix.task.WorkflowConfig; import org.apache.helix.task.WorkflowContext; import org.apache.helix.tools.ClusterSetup; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import lombok.extern.slf4j.Slf4j; import 
org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.JobException;
import org.apache.gobblin.runtime.listeners.JobListener;
import org.apache.gobblin.util.ConfigUtils;

import static org.apache.helix.task.TaskState.STOPPED;


/**
 * A utility class for working with Gobblin on Helix.
 *
 * @author Yinan Li
 */
@Slf4j
public class HelixUtils {

  /**
   * Create a Helix cluster for the Gobblin Cluster application.
   *
   * @param zkConnectionString the ZooKeeper connection string
   * @param clusterName the Helix cluster name
   */
  public static void createGobblinHelixCluster(String zkConnectionString, String clusterName) {
    createGobblinHelixCluster(zkConnectionString, clusterName, true);
  }

  /**
   * Create a Helix cluster for the Gobblin Cluster application.
   *
   * @param zkConnectionString the ZooKeeper connection string
   * @param clusterName the Helix cluster name
   * @param overwrite true to overwrite exiting cluster, false to reuse existing cluster
   */
  public static void createGobblinHelixCluster(String zkConnectionString, String clusterName, boolean overwrite) {
    ClusterSetup clusterSetup = new ClusterSetup(zkConnectionString);
    // Create the cluster and overwrite if it already exists
    clusterSetup.addCluster(clusterName, overwrite);
    // Helix 0.6.x requires a configuration property to have the form key=value.
    String autoJoinConfig = ZKHelixManager.ALLOW_PARTICIPANT_AUTO_JOIN + "=true";
    clusterSetup.setConfig(HelixConfigScope.ConfigScopeProperty.CLUSTER, clusterName, autoJoinConfig);
  }

  /**
   * Get a Helix instance name.
   *
   * @param namePrefix a prefix of Helix instance names
   * @param instanceId an integer instance ID
   * @return a Helix instance name that is a concatenation of the given prefix and instance ID
   */
  public static String getHelixInstanceName(String namePrefix, int instanceId) {
    return namePrefix + "_" + instanceId;
  }

  // We have switched from Helix JobQueue to WorkFlow based job execution.
  @Deprecated
  public static void submitJobToQueue(JobConfig.Builder jobConfigBuilder, String queueName, String jobName,
      TaskDriver helixTaskDriver, HelixManager helixManager, long jobQueueDeleteTimeoutSeconds) throws Exception {
    submitJobToWorkFlow(jobConfigBuilder, queueName, jobName, helixTaskDriver, helixManager,
        jobQueueDeleteTimeoutSeconds);
  }

  /**
   * Blocks until the Helix {@link WorkflowContext} for the given workflow reports a state for the given job,
   * or throws a {@link JobException} once {@code timeoutMillis} has elapsed.
   */
  static void waitJobInitialization(HelixManager helixManager, String workFlowName, String jobName,
      long timeoutMillis) throws Exception {
    WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);

    // If the helix job is deleted from some other thread or a completely external process,
    // method waitJobCompletion() needs to differentiate between the cases where
    // 1) workflowContext did not get initialized ever, in which case we need to keep waiting, or
    // 2) it did get initialized but deleted soon after, in which case we should stop waiting
    // To overcome this issue, we wait here till workflowContext gets initialized
    long start = System.currentTimeMillis();
    while (workflowContext == null
        || workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName)) == null) {
      if (System.currentTimeMillis() - start > timeoutMillis) {
        log.error("Job cannot be initialized within {} milliseconds, considered as an error", timeoutMillis);
        // FIX: the exception previously carried the raw SLF4J pattern "{}" as its message;
        // format the actual timeout value into the message instead.
        throw new JobException(
            String.format("Job cannot be initialized within %d milliseconds, considered as an error", timeoutMillis));
      }
      workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
      Thread.sleep(1000);
      log.info("Waiting for work flow initialization.");
    }

    log.info("Work flow {} initialized", workFlowName);
  }

  /**
   * Creates a single-job Helix workflow (with the given expiry) and starts it, then waits for the
   * job to be initialized (see {@link #waitJobInitialization}).
   */
  public static void submitJobToWorkFlow(JobConfig.Builder jobConfigBuilder, String workFlowName, String jobName,
      TaskDriver helixTaskDriver, HelixManager helixManager, long workFlowExpiryTime) throws Exception {

    WorkflowConfig workFlowConfig = new WorkflowConfig.Builder().setExpiry(workFlowExpiryTime, TimeUnit.SECONDS).build();
    // Create a work flow for each job with the name being the queue name
    Workflow workFlow = new Workflow.Builder(workFlowName).setWorkflowConfig(workFlowConfig)
        .addJob(jobName, jobConfigBuilder).build();
    // start the workflow
    helixTaskDriver.start(workFlow);
    log.info("Created a work flow {}", workFlowName);

    waitJobInitialization(helixManager, workFlowName, jobName, Long.MAX_VALUE);
  }

  /**
   * Polls the workflow context until the job reaches a terminal state or the optional timeout elapses.
   * A job stuck in STOPPING beyond {@code stoppingStateTimeoutInSeconds} has its workflow deleted
   * (workaround for a Helix bug with unresponsive tasks).
   */
  static void waitJobCompletion(HelixManager helixManager, String workFlowName, String jobName,
      Optional<Long> timeoutInSeconds, Long stoppingStateTimeoutInSeconds)
      throws InterruptedException, TimeoutException {
    log.info("Waiting for job {} to complete...", jobName);
    long endTime = 0;
    long currentTimeMillis = System.currentTimeMillis();
    if (timeoutInSeconds.isPresent()) {
      endTime = currentTimeMillis + timeoutInSeconds.get() * 1000;
    }
    long stoppingStateEndTime = currentTimeMillis + stoppingStateTimeoutInSeconds * 1000;

    while (!timeoutInSeconds.isPresent() || System.currentTimeMillis() <= endTime) {
      WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
      if (workflowContext != null) {
        TaskState jobState = workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName));
        switch (jobState) {
          case STOPPED:
            // user requested cancellation, which is executed by executeCancellation()
            log.info("Job {} is cancelled, it will be deleted now.", jobName);
            HelixUtils.deleteStoppedHelixJob(helixManager, workFlowName, jobName);
            return;
          case FAILED:
          case COMPLETED:
            return;
          case STOPPING:
            log.info("Waiting for job {} to complete... State - {}", jobName, jobState);
            Thread.sleep(1000);
            // Workaround for a Helix bug where a job may be stuck in the STOPPING state due to an unresponsive task.
            if (System.currentTimeMillis() > stoppingStateEndTime) {
              log.info("Deleting workflow {}", workFlowName);
              new TaskDriver(helixManager).delete(workFlowName);
              log.info("Deleted workflow {}", workFlowName);
            }
            // NOTE(review): returns after one pass through STOPPING even when the deadline has not
            // been reached — confirm this is intentional rather than a missing loop-continue.
            return;
          default:
            log.info("Waiting for job {} to complete... State - {}", jobName, jobState);
            Thread.sleep(1000);
        }
      } else {
        // We have waited for WorkflowContext to get initialized,
        // so it is found null here, it must have been deleted in job cancellation process.
        log.info("WorkflowContext not found. Job is probably cancelled.");
        return;
      }
    }

    throw new TimeoutException("task driver wait time [" + timeoutInSeconds + " sec] is expired.");
  }

  /** Returns true when the job has reached a terminal state (or its workflow context no longer exists). */
  static boolean isJobFinished(String workflowName, String jobName, HelixManager helixManager) {
    WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workflowName);
    if (workflowContext == null) {
      // this workflow context doesn't exist, considered as finished.
      return true;
    }

    TaskState jobState = workflowContext.getJobState(TaskUtil.getNamespacedJobName(workflowName, jobName));
    switch (jobState) {
      case STOPPED:
      case FAILED:
      case COMPLETED:
      case ABORTED:
      case TIMED_OUT:
        return true;
      default:
        return false;
    }
  }

  /** Deletes the named workflow and blocks up to {@code timeOut} milliseconds for completion. */
  static void deleteWorkflow(String workflowName, HelixManager helixManager, long timeOut)
      throws InterruptedException {
    TaskDriver taskDriver = new TaskDriver(helixManager);
    taskDriver.deleteAndWaitForCompletion(workflowName, timeOut);
  }

  /**
   * Cancels the given job via its launcher after a timeout, then deletes the (stopped) workflow.
   */
  static void handleJobTimeout(String workFlowName, String jobName, HelixManager helixManager, Object jobLauncher,
      JobListener jobListener) throws InterruptedException {
    try {
      log.warn("Timeout occurred for job launcher {} with job {}", jobLauncher.getClass(), jobName);
      if (jobLauncher instanceof GobblinHelixJobLauncher) {
        ((GobblinHelixJobLauncher) jobLauncher).cancelJob(jobListener);
      } else if (jobLauncher instanceof GobblinHelixDistributeJobExecutionLauncher) {
        ((GobblinHelixDistributeJobExecutionLauncher) jobLauncher).cancel();
      }
    } catch (JobException e) {
      throw new RuntimeException("Unable to cancel job " + jobName + ": ", e);
    }
    // TODO : fix this when HELIX-1180 is completed
    // We should not be deleting a workflow explicitly.
    // Workflow state should be set to a final state, which will remove it automatically because expiry time is set.
    // After that, all delete calls can be replaced by something like HelixUtils.setStateToFinal();
    HelixUtils.deleteStoppedHelixJob(helixManager, workFlowName, jobName);
    log.info("Stopped and deleted the workflow {}", workFlowName);
  }

  /**
   * Deletes the stopped Helix Workflow.
   * Caller should stop the Workflow before calling this method.
   * @param helixManager helix manager
   * @param workFlowName workflow needed to be deleted
   * @param jobName helix job name
   * @throws InterruptedException
   */
  private static void deleteStoppedHelixJob(HelixManager helixManager, String workFlowName, String jobName)
      throws InterruptedException {
    WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
    while (workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName)) != STOPPED) {
      log.info("Waiting for job {} to stop...", jobName);
      workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
      Thread.sleep(1000);
    }
    // deleting the entire workflow, as one workflow contains only one job
    new TaskDriver(helixManager).deleteAndWaitForCompletion(workFlowName, 10000L);
    log.info("Workflow deleted.");
  }

  /**
   * Returns the currently running Helix Workflow Ids given an {@link Iterable} of Gobblin job names. The method returns a
   * {@link java.util.Map} from Gobblin job name to the corresponding Helix Workflow Id. This method iterates
   * over all Helix workflows, and obtains the jobs of each workflow from its jobDag.
   *
   * NOTE: This call is expensive as it results in listing of znodes and subsequently, multiple ZK calls to get the job
   * configuration for each HelixJob. Ideally, this method should be called infrequently e.g. when a job is deleted/cancelled.
   *
   * @param jobNames a list of Gobblin job names.
   * @return a map from jobNames to their Helix Workflow Ids.
   */
  public static Map<String, String> getWorkflowIdsFromJobNames(HelixManager helixManager,
      Collection<String> jobNames) {
    Map<String, String> jobNameToWorkflowId = new HashMap<>();
    TaskDriver taskDriver = new TaskDriver(helixManager);
    Map<String, WorkflowConfig> workflowConfigMap = taskDriver.getWorkflows();
    for (String workflow : workflowConfigMap.keySet()) {
      WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflow);
      //Filter out any stale Helix workflows which are not running.
      if (workflowConfig.getTargetState() != TargetState.START) {
        continue;
      }
      Set<String> helixJobs = workflowConfig.getJobDag().getAllNodes();
      for (String helixJob : helixJobs) {
        Iterator<TaskConfig> taskConfigIterator =
            taskDriver.getJobConfig(helixJob).getTaskConfigMap().values().iterator();
        if (taskConfigIterator.hasNext()) {
          TaskConfig taskConfig = taskConfigIterator.next();
          String jobName = taskConfig.getConfigMap().get(ConfigurationKeys.JOB_NAME_KEY);
          if (jobNames.contains(jobName)) {
            if (!jobNameToWorkflowId.containsKey(jobName)) {
              jobNameToWorkflowId.put(jobName, workflow);
            } else {
              log.warn("JobName {} previously found to have WorkflowId {}; found " + " a different WorkflowId {} for the job; "
                  + "Skipping this entry", jobName, jobNameToWorkflowId.get(jobName), workflow);
            }
            break;
          }
        }
      }
    }
    return jobNameToWorkflowId;
  }

  /**
   * Return the system properties from the input {@link Config} instance
   * @param config
   */
  public static void setSystemProperties(Config config) {
    Properties properties = ConfigUtils.configToProperties(ConfigUtils.getConfig(config,
        GobblinClusterConfigurationKeys.GOBBLIN_CLUSTER_SYSTEM_PROPERTY_PREFIX, ConfigFactory.empty()));
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
      System.setProperty(entry.getKey().toString(), entry.getValue().toString());
    }
  }

  /**
   * A utility method that returns all current live instances in a given Helix cluster. This method assumes that
   * the passed {@link HelixManager} instance is already connected.
   * @param helixManager
   * @return all live instances in the Helix cluster.
   */
  public static List<String> getLiveInstances(HelixManager helixManager) {
    HelixDataAccessor accessor = helixManager.getHelixDataAccessor();
    PropertyKey liveInstancesKey = accessor.keyBuilder().liveInstances();
    return accessor.getChildNames(liveInstancesKey);
  }

  /** Returns true when the named instance currently has a live-instance znode in the cluster. */
  public static boolean isInstanceLive(HelixManager helixManager, String instanceName) {
    HelixDataAccessor accessor = helixManager.getHelixDataAccessor();
    PropertyKey liveInstanceKey = accessor.keyBuilder().liveInstance(instanceName);
    return accessor.getProperty(liveInstanceKey) != null;
  }

  /** Drops the instance from the cluster, logging (not rethrowing) any Helix failure. */
  public static void dropInstanceIfExists(HelixAdmin admin, String clusterName, String helixInstanceName) {
    try {
      admin.dropInstance(clusterName, new InstanceConfig(helixInstanceName));
    } catch (HelixException e) {
      log.error("Could not drop instance: {} due to: {}", helixInstanceName, e);
    }
  }
}
/* * Copyright (c) 1998 - 2011. University Corporation for Atmospheric Research/Unidata * Portions of this software were developed by the Unidata Program at the * University Corporation for Atmospheric Research. * * Access and use of this software shall impose the following obligations * and understandings on the user. The user is granted the right, without * any fee or cost, to use, copy, modify, alter, enhance and distribute * this software, and any derivative works thereof, and its supporting * documentation for any purpose whatsoever, provided that this entire * notice appears in all copies of the software, derivative works and * supporting documentation. Further, UCAR requests that the user credit * UCAR/Unidata in any publications that result from the use of this * software or in any product that includes this software. The names UCAR * and/or Unidata, however, may not be used in any advertising or publicity * to endorse or promote any products or commercial entity unless specific * written permission is obtained from UCAR/Unidata. The user also * understands that UCAR/Unidata is not obligated to provide the user with * any support, consulting, training or assistance of any kind with regard * to the use, operation and performance of this software nor to provide * the user with any updates, revisions, new versions or "bug fixes." * * THIS SOFTWARE IS PROVIDED BY UCAR/UNIDATA "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL UCAR/UNIDATA BE LIABLE FOR ANY SPECIAL, * INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING * FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, * NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION * WITH THE ACCESS, USE OR PERFORMANCE OF THIS SOFTWARE. 
 */

package ucar.nc2.grib.grib1;

/**
 * Helper class for pre-defined grid definition section (GDS) .
 * These are NCEP.
 *
 * @see "http://www.nco.ncep.noaa.gov/pmb/docs/on388/tableb.html"
 */
public class Grib1GdsPredefined {
  private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Grib1GdsPredefined.class);

  /**
   * Constructs a Grib1Gds object from a pds and predefined tables.
   *
   * @param center center id
   * @param gridNumber from pds (octet 7)
   * @return predefined GDS
   * @throws IllegalArgumentException if the center is not NCEP (7) — only NCEP grids are predefined here
   */
  public static Grib1Gds factory(int center, int gridNumber) {
    if (center == 7) {
      return factoryNCEP(gridNumber);
    } else
      throw new IllegalArgumentException("Dont have predefined GDS " + gridNumber + " from " + center);
  }

  // 21-26, 61-64: International Exchange and Family of Services (FOS) grids. So may be more general than NCEP
  // Argument order for NcepLatLon: nx, ny, la1, lo1, la2, lo2, deltaLon, deltaLat, resolution, scan
  private static Grib1Gds factoryNCEP(int gridNumber) {
    switch (gridNumber) {
      case 21:
        return new NcepLatLon(gridNumber, 37, 36, 0.0F, 0.0F, 90.0F, 180.0F, 5.0F, 2.5F, (byte) 0x88, (byte) 64);
      case 22:
        return new NcepLatLon(gridNumber, 37, 36, 0.0F, -180.0F, 90.0F, 0.0F, 5.0F, 2.5F, (byte) 0x88, (byte) 64);
      case 23:
        return new NcepLatLon(gridNumber, 37, 36, -90.0F, 0.0F, 180.0F, 0.0F, 5.0F, 2.5F, (byte) 0x88, (byte) 64);
      case 24:
        return new NcepLatLon(gridNumber, 37, 36, -90.0F, -180.0F, 0.0F, 0.0F, 5.0F, 2.5F, (byte) 0x88, (byte) 64);
      case 25:
        return new NcepLatLon(gridNumber, 72, 18, 0.0F, 0.0F, 90.0F, 355.0F, 5.0F, 5.0F, (byte) 0x88, (byte) 64);
      case 26:
        return new NcepLatLon(gridNumber, 72, 18, -90.0F, 0.0F, 0.0F, 355.0F, 5.0F, 5.0F, (byte) 0x88, (byte) 64);
      case 61:
        return new NcepLatLon(gridNumber, 91, 45, 0.0F, 0.0F, 90.0F, 180.0F, 2.0F, 2.0F, (byte) 0x88, (byte) 64);
      case 62:
        return new NcepLatLon(gridNumber, 91, 45, -90.0F, 0.0F, 0.0F, 180.0F, 2.0F, 2.0F, (byte) 0x88, (byte) 64);
      // NOTE(review): grid 63 has parameters identical to grid 62 here; NCEP table B defines 61-64 as the
      // four quadrants, so one of these entries looks like a copy-paste — verify against the table.
      case 63:
        return new NcepLatLon(gridNumber, 91, 45, -90.0F, 0.0F, 0.0F, 180.0F, 2.0F, 2.0F, (byte) 0x88, (byte) 64);
      case 64:
        return new NcepLatLon(gridNumber, 91, 45, -90.0F, -180.0F, 0.0F, 0.0F, 2.0F, 2.0F, (byte) 0x88, (byte) 64);
      case 87:
        // Polar stereographic grid; argument order: nx, ny, la1, lo1, lov, dX, dY, resolution, scan
        return new NcepPS(gridNumber, 81, 62, 22.8756F, 239.5089F, 255.0F, 68153.0F, 68153.0F, (byte) 0x08, (byte) 64);
    }
    throw new IllegalArgumentException("Dont have predefined GDS " + gridNumber + " from NCEP (center 7)");
  }

  /** Predefined lat/lon grid; uses 1000 * gridNumber as a synthetic template id to keep it distinct. */
  private static class NcepLatLon extends Grib1Gds.LatLon {
    NcepLatLon(int gridNumber, int nx, int ny, float la1, float lo1, float la2, float lo2, float deltaLon,
        float deltaLat, byte resolution, byte scan) {
      super(1000 * gridNumber);
      this.nx = nx;
      this.ny = ny;
      this.la1 = la1;
      this.lo1 = lo1;
      this.la2 = la2;
      this.lo2 = lo2;
      this.deltaLon = deltaLon;
      this.deltaLat = deltaLat;
      this.resolution = resolution;
      this.scanMode = scan;
    }
  }

  /** Predefined polar-stereographic grid; same synthetic template-id convention as NcepLatLon. */
  private static class NcepPS extends Grib1Gds.PolarStereographic {
    NcepPS(int gridNumber, int nx, int ny, float la1, float lo1, float lov, float dX, float dY, byte resolution,
        byte scan) {
      super(1000 * gridNumber);
      this.nx = nx;
      this.ny = ny;
      this.la1 = la1;
      this.lo1 = lo1;
      this.lov = lov;
      this.dX = dX;
      this.dY = dY;
      this.resolution = resolution;
      this.scanMode = scan;
    }
  }
}
package com.linkedin.dagli.transformer;

import com.linkedin.dagli.placeholder.Placeholder;
import com.linkedin.dagli.producer.ProducerType;
import com.linkedin.dagli.transformer.internal.PreparedTransformerInternalAPI;
import java.util.function.Function;


/**
 * Transformers take at least one {@link com.linkedin.dagli.preparer.Preparer} input(s) and produce a single output
 * (though this output may itself be a tuple, list, etc.)  They are the core conceit of Dagli, comprising all the nodes
 * of the computational directed acyclic graph other than the roots.
 *
 * PreparedTransformers can be applied to their inputs without further preparation to produce an output.  Prepared
 * transformers extend {@link PreparableTransformer}s; you can re-prepare a prepared transformer, but this is unusual
 * and the result is implementation specific (often just resulting in the original prepared transformer.)
 *
 * Note: implementations of PreparedTransformers should generally extend one of the AbstractPreparedTransformerX
 * classes, not implement this interface directly!
 *
 * @param <R> the type of output
 */
public interface PreparedTransformer<R> extends Transformer<R>, ProducerType<R, PreparedTransformer<R>> {
  @Override
  PreparedTransformerInternalAPI<R, ? extends PreparedTransformer<R>> internalAPI();

  /**
   * Casts a producer to an effective "supertype" interface.  The semantics of the producer guarantee that the returned
   * type is valid for the instance.
   *
   * Note that although this and other {@code cast(...)} methods are safe, this safety extends only to the
   * interfaces for which they are implemented.  The covariance and contravariance relationships existing for these
   * interfaces do not necessarily hold for their derived classes.  For example, a {@code PreparedTransformer<String>}
   * is also a {@code PreparedTransformer<Object>}, but a {@code MyTransformer<String>} cannot necessarily be safely
   * treated as a {@code MyTransformer<Object>}.
   *
   * @param transformer the transformer to cast
   * @param <R> the type of result of the returned transformer
   * @return the passed transformer, typed to a new "supertype" of the original
   */
  @SuppressWarnings("unchecked")  // safe: a PreparedTransformer producing a subtype of R also produces R
  static <R> PreparedTransformer<R> cast(PreparedTransformer<? extends R> transformer) {
    return (PreparedTransformer<R>) transformer;
  }

  /**
   * Creates a new {@link MappedIterable.Prepared} that will obtain a prepared transformer from the given "factory
   * function", which will almost always be a {@code withInput(...)}-type method.
   *
   * For example, given a hypothetical {@code Concatenation} transformer that concatenates its two String inputs
   * provided as {@code Concatenation::withInputA(...)} and {@code Concatenation::withInputB(...)}, and wanted to
   * concatenate the String {@code "PREFIX"} to every String in lists of Strings, we could write something like:
   * <pre>{@code
   *   Placeholder<List<String>> stringList = new Placeholder<>();
   *   Concatenation prefixedString = new Concatenation().withInputA(new Constant<>("PREFIX"));
   *   MappedIterable.Prepared<String, String> prefixedStrings =
   *     PreparedTransformer.mapped(prefixedString::withInputB).withMappedInput(stringList);
   * }</pre>
   *
   * @param preparedWithMappedInputFunction a function that, given a placeholder for a single mapped element, returns
   *                                        the prepared transformer to apply to that element (typically a
   *                                        {@code withInput(...)}-style method reference)
   * @param <T> the type of element in the iterable input to be mapped
   * @param <R> the type of result produced by the per-element transformer
   * @param <Q> the concrete type of the per-element prepared transformer
   * @return a new {@link MappedIterable.Prepared} wrapping the transformer produced by the given function
   */
  static <T, R, Q extends PreparedTransformer<? extends R>> MappedIterable.Prepared<T, R> mapped(
      Function<? super Placeholder<T>, Q> preparedWithMappedInputFunction) {
    return new MappedIterable.Prepared<>(preparedWithMappedInputFunction);
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.ttl; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SimpleCollector; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fieldvisitor.UidAndRoutingFieldsVisitor; import org.elasticsearch.index.mapper.FieldMapper; import 
org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.settings.NodeSettingsService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;


/**
 * A node level service that delete expired docs on node primary shards.
 */
public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLService> {

    public static final String INDICES_TTL_INTERVAL = "indices.ttl.interval";
    public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge";

    private final ClusterService clusterService;
    private final IndicesService indicesService;
    private final TransportBulkAction bulkAction;
    // maximum number of delete actions accumulated before a bulk request is flushed
    private final int bulkSize;
    private PurgerThread purgerThread;

    @Inject
    public IndicesTTLService(Settings settings, ClusterService clusterService, IndicesService indicesService, NodeSettingsService nodeSettingsService, TransportBulkAction bulkAction) {
        super(settings);
        this.clusterService = clusterService;
        this.indicesService = indicesService;
        // FIX: use the INDICES_TTL_INTERVAL constant instead of repeating the literal setting key
        TimeValue interval = this.settings.getAsTime(INDICES_TTL_INTERVAL, TimeValue.timeValueSeconds(60));
        this.bulkAction = bulkAction;
        this.bulkSize = this.settings.getAsInt("indices.ttl.bulk_size", 10000);
        this.purgerThread = new PurgerThread(EsExecutors.threadName(settings, "[ttl_expire]"), interval);

        nodeSettingsService.addListener(new ApplySettings());
    }

    @Override
    protected void doStart() throws ElasticsearchException {
        this.purgerThread.start();
    }

    @Override
    protected void doStop() throws ElasticsearchException {
        try {
            this.purgerThread.shutdown();
        } catch (InterruptedException e) {
            // FIX: Thread.interrupted() cleared the interrupt flag; re-interrupt so callers can observe it
            Thread.currentThread().interrupt();
        }
    }

    @Override
    protected void doClose() throws ElasticsearchException {
    }

    /** Daemon thread that periodically purges expired docs from eligible local primary shards. */
    private class PurgerThread extends Thread {
        private final AtomicBoolean running = new AtomicBoolean(true);
        private final Notifier notifier;
        private final CountDownLatch shutdownLatch = new CountDownLatch(1);

        public PurgerThread(String name, TimeValue interval) {
            super(name);
            setDaemon(true);
            this.notifier = new Notifier(interval);
        }

        public void shutdown() throws InterruptedException {
            if (running.compareAndSet(true, false)) {
                notifier.doNotify();
                shutdownLatch.await();
            }
        }

        public void resetInterval(TimeValue interval) {
            notifier.setTimeout(interval);
        }

        @Override
        public void run() {
            try {
                while (running.get()) {
                    try {
                        List<IndexShard> shardsToPurge = getShardsToPurge();
                        purgeShards(shardsToPurge);
                    } catch (Throwable e) {
                        // suppress the warning if we are shutting down anyway
                        if (running.get()) {
                            logger.warn("failed to execute ttl purge", e);
                        }
                    }
                    if (running.get()) {
                        notifier.await();
                    }
                }
            } finally {
                shutdownLatch.countDown();
            }
        }

        /**
         * Returns the shards to purge, i.e. the local started primary shards that have ttl enabled and disable_purge to false
         */
        private List<IndexShard> getShardsToPurge() {
            List<IndexShard> shardsToPurge = new ArrayList<>();
            MetaData metaData = clusterService.state().metaData();
            for (IndexService indexService : indicesService) {
                // check the value of disable_purge for this index
                IndexMetaData indexMetaData = metaData.index(indexService.index().name());
                if (indexMetaData == null) {
                    continue;
                }
                boolean disablePurge = indexMetaData.settings().getAsBoolean(INDEX_TTL_DISABLE_PURGE, false);
                if (disablePurge) {
                    continue;
                }

                // should be optimized with the hasTTL flag
                FieldMappers ttlFieldMappers = indexService.mapperService().fullName(TTLFieldMapper.NAME);
                if (ttlFieldMappers == null) {
                    continue;
                }
                // check if ttl is enabled for at least one type of this index
                boolean hasTTLEnabled = false;
                for (FieldMapper ttlFieldMapper : ttlFieldMappers) {
                    if (((TTLFieldMapper) ttlFieldMapper).enabled()) {
                        hasTTLEnabled = true;
                        break;
                    }
                }
                if (hasTTLEnabled) {
                    for (IndexShard indexShard : indexService) {
                        if (indexShard.state() == IndexShardState.STARTED && indexShard.routingEntry().primary() && indexShard.routingEntry().started()) {
                            shardsToPurge.add(indexShard);
                        }
                    }
                }
            }
            return shardsToPurge;
        }

        public TimeValue getInterval() {
            return notifier.getTimeout();
        }
    }

    /** Runs a range query for expired TTL values on each shard and bulk-deletes the matching docs. */
    private void purgeShards(List<IndexShard> shardsToPurge) {
        for (IndexShard shardToPurge : shardsToPurge) {
            Query query = shardToPurge.indexService().mapperService().smartNameFieldMapper(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true, null);
            Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
            try {
                logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());
                ExpiredDocsCollector expiredDocsCollector = new ExpiredDocsCollector();
                searcher.searcher().search(query, expiredDocsCollector);
                List<DocToPurge> docsToPurge = expiredDocsCollector.getDocsToPurge();

                BulkRequest bulkRequest = new BulkRequest();
                for (DocToPurge docToPurge : docsToPurge) {
                    bulkRequest.add(new DeleteRequest().index(shardToPurge.routingEntry().index()).type(docToPurge.type).id(docToPurge.id).version(docToPurge.version).routing(docToPurge.routing));
                    bulkRequest = processBulkIfNeeded(bulkRequest, false);
                }
                processBulkIfNeeded(bulkRequest, true);
            } catch (Exception e) {
                logger.warn("failed to purge", e);
            } finally {
                searcher.close();
            }
        }
    }

    /** Immutable identity of a single expired document scheduled for deletion. */
    private static class DocToPurge {
        public final String type;
        public final String id;
        public final long version;
        public final String routing;

        public DocToPurge(String type, String id, long version, String routing) {
            this.type = type;
            this.id = id;
            this.version = version;
            this.routing = routing;
        }
    }

    /** Lucene collector gathering uid/routing/version of every matching (expired) doc. */
    private class ExpiredDocsCollector extends SimpleCollector {
        private LeafReaderContext context;
        private List<DocToPurge> docsToPurge = new ArrayList<>();

        public ExpiredDocsCollector() {
        }

        @Override
        public void setScorer(Scorer scorer) {
        }

        @Override
        public boolean needsScores() {
            return false;
        }

        @Override
        public void collect(int doc) {
            try {
                UidAndRoutingFieldsVisitor fieldsVisitor = new UidAndRoutingFieldsVisitor();
                context.reader().document(doc, fieldsVisitor);
                Uid uid = fieldsVisitor.uid();
                final long version = Versions.loadVersion(context.reader(), new Term(UidFieldMapper.NAME, uid.toBytesRef()));
                docsToPurge.add(new DocToPurge(uid.type(), uid.id(), version, fieldsVisitor.routing()));
            } catch (Exception e) {
                // best-effort: a doc we fail to read simply is not purged this round
                logger.trace("failed to collect doc", e);
            }
        }

        @Override
        public void doSetNextReader(LeafReaderContext context) throws IOException {
            this.context = context;
        }

        public List<DocToPurge> getDocsToPurge() {
            return this.docsToPurge;
        }
    }

    /**
     * Executes the bulk request when it is full (or on {@code force} with pending actions) and
     * returns a fresh request; otherwise returns the request unchanged.
     */
    private BulkRequest processBulkIfNeeded(BulkRequest bulkRequest, boolean force) {
        if ((force && bulkRequest.numberOfActions() > 0) || bulkRequest.numberOfActions() >= bulkSize) {
            try {
                bulkAction.executeBulk(bulkRequest, new ActionListener<BulkResponse>() {
                    @Override
                    public void onResponse(BulkResponse bulkResponse) {
                        logger.trace("bulk took " + bulkResponse.getTookInMillis() + "ms");
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        // FIX: include the cause — previously the throwable was dropped from the log
                        logger.warn("failed to execute bulk", e);
                    }
                });
            } catch (Exception e) {
                logger.warn("failed to process bulk", e);
            }
            bulkRequest = new BulkRequest();
        }
        return bulkRequest;
    }

    /** Applies dynamic updates of {@code indices.ttl.interval} to the running purger thread. */
    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            final TimeValue currentInterval = IndicesTTLService.this.purgerThread.getInterval();
            final TimeValue interval = settings.getAsTime(INDICES_TTL_INTERVAL, currentInterval);
            if (!interval.equals(currentInterval)) {
                logger.info("updating indices.ttl.interval from [{}] to [{}]", currentInterval, interval);
                IndicesTTLService.this.purgerThread.resetInterval(interval);
            }
        }
    }

    /** Interruptible timed-wait helper: sleeps up to the current timeout, wakeable via doNotify(). */
    private static final class Notifier {

        private final ReentrantLock lock = new ReentrantLock();
        private final Condition condition = lock.newCondition();
        private volatile TimeValue timeout;

        public Notifier(TimeValue timeout) {
            assert timeout != null;
            this.timeout = timeout;
        }

        public void await() {
            lock.lock();
            try {
                condition.await(timeout.millis(), TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                // FIX: Thread.interrupted() cleared the interrupt flag; re-interrupt instead
                Thread.currentThread().interrupt();
            } finally {
                lock.unlock();
            }
        }

        public void setTimeout(TimeValue timeout) {
            assert timeout != null;
            this.timeout = timeout;
            doNotify();
        }

        public TimeValue getTimeout() {
            return timeout;
        }

        public void doNotify() {
            lock.lock();
            try {
                condition.signalAll();
            } finally {
                lock.unlock();
            }
        }
    }
}
package com.jagsaund.rxuploader.sample.model.wire;

import android.support.annotation.NonNull;
import com.google.gson.TypeAdapterFactory;
import com.ryanharter.auto.value.gson.GsonTypeAdapterFactory;

/**
 * Gson {@link TypeAdapterFactory} for the sample's AutoValue JSON model types.
 * The {@code @GsonTypeAdapterFactory} annotation causes the auto-value-gson processor to generate
 * {@code AutoValueGson_JSONModelTypeAdapterFactory}, which dispatches to each model's adapter.
 */
@GsonTypeAdapterFactory
public abstract class JSONModelTypeAdapterFactory implements TypeAdapterFactory {
    /**
     * Creates the generated factory instance; register this with a {@code GsonBuilder}.
     *
     * @return a {@link TypeAdapterFactory} covering all annotated AutoValue models
     */
    @NonNull
    public static TypeAdapterFactory create() {
        return new AutoValueGson_JSONModelTypeAdapterFactory();
    }
}
package p;

import q.S;

// NOTE(review): minimal fixture-style code (presumably refactoring-test input) — keep behavior unchanged.
public class A {
    // field of external type q.S, initialized eagerly
    S s = new S();

    // inner (non-static) class: captures the enclosing A instance to reach field s
    class B {
        public B() {
            Secondary sec = new Secondary();
            sec.f(s); // passes the enclosing instance's field to Secondary.f
        }
    }
}
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.content.browser;

import android.content.Context;
import android.content.res.Configuration;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.format.DateUtils;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.view.LayoutInflater;
import android.view.accessibility.AccessibilityEvent;
import android.widget.DatePicker;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.NumberPicker;
import android.widget.NumberPicker.OnValueChangeListener;

import java.util.Arrays;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;

import org.chromium.content.app.AppResource;

// This class is heavily based on android.widget.DatePicker.
// It exposes only month + year spinners (no day-of-month spinner).
public class MonthPicker extends FrameLayout {
    private static final int DEFAULT_START_YEAR = 1900;
    private static final int DEFAULT_END_YEAR = 2100;
    private static final boolean DEFAULT_ENABLED_STATE = true;

    private final LinearLayout mSpinners;
    private final NumberPicker mMonthSpinner;
    private final NumberPicker mYearSpinner;

    private Locale mCurrentLocale;
    private OnMonthChangedListener mMonthChangedListener;
    // Localized short month names, rebuilt whenever the locale changes.
    private String[] mShortMonths;
    private int mNumberOfMonths;
    // Scratch calendar used for transient computations; never exposed.
    private Calendar mTempDate;
    private Calendar mMinDate;
    private Calendar mMaxDate;
    private Calendar mCurrentDate;
    private boolean mIsEnabled = DEFAULT_ENABLED_STATE;

    /**
     * The callback used to indicate the user changes the date.
     */
    public interface OnMonthChangedListener {

        /**
         * Called upon a date change.
         *
         * @param view The view associated with this listener.
         * @param year The year that was set.
         * @param monthOfYear The month that was set (0-11) for compatibility
         *            with {@link java.util.Calendar}.
         */
        void onMonthChanged(MonthPicker view, int year, int monthOfYear);
    }

    public MonthPicker(Context context) {
        this(context, null);
    }

    public MonthPicker(Context context, AttributeSet attrs) {
        this(context, attrs, android.R.attr.datePickerStyle);
    }

    public MonthPicker(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        // initialization based on locale; also populates mShortMonths and
        // mNumberOfMonths, which the month spinner setup below relies on.
        setCurrentLocale(Locale.getDefault());

        int startYear = DEFAULT_START_YEAR;
        int endYear = DEFAULT_END_YEAR;

        LayoutInflater inflater = (LayoutInflater) context
                .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        assert AppResource.LAYOUT_MONTH_PICKER != 0;
        inflater.inflate(AppResource.LAYOUT_MONTH_PICKER, this, true);

        // Shared listener for both spinners; adjusts mTempDate and propagates.
        OnValueChangeListener onChangeListener = new OnValueChangeListener() {
            public void onValueChange(NumberPicker picker, int oldVal, int newVal) {
                mTempDate.setTimeInMillis(mCurrentDate.getTimeInMillis());
                // take care of wrapping of days and months to update greater fields
                if (picker == mMonthSpinner) {
                    // Dec -> Jan and Jan -> Dec wrap into the adjacent year.
                    if (oldVal == 11 && newVal == 0) {
                        mTempDate.add(Calendar.MONTH, 1);
                    } else if (oldVal == 0 && newVal == 11) {
                        mTempDate.add(Calendar.MONTH, -1);
                    } else {
                        mTempDate.add(Calendar.MONTH, newVal - oldVal);
                    }
                } else if (picker == mYearSpinner) {
                    mTempDate.set(Calendar.YEAR, newVal);
                } else {
                    throw new IllegalArgumentException();
                }
                // now set the date to the adjusted one
                setDate(mTempDate.get(Calendar.YEAR), mTempDate.get(Calendar.MONTH));
                updateSpinners();
                notifyDateChanged();
            }
        };

        assert AppResource.ID_MONTH_YEAR_PICKERS_CONTAINER != 0;
        mSpinners = (LinearLayout) findViewById(AppResource.ID_MONTH_YEAR_PICKERS_CONTAINER);

        // month
        assert AppResource.ID_MONTH_PICKER != 0;
        mMonthSpinner = (NumberPicker) findViewById(AppResource.ID_MONTH_PICKER);
        mMonthSpinner.setMinValue(0);
        mMonthSpinner.setMaxValue(mNumberOfMonths - 1);
        mMonthSpinner.setDisplayedValues(mShortMonths);
        mMonthSpinner.setOnLongPressUpdateInterval(200);
        mMonthSpinner.setOnValueChangedListener(onChangeListener);

        // year
        assert AppResource.ID_YEAR_PICKER != 0;
        mYearSpinner = (NumberPicker) findViewById(AppResource.ID_YEAR_PICKER);
        mYearSpinner.setOnLongPressUpdateInterval(100);
        mYearSpinner.setOnValueChangedListener(onChangeListener);

        // Clamp the pickable range to [Jan 1 startYear, Dec 31 endYear].
        mTempDate.clear();
        mTempDate.set(startYear, 0, 1);
        setMinDate(mTempDate.getTimeInMillis());
        mTempDate.clear();
        mTempDate.set(endYear, 11, 31);
        setMaxDate(mTempDate.getTimeInMillis());

        // initialize to current date
        mCurrentDate.setTimeInMillis(System.currentTimeMillis());
        init(mCurrentDate.get(Calendar.YEAR), mCurrentDate.get(Calendar.MONTH), null);
    }

    /**
     * Gets the minimal date supported by this {@link DatePicker} in
     * milliseconds since January 1, 1970 00:00:00 in
     * {@link TimeZone#getDefault()} time zone.
     * <p>
     * Note: The default minimal date is 01/01/1900.
     * <p>
     *
     * @return The minimal supported date.
     */
    public long getMinDate() {
        return mMinDate.getTimeInMillis();
    }

    /**
     * Sets the minimal date supported by this {@link NumberPicker} in
     * milliseconds since January 1, 1970 00:00:00 in
     * {@link TimeZone#getDefault()} time zone.
     *
     * @param minDate The minimal supported date.
     */
    public void setMinDate(long minDate) {
        mTempDate.setTimeInMillis(minDate);
        // NOTE(review): early-return when "same year but different day-of-year"
        // mirrors android.widget.DatePicker verbatim, but reads inverted (one
        // would expect '==' to skip a no-op update) — confirm against upstream.
        if (mTempDate.get(Calendar.YEAR) == mMinDate.get(Calendar.YEAR)
                && mTempDate.get(Calendar.DAY_OF_YEAR) != mMinDate.get(Calendar.DAY_OF_YEAR)) {
            return;
        }
        mMinDate.setTimeInMillis(minDate);
        // Keep the current selection inside the allowed range.
        if (mCurrentDate.before(mMinDate)) {
            mCurrentDate.setTimeInMillis(mMinDate.getTimeInMillis());
        }
        updateSpinners();
    }

    /**
     * Gets the maximal date supported by this {@link DatePicker} in
     * milliseconds since January 1, 1970 00:00:00 in
     * {@link TimeZone#getDefault()} time zone.
     * <p>
     * Note: The default maximal date is 12/31/2100.
     * <p>
     *
     * @return The maximal supported date.
     */
    public long getMaxDate() {
        return mMaxDate.getTimeInMillis();
    }

    /**
     * Sets the maximal date supported by this {@link DatePicker} in
     * milliseconds since January 1, 1970 00:00:00 in
     * {@link TimeZone#getDefault()} time zone.
     *
     * @param maxDate The maximal supported date.
     */
    public void setMaxDate(long maxDate) {
        mTempDate.setTimeInMillis(maxDate);
        // NOTE(review): same suspicious '!=' early-return as setMinDate — see note there.
        if (mTempDate.get(Calendar.YEAR) == mMaxDate.get(Calendar.YEAR)
                && mTempDate.get(Calendar.DAY_OF_YEAR) != mMaxDate.get(Calendar.DAY_OF_YEAR)) {
            return;
        }
        mMaxDate.setTimeInMillis(maxDate);
        // Keep the current selection inside the allowed range.
        if (mCurrentDate.after(mMaxDate)) {
            mCurrentDate.setTimeInMillis(mMaxDate.getTimeInMillis());
        }
        updateSpinners();
    }

    @Override
    public void setEnabled(boolean enabled) {
        if (mIsEnabled == enabled) {
            return;
        }
        super.setEnabled(enabled);
        mMonthSpinner.setEnabled(enabled);
        mYearSpinner.setEnabled(enabled);
        mIsEnabled = enabled;
    }

    @Override
    public boolean isEnabled() {
        return mIsEnabled;
    }

    @Override
    public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
        onPopulateAccessibilityEvent(event);
        // Always report the event as handled (children are not asked).
        return true;
    }

    @Override
    public void onPopulateAccessibilityEvent(AccessibilityEvent event) {
        super.onPopulateAccessibilityEvent(event);

        // Speak the currently selected date (month + year).
        final int flags = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_YEAR;
        String selectedDateUtterance = DateUtils.formatDateTime(getContext(),
                mCurrentDate.getTimeInMillis(), flags);
        event.getText().add(selectedDateUtterance);
    }

    @Override
    protected void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        setCurrentLocale(newConfig.locale);
    }

    /**
     * Sets the current locale.
     *
     * @param locale The current locale.
     */
    private void setCurrentLocale(Locale locale) {
        if (locale.equals(mCurrentLocale)) {
            return;
        }

        mCurrentLocale = locale;

        // Rebuild every calendar so it reflects the new locale's conventions.
        mTempDate = getCalendarForLocale(mTempDate, locale);
        mMinDate = getCalendarForLocale(mMinDate, locale);
        mMaxDate = getCalendarForLocale(mMaxDate, locale);
        mCurrentDate = getCalendarForLocale(mCurrentDate, locale);

        // Refresh the localized month names for the spinner.
        mNumberOfMonths = mTempDate.getActualMaximum(Calendar.MONTH) + 1;
        mShortMonths = new String[mNumberOfMonths];
        for (int i = 0; i < mNumberOfMonths; i++) {
            mShortMonths[i] = DateUtils.getMonthString(Calendar.JANUARY + i,
                    DateUtils.LENGTH_MEDIUM);
        }
    }

    /**
     * Gets a calendar for locale bootstrapped with the value of a given calendar.
     *
     * @param oldCalendar The old calendar.
     * @param locale The locale.
     */
    private Calendar getCalendarForLocale(Calendar oldCalendar, Locale locale) {
        if (oldCalendar == null) {
            return Calendar.getInstance(locale);
        } else {
            final long currentTimeMillis = oldCalendar.getTimeInMillis();
            Calendar newCalendar = Calendar.getInstance(locale);
            newCalendar.setTimeInMillis(currentTimeMillis);
            return newCalendar;
        }
    }

    /**
     * Updates the current date.
     *
     * @param year The year.
     * @param month The month which is <strong>starting from zero</strong>.
     */
    public void updateMonth(int year, int month) {
        if (!isNewDate(year, month)) {
            return;
        }
        setDate(year, month);
        updateSpinners();
        notifyDateChanged();
    }

    // Override so we are in complete control of save / restore for this widget.
    @Override
    protected void dispatchRestoreInstanceState(SparseArray<Parcelable> container) {
        dispatchThawSelfOnly(container);
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        return new SavedState(superState, getYear(), getMonth());
    }

    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        SavedState ss = (SavedState) state;
        super.onRestoreInstanceState(ss.getSuperState());
        setDate(ss.mYear, ss.mMonth);
        updateSpinners();
    }

    /**
     * Initialize the state. If the provided values designate an inconsistent
     * date the values are normalized before updating the spinners.
     *
     * @param year The initial year.
     * @param monthOfYear The initial month <strong>starting from zero</strong>.
     * @param onMonthChangedListener How user is notified date is changed by
     *            user, can be null.
     */
    public void init(int year, int monthOfYear, OnMonthChangedListener onMonthChangedListener) {
        setDate(year, monthOfYear);
        updateSpinners();
        mMonthChangedListener = onMonthChangedListener;
    }

    // True iff (year, month) differs from the currently selected date.
    private boolean isNewDate(int year, int month) {
        return (mCurrentDate.get(Calendar.YEAR) != year
                || mCurrentDate.get(Calendar.MONTH) != month);
    }

    // Sets the selection to the 1st of (year, month), clamped into [min, max].
    private void setDate(int year, int month) {
        mCurrentDate.set(year, month, 1);
        if (mCurrentDate.before(mMinDate)) {
            mCurrentDate.setTimeInMillis(mMinDate.getTimeInMillis());
        } else if (mCurrentDate.after(mMaxDate)) {
            mCurrentDate.setTimeInMillis(mMaxDate.getTimeInMillis());
        }
    }

    // Re-derives both spinners' ranges/values from the current selection.
    private void updateSpinners() {
        // set the spinner ranges respecting the min and max dates
        if (mCurrentDate.equals(mMinDate)) {
            mMonthSpinner.setDisplayedValues(null);
            mMonthSpinner.setMinValue(mCurrentDate.get(Calendar.MONTH));
            mMonthSpinner.setMaxValue(mCurrentDate.getActualMaximum(Calendar.MONTH));
            mMonthSpinner.setWrapSelectorWheel(false);
        } else if (mCurrentDate.equals(mMaxDate)) {
            mMonthSpinner.setDisplayedValues(null);
            mMonthSpinner.setMinValue(mCurrentDate.getActualMinimum(Calendar.MONTH));
            mMonthSpinner.setMaxValue(mCurrentDate.get(Calendar.MONTH));
            mMonthSpinner.setWrapSelectorWheel(false);
        } else {
            mMonthSpinner.setDisplayedValues(null);
            mMonthSpinner.setMinValue(0);
            mMonthSpinner.setMaxValue(11);
            mMonthSpinner.setWrapSelectorWheel(true);
        }

        // make sure the month names are a zero based array
        // with the months in the month spinner
        String[] displayedValues = Arrays.copyOfRange(mShortMonths,
                mMonthSpinner.getMinValue(), mMonthSpinner.getMaxValue() + 1);
        mMonthSpinner.setDisplayedValues(displayedValues);

        // year spinner range does not change based on the current date
        mYearSpinner.setMinValue(mMinDate.get(Calendar.YEAR));
        mYearSpinner.setMaxValue(mMaxDate.get(Calendar.YEAR));
        mYearSpinner.setWrapSelectorWheel(false);

        // set the spinner values
        mYearSpinner.setValue(mCurrentDate.get(Calendar.YEAR));
        mMonthSpinner.setValue(mCurrentDate.get(Calendar.MONTH));
    }

    /**
     * @return The selected year.
     */
    public int getYear() {
        return mCurrentDate.get(Calendar.YEAR);
    }

    /**
     * @return The selected month.
     */
    public int getMonth() {
        return mCurrentDate.get(Calendar.MONTH);
    }

    /**
     * @return The selected day of month.
     */
    public int getDayOfMonth() {
        return mCurrentDate.get(Calendar.DAY_OF_MONTH);
    }

    /**
     * Notifies the listener, if such, for a change in the selected date.
     */
    private void notifyDateChanged() {
        sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_SELECTED);
        if (mMonthChangedListener != null) {
            mMonthChangedListener.onMonthChanged(this, getYear(), getMonth());
        }
    }

    /**
     * Class for managing state storing/restoring.
     */
    private static class SavedState extends BaseSavedState {

        private final int mYear;
        private final int mMonth;

        /**
         * Constructor called from {@link DatePicker#onSaveInstanceState()}
         */
        private SavedState(Parcelable superState, int year, int month) {
            super(superState);
            mYear = year;
            mMonth = month;
        }

        /**
         * Constructor called from {@link #CREATOR}
         */
        private SavedState(Parcel in) {
            super(in);
            mYear = in.readInt();
            mMonth = in.readInt();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeInt(mYear);
            dest.writeInt(mMonth);
        }

        @SuppressWarnings("all")
        // suppress unused and hiding
        public static final Parcelable.Creator<SavedState> CREATOR = new Creator<SavedState>() {

            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }

            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }
}
package mblog.base.utils; import java.util.Date; /** * @author cdq2016/7/30. */ public class RelativeDateFormat { private static final long ONE_MINUTE = 60000L; private static final long ONE_HOUR = 3600000L; private static final long ONE_DAY = 86400000L; private static final long ONE_WEEK = 604800000L; private static final String ONE_SECOND_AGO = "秒前"; private static final String ONE_MINUTE_AGO = "分钟前"; private static final String ONE_HOUR_AGO = "小时前"; private static final String ONE_DAY_AGO = "天前"; private static final String ONE_MONTH_AGO = "月前"; private static final String ONE_YEAR_AGO = "年前"; private static final String ONE_UNKNOWN = "未知"; public static String format(Date date) { if (null == date) { return ONE_UNKNOWN; } long delta = new Date().getTime() - date.getTime(); if (delta < 1L * ONE_MINUTE) { long seconds = toSeconds(delta); return (seconds <= 0 ? 1 : seconds) + ONE_SECOND_AGO; } if (delta < 45L * ONE_MINUTE) { long minutes = toMinutes(delta); return (minutes <= 0 ? 1 : minutes) + ONE_MINUTE_AGO; } if (delta < 24L * ONE_HOUR) { long hours = toHours(delta); return (hours <= 0 ? 1 : hours) + ONE_HOUR_AGO; } if (delta < 48L * ONE_HOUR) { return "昨天"; } if (delta < 30L * ONE_DAY) { long days = toDays(delta); return (days <= 0 ? 1 : days) + ONE_DAY_AGO; } if (delta < 12L * 4L * ONE_WEEK) { long months = toMonths(delta); return (months <= 0 ? 1 : months) + ONE_MONTH_AGO; } else { long years = toYears(delta); return (years <= 0 ? 1 : years) + ONE_YEAR_AGO; } } private static long toSeconds(long date) { return date / 1000L; } private static long toMinutes(long date) { return toSeconds(date) / 60L; } private static long toHours(long date) { return toMinutes(date) / 60L; } private static long toDays(long date) { return toHours(date) / 24L; } private static long toMonths(long date) { return toDays(date) / 30L; } private static long toYears(long date) { return toMonths(date) / 365L; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cloudstack.engine.subsystem.api.storage;

import org.apache.cloudstack.framework.async.AsyncCallFuture;
import org.apache.cloudstack.framework.async.AsyncCompletionCallback;
import org.apache.cloudstack.storage.command.CommandResult;

import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.storage.StoragePool;

/**
 * Storage-subsystem service contract for template lifecycle operations:
 * creation, copy, deletion, and synchronization of templates across data
 * stores. Asynchronous methods return an {@link AsyncCallFuture} or report
 * completion through an {@link AsyncCompletionCallback}.
 */
public interface TemplateService {

    /**
     * {@link CommandResult} specialization that carries the
     * {@link TemplateInfo} the command operated on.
     */
    class TemplateApiResult extends CommandResult {
        private final TemplateInfo template;

        public TemplateApiResult(TemplateInfo template) {
            super();
            this.template = template;
        }

        /** @return the template this result refers to. */
        public TemplateInfo getTemplate() {
            return template;
        }
    }

    /**
     * Creates {@code template} on {@code store}; completion (success or
     * failure) is reported via {@code callback}.
     */
    void createTemplateAsync(TemplateInfo template, DataStore store, AsyncCompletionCallback<TemplateApiResult> callback);

    /** Creates a template on {@code store} from an existing snapshot. */
    AsyncCallFuture<TemplateApiResult> createTemplateFromSnapshotAsync(SnapshotInfo snapshot, TemplateInfo template, DataStore store);

    /** Creates a template on {@code store} from an existing volume. */
    AsyncCallFuture<TemplateApiResult> createTemplateFromVolumeAsync(VolumeInfo volume, TemplateInfo template, DataStore store);

    /**
     * Creates data-disk templates for an OVA parent template.
     *
     * @return true on success — synchronous, unlike most of this interface.
     */
    boolean createOvaDataDiskTemplates(TemplateInfo parentTemplate);

    /** Deletes {@code template} from its data store. */
    AsyncCallFuture<TemplateApiResult> deleteTemplateAsync(TemplateInfo template);

    /** Copies {@code srcTemplate} to {@code destStore}. */
    AsyncCallFuture<TemplateApiResult> copyTemplate(TemplateInfo srcTemplate, DataStore destStore);

    /** Stages {@code srcTemplate} onto the given primary storage pool. */
    AsyncCallFuture<TemplateApiResult> prepareTemplateOnPrimary(TemplateInfo srcTemplate, StoragePool pool);

    /** Removes {@code template} from the given primary storage pool. */
    AsyncCallFuture<TemplateApiResult> deleteTemplateOnPrimary(TemplateInfo template, StoragePool pool);

    /** Synchronizes the template identified by {@code templateId} to a region-wide store. */
    void syncTemplateToRegionStore(long templateId, DataStore store);

    /**
     * Triggers download of the system template for the given hypervisor type
     * in data center {@code dcId}.
     */
    void handleSysTemplateDownload(HypervisorType hostHyper, Long dcId);

    /** Reconciles template state between the database and {@code store}. */
    void handleTemplateSync(DataStore store);

    /** Downloads the bootstrap system-VM template onto {@code store}. */
    void downloadBootstrapSysTemplate(DataStore store);

    /** Registers system-VM templates on the given secondary store. */
    void addSystemVMTemplatesToSecondary(DataStore store);

    /** Associates a template with a zone ({@code zoneId} may be null — semantics defined by implementation). */
    void associateTemplateToZone(long templateId, Long zoneId);

    /** Associates all cross-zone templates with data center {@code dcId}. */
    void associateCrosszoneTemplatesToZone(long dcId);

    /** Creates a data-disk template under {@code parentTemplate} with the given file attributes. */
    AsyncCallFuture<TemplateApiResult> createDatadiskTemplateAsync(TemplateInfo parentTemplate, TemplateInfo dataDiskTemplate, String path, String diskId, long fileSize, boolean bootable);
}
package com.imengyu.vr720.core.representation; /** * The Quaternion class. A Quaternion is a four-dimensional vector that is used to represent rotations of a rigid body * in the 3D space. It is very similar to a rotation vector; it contains an angle, encoded into the w component * and three components to describe the rotation-axis (encoded into x, y, z). * * <p> * Quaternions allow for elegant descriptions of 3D rotations, interpolations as well as extrapolations and compared to * Euler angles, they don't suffer from gimbal lock. Interpolations between two Quaternions are called SLERP (Spherical * Linear Interpolation). * </p> * * <p> * This class also contains the representation of the same rotation as a Quaternion and 4x4-Rotation-Matrix. * </p> * * @author Leigh Beattie, Alexander Pacha * */ public class Quaternion extends Vector4f { /** * Rotation matrix that contains the same rotation as the Quaternion in a 4x4 homogenised rotation matrix. * Remember that for performance reasons, this matrix is only updated, when it is accessed and not on every change * of the quaternion-values. */ private MatrixF4x4 matrix; /** * This variable is used to synchronise the rotation matrix with the current quaternion values. If someone has * changed the * quaternion numbers then the matrix will need to be updated. To save on processing we only really want to update * the matrix when someone wants to fetch it, instead of whenever someone sets a quaternion value. */ private boolean dirty = false; private Vector4f tmpVector = new Vector4f(); private Quaternion tmpQuaternion; /** * Creates a new Quaternion object and initialises it with the identity Quaternion */ public Quaternion() { super(); matrix = new MatrixF4x4(); loadIdentityQuat(); } /** * Normalise this Quaternion into a unity Quaternion. 
*/ public void normalise() { this.dirty = true; float mag = (float) Math.sqrt(points[3] * points[3] + points[0] * points[0] + points[1] * points[1] + points[2] * points[2]); points[3] = points[3] / mag; points[0] = points[0] / mag; points[1] = points[1] / mag; points[2] = points[2] / mag; } @Override public void normalize() { normalise(); } /** * Copies the values from the given quaternion to this one * * @param quat The quaternion to copy from */ public void set(Quaternion quat) { this.dirty = true; copyVec4(quat); } /** * Multiply this quaternion by the input quaternion and store the result in the out quaternion * * @param input * @param output */ public void multiplyByQuat(Quaternion input, Quaternion output) { if (input != output) { output.points[3] = (points[3] * input.points[3] - points[0] * input.points[0] - points[1] * input.points[1] - points[2] * input.points[2]); //w = w1w2 - x1x2 - y1y2 - z1z2 output.points[0] = (points[3] * input.points[0] + points[0] * input.points[3] + points[1] * input.points[2] - points[2] * input.points[1]); //x = w1x2 + x1w2 + y1z2 - z1y2 output.points[1] = (points[3] * input.points[1] + points[1] * input.points[3] + points[2] * input.points[0] - points[0] * input.points[2]); //y = w1y2 + y1w2 + z1x2 - x1z2 output.points[2] = (points[3] * input.points[2] + points[2] * input.points[3] + points[0] * input.points[1] - points[1] * input.points[0]); //z = w1z2 + z1w2 + x1y2 - y1x2 } else { tmpVector.points[0] = input.points[0]; tmpVector.points[1] = input.points[1]; tmpVector.points[2] = input.points[2]; tmpVector.points[3] = input.points[3]; output.points[3] = (points[3] * tmpVector.points[3] - points[0] * tmpVector.points[0] - points[1] * tmpVector.points[1] - points[2] * tmpVector.points[2]); //w = w1w2 - x1x2 - y1y2 - z1z2 output.points[0] = (points[3] * tmpVector.points[0] + points[0] * tmpVector.points[3] + points[1] * tmpVector.points[2] - points[2] * tmpVector.points[1]); //x = w1x2 + x1w2 + y1z2 - z1y2 output.points[1] = 
(points[3] * tmpVector.points[1] + points[1] * tmpVector.points[3] + points[2] * tmpVector.points[0] - points[0] * tmpVector.points[2]); //y = w1y2 + y1w2 + z1x2 - x1z2 output.points[2] = (points[3] * tmpVector.points[2] + points[2] * tmpVector.points[3] + points[0] * tmpVector.points[1] - points[1] * tmpVector.points[0]); //z = w1z2 + z1w2 + x1y2 - y1x2 } } /** * Multiply this quaternion by the input quaternion and store the result in the out quaternion * * @param input * @param output */ public void multiplyByQuat(Quaternion input) { this.dirty = true; if(tmpQuaternion == null) tmpQuaternion = new Quaternion(); tmpQuaternion.copyVec4(this); multiplyByQuat(input, tmpQuaternion); this.copyVec4(tmpQuaternion); } /** * Multiplies this Quaternion with a scalar * * @param scalar the value that the vector should be multiplied with */ public void multiplyByScalar(float scalar) { this.dirty = true; multiplyByScalar(scalar); } /** * Add a quaternion to this quaternion * * @param input The quaternion that you want to add to this one */ public void addQuat(Quaternion input) { this.dirty = true; addQuat(input, this); } /** * Add this quaternion and another quaternion together and store the result in the output quaternion * * @param input The quaternion you want added to this quaternion * @param output The quaternion you want to store the output in. 
*/ public void addQuat(Quaternion input, Quaternion output) { output.setX(getX() + input.getX()); output.setY(getY() + input.getY()); output.setZ(getZ() + input.getZ()); output.setW(getW() + input.getW()); } /** * Subtract a quaternion to this quaternion * * @param input The quaternion that you want to subtracted from this one */ public void subQuat(Quaternion input) { this.dirty = true; subQuat(input, this); } /** * Subtract another quaternion from this quaternion and store the result in the output quaternion * * @param input The quaternion you want subtracted from this quaternion * @param output The quaternion you want to store the output in. */ public void subQuat(Quaternion input, Quaternion output) { output.setX(getX() - input.getX()); output.setY(getY() - input.getY()); output.setZ(getZ() - input.getZ()); output.setW(getW() - input.getW()); } /** * Converts this Quaternion into the Rotation-Matrix representation which can be accessed by * {@link Quaternion#getMatrix4x4 getMatrix4x4} */ private void convertQuatToMatrix() { float x = points[0]; float y = points[1]; float z = points[2]; float w = points[3]; matrix.setX0(1 - 2 * (y * y) - 2 * (z * z)); //1 - 2y2 - 2z2 matrix.setX1(2 * (x * y) + 2 * (w * z)); // 2xy - 2wz matrix.setX2(2 * (x * z) - 2 * (w * y)); //2xz + 2wy matrix.setX3(0); matrix.setY0(2 * (x * y) - 2 * (w * z)); //2xy + 2wz matrix.setY1(1 - 2 * (x * x) - 2 * (z * z)); //1 - 2x2 - 2z2 matrix.setY2(2 * (y * z) + 2 * (w * x)); // 2yz + 2wx matrix.setY3(0); matrix.setZ0(2 * (x * z) + 2 * (w * y)); //2xz + 2wy matrix.setZ1(2 * (y * z) - 2 * (w * x)); //2yz - 2wx matrix.setZ2(1 - 2 * (x * x) - 2 * (y * y)); //1 - 2x2 - 2y2 matrix.setZ3(0); matrix.setW0(0); matrix.setW1(0); matrix.setW2(0); matrix.setW3(1); } /** * Get an axis angle representation of this quaternion. * * @param output Vector4f axis angle. 
*/ public void toAxisAngle(Vector4f output) { if (getW() > 1) { normalise(); // if w>1 acos and sqrt will produce errors, this cant happen if quaternion is normalised } float angle = 2 * (float) Math.toDegrees(Math.acos(getW())); float x; float y; float z; float s = (float) Math.sqrt(1 - getW() * getW()); // assuming quaternion normalised then w is less than 1, so term always positive. if (s < 0.001) { // test to avoid divide by zero, s is always positive due to sqrt // if s close to zero then direction of axis not important x = points[0]; // if it is important that axis is normalised then replace with x=1; y=z=0; y = points[1]; z = points[2]; } else { x = points[0] / s; // normalise axis y = points[1] / s; z = points[2] / s; } output.points[0] = x; output.points[1] = y; output.points[2] = z; output.points[3] = angle; } /** * Returns the heading, attitude and bank of this quaternion as euler angles in the double array respectively * * @return An array of size 3 containing the euler angles for this quaternion */ public double[] toEulerAngles() { double[] ret = new double[3]; ret[0] = Math.atan2(2 * points[1] * getW() - 2 * points[0] * points[2], 1 - 2 * (points[1] * points[1]) - 2 * (points[2] * points[2])); // atan2(2*qy*qw-2*qx*qz , 1 - 2*qy2 - 2*qz2) ret[1] = Math.asin(2 * points[0] * points[1] + 2 * points[2] * getW()); // asin(2*qx*qy + 2*qz*qw) ret[2] = Math.atan2(2 * points[0] * getW() - 2 * points[1] * points[2], 1 - 2 * (points[0] * points[0]) - 2 * (points[2] * points[2])); // atan2(2*qx*qw-2*qy*qz , 1 - 2*qx2 - 2*qz2) return ret; } /** * Sets the quaternion to an identity quaternion of 0,0,0,1. */ public void loadIdentityQuat() { this.dirty = true; setX(0); setY(0); setZ(0); setW(1); } @Override public String toString() { return "{X: " + getX() + ", Y:" + getY() + ", Z:" + getZ() + ", W:" + getW() + "}"; } /** * This is an internal method used to build a quaternion from a rotation matrix and then sets the current quaternion * from that matrix. 
* */ private void generateQuaternionFromMatrix() { float qx; float qy; float qz; float qw; float[] mat = matrix.getMatrix(); int[] indices = null; if (this.matrix.size() == 16) { if (this.matrix.isColumnMajor()) { indices = MatrixF4x4.matIndCol16_3x3; } else { indices = MatrixF4x4.matIndRow16_3x3; } } else { if (this.matrix.isColumnMajor()) { indices = MatrixF4x4.matIndCol9_3x3; } else { indices = MatrixF4x4.matIndRow9_3x3; } } int m00 = indices[0]; int m01 = indices[1]; int m02 = indices[2]; int m10 = indices[3]; int m11 = indices[4]; int m12 = indices[5]; int m20 = indices[6]; int m21 = indices[7]; int m22 = indices[8]; float tr = mat[m00] + mat[m11] + mat[m22]; if (tr > 0) { float s = (float) Math.sqrt(tr + 1.0) * 2; // S=4*qw qw = 0.25f * s; qx = (mat[m21] - mat[m12]) / s; qy = (mat[m02] - mat[m20]) / s; qz = (mat[m10] - mat[m01]) / s; } else if ((mat[m00] > mat[m11]) & (mat[m00] > mat[m22])) { float s = (float) Math.sqrt(1.0 + mat[m00] - mat[m11] - mat[m22]) * 2; // S=4*qx qw = (mat[m21] - mat[m12]) / s; qx = 0.25f * s; qy = (mat[m01] + mat[m10]) / s; qz = (mat[m02] + mat[m20]) / s; } else if (mat[m11] > mat[m22]) { float s = (float) Math.sqrt(1.0 + mat[m11] - mat[m00] - mat[m22]) * 2; // S=4*qy qw = (mat[m02] - mat[m20]) / s; qx = (mat[m01] + mat[m10]) / s; qy = 0.25f * s; qz = (mat[m12] + mat[m21]) / s; } else { float s = (float) Math.sqrt(1.0 + mat[m22] - mat[m00] - mat[m11]) * 2; // S=4*qz qw = (mat[m10] - mat[m01]) / s; qx = (mat[m02] + mat[m20]) / s; qy = (mat[m12] + mat[m21]) / s; qz = 0.25f * s; } setX(qx); setY(qy); setZ(qz); setW(qw); } /** * You can set the values for this quaternion based off a rotation matrix. If the matrix you supply is not a * rotation matrix this will fail. You MUST provide a 4x4 matrix. 
* * @param matrix A column major rotation matrix */ public void setColumnMajor(float[] matrix) { this.matrix.setMatrix(matrix); this.matrix.setColumnMajor(true); generateQuaternionFromMatrix(); } /** * You can set the values for this quaternion based off a rotation matrix. If the matrix you supply is not a * rotation matrix this will fail. * * @param matrix A column major rotation matrix */ public void setRowMajor(float[] matrix) { this.matrix.setMatrix(matrix); this.matrix.setColumnMajor(false); generateQuaternionFromMatrix(); } /** * Set this quaternion from axis angle values. All rotations are in degrees. * * @param azimuth The rotation around the z axis * @param pitch The rotation around the y axis * @param roll The rotation around the x axis */ public void setEulerAngle(float azimuth, float pitch, float roll) { double heading = Math.toRadians(roll); double attitude = Math.toRadians(pitch); double bank = Math.toRadians(azimuth); double c1 = Math.cos(heading / 2); double s1 = Math.sin(heading / 2); double c2 = Math.cos(attitude / 2); double s2 = Math.sin(attitude / 2); double c3 = Math.cos(bank / 2); double s3 = Math.sin(bank / 2); double c1c2 = c1 * c2; double s1s2 = s1 * s2; setW((float) (c1c2 * c3 - s1s2 * s3)); setX((float) (c1c2 * s3 + s1s2 * c3)); setY((float) (s1 * c2 * c3 + c1 * s2 * s3)); setZ((float) (c1 * s2 * c3 - s1 * c2 * s3)); dirty = true; } /** * Rotation is in degrees. Set this quaternion from the supplied axis angle. * * @param vec The vector of rotation * @param rot The angle of rotation around that vector in degrees. 
*/ public void setAxisAngle(Vector3f vec, float rot) { double s = Math.sin(Math.toRadians(rot / 2)); setX(vec.getX() * (float) s); setY(vec.getY() * (float) s); setZ(vec.getZ() * (float) s); setW((float) Math.cos(Math.toRadians(rot / 2))); dirty = true; } public void setAxisAngleRad(Vector3f vec, double rot) { double s = rot / 2; setX(vec.getX() * (float) s); setY(vec.getY() * (float) s); setZ(vec.getZ() * (float) s); setW((float) rot / 2); dirty = true; } /** * @return Returns this Quaternion in the Rotation Matrix representation */ public MatrixF4x4 getMatrix4x4() { //toMatrixColMajor(); if (dirty) { convertQuatToMatrix(); dirty = false; } return this.matrix; } public void copyFromVec3(Vector3f vec, float w) { copyFromV3f(vec, w); } /** * Get a linear interpolation between this quaternion and the input quaternion, storing the result in the output * quaternion. * * @param input The quaternion to be slerped with this quaternion. * @param output The quaternion to store the result in. * @param t The ratio between the two quaternions where 0 <= t <= 1.0 . Increase value of t will bring rotation * closer to the input quaternion. */ public void slerp(Quaternion input, Quaternion output, float t) { // Calculate angle between them. 
//double cosHalftheta = this.dotProduct(input); Quaternion bufferQuat; float cosHalftheta = this.dotProduct(input); if (cosHalftheta < 0) { if(tmpQuaternion == null) tmpQuaternion = new Quaternion(); bufferQuat = tmpQuaternion; cosHalftheta = -cosHalftheta; bufferQuat.points[0] = (-input.points[0]); bufferQuat.points[1] = (-input.points[1]); bufferQuat.points[2] = (-input.points[2]); bufferQuat.points[3] = (-input.points[3]); } else { bufferQuat = input; } /** * if(dot < 0.95f){ * double angle = Math.acos(dot); * double ratioA = Math.sin((1 - t) * angle); * double ratioB = Math.sin(t * angle); * double divisor = Math.sin(angle); * * //Calculate Quaternion * output.setW((float)((this.getW() * ratioA + input.getW() * ratioB)/divisor)); * output.setX((float)((this.getX() * ratioA + input.getX() * ratioB)/divisor)); * output.setY((float)((this.getY() * ratioA + input.getY() * ratioB)/divisor)); * output.setZ((float)((this.getZ() * ratioA + input.getZ() * ratioB)/divisor)); * } * else{ * lerp(input, output, t); * } */ // if qa=qb or qa=-qb then theta = 0 and we can return qa if (Math.abs(cosHalftheta) >= 1.0) { output.points[0] = (this.points[0]); output.points[1] = (this.points[1]); output.points[2] = (this.points[2]); output.points[3] = (this.points[3]); } else { double sinHalfTheta = Math.sqrt(1.0 - cosHalftheta * cosHalftheta); // if theta = 180 degrees then result is not fully defined // we could rotate around any axis normal to qa or qb //if(Math.abs(sinHalfTheta) < 0.001){ //output.setW(this.getW() * 0.5f + input.getW() * 0.5f); //output.setX(this.getX() * 0.5f + input.getX() * 0.5f); //output.setY(this.getY() * 0.5f + input.getY() * 0.5f); //output.setZ(this.getZ() * 0.5f + input.getZ() * 0.5f); // lerp(bufferQuat, output, t); //} //else{ double halfTheta = Math.acos(cosHalftheta); double ratioA = Math.sin((1 - t) * halfTheta) / sinHalfTheta; double ratioB = Math.sin(t * halfTheta) / sinHalfTheta; //Calculate Quaternion output.points[3] = ((float) (points[3] * 
ratioA + bufferQuat.points[3] * ratioB)); output.points[0] = ((float) (this.points[0] * ratioA + bufferQuat.points[0] * ratioB)); output.points[1] = ((float) (this.points[1] * ratioA + bufferQuat.points[1] * ratioB)); output.points[2] = ((float) (this.points[2] * ratioA + bufferQuat.points[2] * ratioB)); //} } } }
package com.outlook.bigkun.idcard;

import com.outlook.bigkun.framework.Factory;
import com.outlook.bigkun.framework.Product;

import java.util.ArrayList;
import java.util.List;

/**
 * Concrete factory (Factory Method pattern) that creates {@link IDCard} products and
 * records the owner of every card it registers.
 *
 * @author zhanghk
 * @since 2019/7/11
 */
public class IDCardFactory extends Factory {

    // FIX: was a raw-typed "new ArrayList()"; now generic and final (assigned once).
    /** Owner names of every registered card, in registration order. */
    private final List<String> owners = new ArrayList<>();

    @Override
    protected void registerProduct(Product product) {
        // Products handed to this factory are created by createProduct below,
        // so the cast to IDCard holds for the framework's create/register flow.
        owners.add(((IDCard) product).getOwner());
    }

    @Override
    protected Product createProduct(String owner) {
        return new IDCard(owner);
    }

    /**
     * @return the live list of recorded owners (not a defensive copy);
     *         callers should treat it as read-only
     */
    public List<String> getOwners() {
        return owners;
    }
}
package customList;

import gmailServices.FormattedMessage;
import javafx.geometry.Pos;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.*;

/**
 * View for a single row of the mail list: a four-column grid holding an avatar,
 * a sender/recipient name, the subject, and the message date.
 *
 * Created by Ashok on 4/17/2017.
 */
public class CustomListCellView {
    // Root container for the row; one GridPane per cell view.
    GridPane listCellBox = new GridPane();
    ImageView profilePic = new ImageView();
    Label name = new Label();
    Label subject = new Label();
    Label dateText = new Label();

    public CustomListCellView(){
        sceneLayout();
    }

    /**
     * Builds the static layout: column constraints (min, preferred, max widths),
     * alignment of the labels, and placement of each child in row 0.
     */
    public void sceneLayout(){
        // Columns: 0 = avatar, 1 = name, 2 = subject, 3 = date.
        ColumnConstraints column0 = new ColumnConstraints(50,50,100);
        ColumnConstraints column1 = new ColumnConstraints(50,120,250);
        ColumnConstraints column2 = new ColumnConstraints(50, 230, 250);
        ColumnConstraints column3 = new ColumnConstraints(50, 100, 150);
        // All columns may grow horizontally to fill the list width.
        column0.setHgrow(Priority.ALWAYS);
        column1.setHgrow(Priority.ALWAYS);
        column2.setHgrow(Priority.ALWAYS);
        column3.setHgrow(Priority.ALWAYS);
        listCellBox.getColumnConstraints().add(column0);
        listCellBox.getColumnConstraints().add(column1);
        listCellBox.getColumnConstraints().add(column2);
        listCellBox.getColumnConstraints().add(column3);
        listCellBox.getRowConstraints().add(new RowConstraints());
        name.setAlignment(Pos.CENTER_LEFT);
        dateText.setAlignment(Pos.CENTER_RIGHT);
        subject.setAlignment(Pos.CENTER_LEFT);
        profilePic.setFitWidth(40);
        profilePic.setFitHeight(40);
        // BorderPane wrapper centers the avatar inside its column.
        BorderPane col0 = new BorderPane();
        col0.setCenter(profilePic);
        listCellBox.add(col0, 0,0);
        listCellBox.add(name,1,0);
        listCellBox.add(subject, 2,0);
        listCellBox.add(dateText,3,0);
    }

    /**
     * Populates the row from a message. The "name" column shows the sender for
     * incoming folders (INBOX/TRASH) and the recipient for outgoing ones (SENT/DRAFT).
     *
     * @param data    the formatted message to display
     * @param labelId the Gmail label of the containing folder ("INBOX", "SENT",
     *                "DRAFT" or "TRASH"); any other value leaves the name blank
     */
    public void setInfo(FormattedMessage data, String labelId){
        subject.setText(data.getSubject());
        switch(labelId){
            case "INBOX" : name.setText(data.getFrom());
                break;
            case "SENT" : name.setText(data.getTo());
                break;
            case "DRAFT" : name.setText(data.getTo());
                break;
            // Last case has no break — harmless today, but add one if a case is appended.
            case "TRASH" : name.setText(data.getFrom());
        }
        // Static placeholder avatar; presumably a per-contact picture is intended later.
        profilePic.setImage(new Image(getClass().getResourceAsStream("/account_circle_grey_192x192.png")));
        dateText.setText(data.getDate());
    }

    public GridPane getListCellBox() { return listCellBox;}
}
package models;

import com.google.gson.annotations.Expose;

import java.util.Objects;

/**
 * A game participant, identified by a numeric id and represented on the board
 * by a single character ("type"). Both fields are serialized to JSON via Gson.
 */
public class Player {

    /* -- @Expose to tell gson to add below fields to json returned to user -- */
    @Expose
    private char type;

    @Expose
    private int id;
    /* -- end fields to serialize here -- */

    /**
     * Creates a player.
     *
     * @param type the character this player uses on the board
     * @param id   the numeric identifier of the player
     */
    public Player(char type, int id) {
        this.type = type;
        this.id = id;
    }

    /** @return the board character representing this player */
    public char getType() {
        return type;
    }

    /** @param type the board character to use for this player */
    public void setType(char type) {
        this.type = type;
    }

    /** @return this player's numeric identifier */
    public int getId() {
        return id;
    }

    /** @param id the numeric identifier to assign */
    public void setId(int id) {
        this.id = id;
    }

    /** Two players are equal when both their type and their id match. */
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof Player)) {
            return false;
        }
        Player other = (Player) o;
        return this.type == other.type && this.id == other.id;
    }

    /** Kept consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return Objects.hash(type, id);
    }

    /** @return a short human-readable description of this player's state */
    @Override
    public String toString() {
        return "Player [type=" + type + ", id=" + id + "]";
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.startup;

import org.apache.catalina.Container;
import org.apache.catalina.LifecycleListener;
import org.apache.tomcat.util.IntrospectionUtils;
import org.apache.tomcat.util.digester.Rule;
import org.xml.sax.Attributes;

/**
 * Rule that creates a new {@link LifecycleListener} and associates it with the
 * top object on the stack which must implement {@link Container}. The
 * implementation class to be used is determined by:
 * <ol>
 * <li>Does the top element on the stack specify an implementation class using
 *     the attribute specified when this rule was created?</li>
 * <li>Does the parent {@link Container} of the {@link Container} on the top of
 *     the stack specify an implementation class using the attribute specified
 *     when this rule was created?</li>
 * <li>Use the default implementation class specified when this rule was
 *     created.</li>
 * </ol>
 */
public class LifecycleListenerRule extends Rule {

    // ----------------------------------------------------------- Constructors

    /**
     * Construct a new instance of this Rule.
     *
     * @param listenerClass Default name of the LifecycleListener
     *  implementation class to be created
     * @param attributeName Name of the attribute that optionally
     *  includes an override name of the LifecycleListener class
     */
    public LifecycleListenerRule(String listenerClass, String attributeName) {
        this.listenerClass = listenerClass;
        this.attributeName = attributeName;
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The attribute name of an attribute that can override the
     * implementation class name.
     */
    private final String attributeName;

    /**
     * The name of the <code>LifecycleListener</code> implementation class.
     */
    private final String listenerClass;

    // --------------------------------------------------------- Public Methods

    /**
     * Handle the beginning of an XML element.
     *
     * @param attributes The attributes of this element
     *
     * @exception Exception if a processing error occurs
     */
    @Override
    public void begin(String namespace, String name, Attributes attributes)
            throws Exception {
        // Top of the digester stack is the Container the listener attaches to;
        // the object below it (if any, and if a Container) is its parent.
        Container c = (Container) digester.peek();
        Container p = null;
        Object obj = digester.peek(1);
        if (obj instanceof Container) {
            p = (Container) obj;
        }

        String className = null;

        // Check the container for the specified attribute (highest priority)
        if (attributeName != null) {
            String value = attributes.getValue(attributeName);
            if (value != null)
                className = value;
        }

        // Check the container's parent for the specified attribute
        // (read via a bean property of the same name, not an XML attribute)
        if (p != null && className == null) {
            String configClass =
                (String) IntrospectionUtils.getProperty(p, attributeName);
            if (configClass != null && configClass.length() > 0) {
                className = configClass;
            }
        }

        // Use the default
        if (className == null) {
            className = listenerClass;
        }

        // Instantiate a new LifecycleListener implementation object
        // via its public no-argument constructor
        Class<?> clazz = Class.forName(className);
        LifecycleListener listener =
            (LifecycleListener) clazz.getConstructor().newInstance();

        // Add this LifecycleListener to our associated component
        c.addLifecycleListener(listener);
    }
}
package dev.morphia.issue463;

import dev.morphia.TestBase;
import dev.morphia.annotations.Entity;
import dev.morphia.annotations.Id;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.Assert;
import org.junit.Test;

/**
 * Regression test for issue 463: when an entity subclass declares its own
 * collection name, saving it must write to the subclass's collection
 * ("class2"), not the parent's ("class1").
 */
public class TestIssue463 extends TestBase {
    @Test
    public void save() {
        getMapper().map(Class1.class, Class2.class);

        final Class2 class2 = new Class2();
        class2.setId(new ObjectId());
        class2.setText("hello world");
        getDs().save(class2);

        // Look the document up by its _id in both collections: it must be
        // absent from the parent's collection and present in the subclass's.
        final Document query = new Document("_id", class2.getId());
        Assert.assertNull(getMapper().getCollection(Class1.class).find(query).first());
        Assert.assertNotNull(getMapper().getCollection(Class2.class).find(query).first());
    }

    // Parent entity mapped to collection "class1"; discriminator disabled so
    // only the collection name distinguishes the two types.
    @Entity(value = "class1", useDiscriminator = false)
    public static class Class1 {
        @Id
        private ObjectId id;
        private String text;

        public ObjectId getId() {
            return id;
        }

        public void setId(final ObjectId id) {
            this.id = id;
        }

        public String getText() {
            return text;
        }

        public void setText(final String text) {
            this.text = text;
        }
    }

    // Subclass adds no fields; it only overrides the target collection name.
    @Entity(value = "class2", useDiscriminator = false)
    public static class Class2 extends Class1 {
    }
}
package mono.com.squareup.picasso;

// NOTE: machine-generated Xamarin.Android (Mono) binding bridge for
// com.squareup.picasso.Picasso.Listener. Do not hand-edit: the registration
// strings and n_-prefixed native method names are produced by the binding
// generator and must match the managed side exactly.
public class Picasso_ListenerImplementor
	extends java.lang.Object
	implements
		mono.android.IGCUserPeer,
		com.squareup.picasso.Picasso.Listener
{
/** @hide */
	public static final String __md_methods;
	static {
		// Maps the Java callback onto the managed handler in the Square.Picasso assembly.
		__md_methods = 
			"n_onImageLoadFailed:(Lcom/squareup/picasso/Picasso;Landroid/net/Uri;Ljava/lang/Exception;)V:GetOnImageLoadFailed_Lcom_squareup_picasso_Picasso_Landroid_net_Uri_Ljava_lang_Exception_Handler:Square.Picasso.Picasso/IListenerInvoker, Square.Picasso\n" +
			"";
		mono.android.Runtime.register ("Square.Picasso.Picasso+IListenerImplementor, Square.Picasso, Version=2.5.2.0, Culture=neutral, PublicKeyToken=null", Picasso_ListenerImplementor.class, __md_methods);
	}

	public Picasso_ListenerImplementor () throws java.lang.Throwable
	{
		super ();
		// Activate the managed peer only when this exact class (not a subclass)
		// is being constructed.
		if (getClass () == Picasso_ListenerImplementor.class)
			mono.android.TypeManager.Activate ("Square.Picasso.Picasso+IListenerImplementor, Square.Picasso, Version=2.5.2.0, Culture=neutral, PublicKeyToken=null", "", this, new java.lang.Object[] {  });
	}

	// Java-side callback simply forwards into the native (managed) handler.
	public void onImageLoadFailed (com.squareup.picasso.Picasso p0, android.net.Uri p1, java.lang.Exception p2)
	{
		n_onImageLoadFailed (p0, p1, p2);
	}

	private native void n_onImageLoadFailed (com.squareup.picasso.Picasso p0, android.net.Uri p1, java.lang.Exception p2);

	// Keeps managed-referenced Java objects alive for the GC bridge.
	private java.util.ArrayList refList;
	public void monodroidAddReference (java.lang.Object obj)
	{
		if (refList == null)
			refList = new java.util.ArrayList ();
		refList.add (obj);
	}

	public void monodroidClearReferences ()
	{
		if (refList != null)
			refList.clear ();
	}
}
/*
 * */
package com.synectiks.process.common.plugins.views.migrations.V20191203120602_MigrateSavedSearchesToViewsSupport.view;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.auto.value.AutoValue;
import com.synectiks.process.common.plugins.views.migrations.V20191203120602_MigrateSavedSearchesToViewsSupport.search.Time;

/**
 * Immutable (AutoValue) description of a time pivot used when migrating saved
 * searches to views: a field name plus a time-histogram configuration. The
 * {@code type} is always serialized as "time".
 */
@AutoValue
public abstract class Pivot {
    private static final String TYPE_TIME = "time";

    // JSON property names used by the serialized view definition.
    static final String FIELD_FIELD_NAME = "field";
    static final String FIELD_TYPE = "type";
    static final String FIELD_CONFIG = "config";

    /** The field this pivot groups on. */
    @JsonProperty(FIELD_FIELD_NAME)
    public abstract String field();

    /** Fixed discriminator: this migration only produces time pivots. */
    @JsonProperty(FIELD_TYPE)
    public String type() {
        return TYPE_TIME;
    }

    /** Interval configuration for the time histogram. */
    @JsonProperty(FIELD_CONFIG)
    public abstract TimeHistogramConfig config();

    // Builder pre-populated with the default time-histogram config.
    static Builder timeBuilder() {
        return new AutoValue_Pivot.Builder()
                .config(TimeHistogramConfig.create());
    }

    // Converts this pivot into the search-side bucket specification.
    Time toBucketSpec() {
        return Time.create(field(), config().interval().toBucketInterval());
    }

    @AutoValue.Builder
    public static abstract class Builder {
        public abstract Builder field(String field);

        public abstract Builder config(TimeHistogramConfig config);

        public abstract Pivot build();
    }
}
/* * Copyright © 2015-2019 Santer Reply S.p.A. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.reply.orchestrator.enums; public enum Task { NONE, MONITORING, QOS, BROKERING, DMS, DEPLOYER, UNKNOWN; }
package org.gradle.test.performance.mediummonolithicjavaproject.p73; public class Production1470 { private String property0; public String getProperty0() { return property0; } public void setProperty0(String value) { property0 = value; } private String property1; public String getProperty1() { return property1; } public void setProperty1(String value) { property1 = value; } private String property2; public String getProperty2() { return property2; } public void setProperty2(String value) { property2 = value; } private String property3; public String getProperty3() { return property3; } public void setProperty3(String value) { property3 = value; } private String property4; public String getProperty4() { return property4; } public void setProperty4(String value) { property4 = value; } private String property5; public String getProperty5() { return property5; } public void setProperty5(String value) { property5 = value; } private String property6; public String getProperty6() { return property6; } public void setProperty6(String value) { property6 = value; } private String property7; public String getProperty7() { return property7; } public void setProperty7(String value) { property7 = value; } private String property8; public String getProperty8() { return property8; } public void setProperty8(String value) { property8 = value; } private String property9; public String getProperty9() { return property9; } public void setProperty9(String value) { property9 = value; } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.language.simple; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import org.apache.camel.language.simple.types.SimpleToken; import org.apache.camel.language.simple.types.SimpleTokenType; import org.apache.camel.language.simple.types.TokenType; /** * Tokenizer to create {@link SimpleToken} from the input. 
 */
public final class SimpleTokenizer {

    // use CopyOnWriteArrayList so we can modify it in the for loop when changing function start/end tokens
    private static final List<SimpleTokenType> KNOWN_TOKENS = new CopyOnWriteArrayList<SimpleTokenType>();

    static {
        // add known tokens. Order matters: nextToken scans this list front to
        // back and returns the first match, so longer/more specific tokens
        // (e.g. ">=" before ">") must come first within a family.
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.whiteSpace, " "));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.whiteSpace, "\t"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.whiteSpace, "\n"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.whiteSpace, "\r"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.singleQuote, "'"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.doubleQuote, "\""));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.functionStart, "${"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.functionStart, "$simple{"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.functionEnd, "}"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.booleanValue, "true"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.booleanValue, "false"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.nullValue, "null"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.escape, "\\"));

        // binary operators
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "=="));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, ">="));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "<="));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, ">"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "<"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "!="));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "not is"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "is"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "not contains"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "contains"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "not regex"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "regex"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "not in"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "in"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "range"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.binaryOperator, "not range"));

        // unary operators
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.unaryOperator, "++"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.unaryOperator, "--"));

        // logical operators
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.logicalOperator, "&&"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.logicalOperator, "||"));
        // TODO: @deprecated logical operators, to be removed in Camel 3.0
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.logicalOperator, "and"));
        KNOWN_TOKENS.add(new SimpleTokenType(TokenType.logicalOperator, "or"));
    }

    private SimpleTokenizer() {
        // static methods
    }

    /**
     * @see SimpleLanguage#changeFunctionStartToken(String...)
     */
    public static void changeFunctionStartToken(String... startToken) {
        // Removing while iterating is safe ONLY because KNOWN_TOKENS is a
        // CopyOnWriteArrayList: the for-each iterates a snapshot.
        for (SimpleTokenType type : KNOWN_TOKENS) {
            if (type.getType() == TokenType.functionStart) {
                KNOWN_TOKENS.remove(type);
            }
        }

        // add in start of list as its a more common token to be used
        for (String token : startToken) {
            KNOWN_TOKENS.add(0, new SimpleTokenType(TokenType.functionStart, token));
        }
    }

    /**
     * @see SimpleLanguage#changeFunctionEndToken(String...)
     */
    public static void changeFunctionEndToken(String... endToken) {
        // Same snapshot-iteration trick as changeFunctionStartToken above.
        for (SimpleTokenType type : KNOWN_TOKENS) {
            if (type.getType() == TokenType.functionEnd) {
                KNOWN_TOKENS.remove(type);
            }
        }

        // add in start of list as its a more common token to be used
        for (String token : endToken) {
            KNOWN_TOKENS.add(0, new SimpleTokenType(TokenType.functionEnd, token));
        }
    }

    /**
     * Create the next token
     *
     * @param expression  the input expression
     * @param index       the current index
     * @param allowEscape whether to allow escapes
     * @param filter      defines the accepted token types to be returned (character is always used as fallback)
     * @return the created token, will always return a token
     */
    public static SimpleToken nextToken(String expression, int index, boolean allowEscape, TokenType... filter) {
        return doNextToken(expression, index, allowEscape, filter);
    }

    /**
     * Create the next token
     *
     * @param expression  the input expression
     * @param index       the current index
     * @param allowEscape whether to allow escapes
     * @return the created token, will always return a token
     */
    public static SimpleToken nextToken(String expression, int index, boolean allowEscape) {
        return doNextToken(expression, index, allowEscape);
    }

    // Scans, in priority order: numeric literal, escape sequence, known token
    // (from KNOWN_TOKENS), then single character as the final fallback.
    private static SimpleToken doNextToken(String expression, int index, boolean allowEscape, TokenType... filters) {

        boolean numericAllowed = acceptType(TokenType.numericValue, filters);
        if (numericAllowed) {
            // is it a numeric value
            StringBuilder sb = new StringBuilder();
            boolean digit = true;
            while (digit && index < expression.length()) {
                digit = Character.isDigit(expression.charAt(index));
                if (digit) {
                    char ch = expression.charAt(index);
                    sb.append(ch);
                    index++;
                    continue;
                }
                // is it a dot or comma as part of a floating point number
                // NOTE(review): this accepts repeated separators (e.g. "1.2.3")
                // as one numeric token — presumably tolerated by the parser; confirm.
                boolean decimalSeparator = '.' == expression.charAt(index) || ',' == expression.charAt(index);
                if (decimalSeparator && sb.length() > 0) {
                    char ch = expression.charAt(index);
                    sb.append(ch);
                    index++;
                    // assume its still a digit
                    digit = true;
                    continue;
                }
            }
            if (sb.length() > 0) {
                return new SimpleToken(new SimpleTokenType(TokenType.numericValue, sb.toString()), index);
            }
        }

        boolean escapeAllowed = allowEscape && acceptType(TokenType.escape, filters);
        if (escapeAllowed) {
            StringBuilder sb = new StringBuilder();
            char ch = expression.charAt(index);
            boolean escaped = '\\' == ch;
            if (escaped && index < expression.length() - 1) {
                // grab next character to escape
                char next = expression.charAt(++index);
                // special for new line, tabs and carriage return
                boolean special = false;
                if ('n' == next) {
                    sb.append("\n");
                    special = true;
                } else if ('t' == next) {
                    sb.append("\t");
                    special = true;
                } else if ('r' == next) {
                    sb.append("\r");
                    special = true;
                } else {
                    // not special just a regular character
                    // NOTE(review): appends ch (the backslash itself), not
                    // 'next' — so "\x" tokenizes as "\". Looks intentional for
                    // pass-through of unknown escapes, but confirm upstream.
                    sb.append(ch);
                }
                // force 2 as length if special
                return new SimpleToken(new SimpleTokenType(TokenType.character, sb.toString()), index, special ? 2 : 1);
            }
        }

        // it could be any of the known tokens
        String text = expression.substring(index);
        for (SimpleTokenType token : KNOWN_TOKENS) {
            if (acceptType(token.getType(), filters)) {
                if (text.startsWith(token.getValue())) {
                    return new SimpleToken(token, index);
                }
            }
        }

        // fallback and create a character token
        char ch = expression.charAt(index);
        SimpleToken token = new SimpleToken(new SimpleTokenType(TokenType.character, "" + ch), index);
        return token;
    }

    // An empty/null filter list accepts every token type.
    private static boolean acceptType(TokenType type, TokenType... filters) {
        if (filters == null || filters.length == 0) {
            return true;
        }
        for (TokenType filter : filters) {
            if (type == filter) {
                return true;
            }
        }
        return false;
    }
}
// NOTE: decompiled/obfuscated source (package and class names such as p005cm,
// C2847V3, C0120S, m7453of are decompiler artifacts). Kept as-is: renaming
// anything here would break cross-references in sibling decompiled files.
package p005cm.aptoide.p006pt.dataprovider.p010ws.p012v3;

import android.content.SharedPreferences;
import android.text.TextUtils;
import p005cm.aptoide.p006pt.dataprovider.interfaces.TokenInvalidator;
import p005cm.aptoide.p006pt.dataprovider.p010ws.BodyInterceptor;
import p005cm.aptoide.p006pt.preferences.managed.ManagerPreferences;
import p026rx.C0120S;

/* renamed from: cm.aptoide.pt.dataprovider.ws.v3.PushNotificationsRequest */
public class PushNotificationsRequest extends C2847V3<GetPushNotificationsResponse> {

    protected PushNotificationsRequest(BaseBody baseBody, BodyInterceptor<BaseBody> bodyInterceptor, TokenInvalidator tokenInvalidator) {
        // Base URL of the Aptoide v3 webservice this request targets.
        super("https://webservices.aptoide.com/webservices/3/", baseBody, bodyInterceptor, tokenInvalidator);
    }

    /* renamed from: of */
    // Static factory: builds the request body (query parameters) for fetching
    // push notifications newer than lastPushNotificationId.
    public static PushNotificationsRequest m7453of(SharedPreferences sharedPreferences, String oemId, BodyInterceptor<BaseBody> bodyInterceptor, TokenInvalidator tokenInvalidator, int applicationVersionCode, int lastPushNotificationId, String countryCode) {
        BaseBody args = new BaseBody();
        args.put("oem_id", oemId);
        args.put("mode", "json");
        args.put("limit", "1");
        args.put("lang", countryCode);
        String str = "notification_type";
        if (ManagerPreferences.isDebug(sharedPreferences)) {
            // Debug builds may override the notification channel; fall back to
            // the test channel when no override is configured.
            String notificationType = ManagerPreferences.getNotificationType(sharedPreferences);
            args.put(str, TextUtils.isEmpty(notificationType) ? "aptoide_tests" : notificationType);
        } else {
            args.put(str, "aptoide_vanilla");
        }
        args.put("id", String.valueOf(lastPushNotificationId));
        args.put("aptoide_vercode", Integer.toString(applicationVersionCode));
        return new PushNotificationsRequest(args, bodyInterceptor, tokenInvalidator);
    }

    /* access modifiers changed from: protected */
    public C0120S<GetPushNotificationsResponse> loadDataFromNetwork(Service service, boolean bypassCache) {
        return service.getPushNotifications(this.map, bypassCache);
    }
}
package tek.runtime; import org.joml.Vector2f; import org.joml.Vector3f; import org.joml.Vector4f; public class Transform { public Vector3f position, rotation, scale, velocity; { //local creation position = new Vector3f(); rotation = new Vector3f(); scale = new Vector3f(1f); velocity = new Vector3f(); } public Transform(){ } public Transform(Vector3f position) { this.position.set(position); } public Transform(Vector3f position, Vector3f rotation) { this.position.set(position); this.rotation.set(rotation); } public Transform(Vector3f position, Vector3f rotation, Vector3f scale){ this.position.set(position); this.rotation.set(rotation); this.scale = scale; } public Transform(Transform t){ this.position.set(t.position); this.rotation.set(t.rotation); this.scale.set(t.scale); this.velocity.set(t.velocity); } /* ------ UTILITY ------ */ public void move(float x, float y, float z){ z *= -1; y *= -1; if(z != 0){ position.x += (float)Math.sin(Math.toRadians(rotation.y)) * -1f * z; position.z += (float)Math.cos(Math.toRadians(rotation.y)) * z; } if(x != 0){ position.x += (float)Math.sin(Math.toRadians(rotation.y - 90)) * -1f * x; position.z += (float)Math.cos(Math.toRadians(rotation.y - 90)) * x; } position.y += y; } public void rotate(float dx, float dy){ if(rotation.y + dy > 360){ rotation.y = rotation.y + dy - 360; }else if(rotation.y + dy < 0){ rotation.y = 360 - dy; } if(rotation.x + dx > 360){ rotation.x = rotation.x + dx - 360; }else if(rotation.x + dx < 0){ rotation.x = 360 - rotation.x + dx; } } /* ------ POSITION ------*/ public void setX(float x){ position.x = x; } public void setY(float y){ position.y = y; } public void setZ(float z){ position.z = z; } public float getX(){ return position.x; } public float getY(){ return position.y; } public float getZ(){ return position.z; } public void setPosition(Vector2f vec){ this.position.x = vec.x; this.position.y = vec.y; } public void setPosition(float x, float y){ this.position.x = x; this.position.y = y; } public void 
setPosition(Vector3f vec){ this.position.set(vec); } public void setPosition(float x, float y, float z){ this.position.set(x,y,z); } public void setPosition(Vector4f vec){ this.position.set(vec.x, vec.y, vec.z); } /* ------ ROTATION ------*/ public void setRX(float x){ rotation.x = x; } public void setRY(float y){ rotation.y = y; } public void setRZ(float z){ rotation.z = z; } public float getRX(){ return rotation.x; } public float getRY(){ return rotation.y; } public float getRZ(){ return rotation.z; } public void setRotation(Vector2f vec){ this.rotation.x = vec.x; this.rotation.y = vec.y; } public void setRotation(float x, float y){ this.rotation.x = x; this.rotation.y = y; } public void setRotation(Vector3f vec){ this.rotation.set(vec); } public void setRotation(float x, float y, float z){ this.rotation.set(x,y,z); } public void setRotation(Vector4f vec){ this.rotation.set(vec.x, vec.y, vec.z); } /* ------ SCALE ------*/ public void setSX(float x){ scale.x = x; } public void setSY(float y){ scale.y = y; } public void setSZ(float z){ scale.z = z; } public float getSX(){ return scale.x; } public float getSY(){ return scale.y; } public float getSZ(){ return scale.z; } public void setScale(Vector2f vec){ this.scale.x = vec.x; this.scale.y = vec.y; } public void setScale(float x, float y){ this.scale.x = x; this.scale.y = y; } public void setScale(Vector3f vec){ this.scale.set(vec); } public void setPositon(float x, float y, float z){ this.scale.set(x,y,z); } public void setScale(Vector4f vec){ this.scale.set(vec.x, vec.y, vec.z); } /* ------ VELOCITY ------*/ public void setVX(float x){ velocity.x = x; } public void setVY(float y){ velocity.y = y; } public void setVZ(float z){ velocity.z = z; } public float getVX(){ return velocity.x; } public float getVY(){ return velocity.y; } public float getVZ(){ return velocity.z; } public void setVelocity(Vector2f vec){ this.velocity.x = vec.x; this.velocity.y = vec.y; } public void setVelocity(float x, float y){ 
this.velocity.x = x; this.velocity.y = y; } public void setVelocity(Vector3f vec){ this.velocity.set(vec); } public void seVelocity(float x, float y, float z){ this.velocity.set(x,y,z); } public void setVelocity(Vector4f vec){ this.velocity.set(vec.x, vec.y, vec.z); } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package listinhadosucesso7; /** * * @author Candido */ public class ListinhaDoSucesso7 { /** * @param args the command line arguments */ public static void main(String[] args) { // TODO code application logic here Microondas m = new Microondas(true); m.imprimir(); } }
package com.anysoftkeyboard.ui.settings;

import static androidx.test.core.app.ApplicationProvider.getApplicationContext;

import android.content.res.Configuration;
import android.support.v4.app.Fragment;
import android.widget.LinearLayout;
import com.anysoftkeyboard.AnySoftKeyboardRobolectricTestRunner;
import com.anysoftkeyboard.RobolectricFragmentTestCase;
import com.kasahorow.android.keyboard.app.R;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;

/**
 * Shared Robolectric test base for settings fragments: verifies that the
 * fragment's root layout switches orientation with the device configuration.
 * Subclasses provide the concrete fragment via startFragment().
 */
@RunWith(AnySoftKeyboardRobolectricTestRunner.class)
public abstract class BaseSettingsFragmentTest<T extends Fragment>
        extends RobolectricFragmentTestCase<T> {

    @Test
    @Config(qualifiers = "w480dp-h800dp-land-mdpi")
    public void testLandscape() {
        // Force landscape in the resources configuration before the fragment inflates.
        getApplicationContext().getResources().getConfiguration().orientation =
                Configuration.ORIENTATION_LANDSCAPE;
        final T fragment = startFragment();
        final LinearLayout rootView = fragment.getView().findViewById(R.id.settings_root);
        Assert.assertEquals(LinearLayout.HORIZONTAL, rootView.getOrientation());
        // In landscape the weight sum is expected to match the child count
        // (children share the row equally).
        Assert.assertEquals(rootView.getChildCount(), rootView.getWeightSum(), 0f);
    }

    @Test
    public void testPortrait() {
        getApplicationContext().getResources().getConfiguration().orientation =
                Configuration.ORIENTATION_PORTRAIT;
        final T fragment = startFragment();
        final LinearLayout rootView = fragment.getView().findViewById(R.id.settings_root);
        Assert.assertEquals(LinearLayout.VERTICAL, rootView.getOrientation());
    }
}
package com.github.tobato.fastdfs.domain.proto.tracker.internal; import com.github.tobato.fastdfs.domain.proto.CmdConstants; import com.github.tobato.fastdfs.domain.proto.FdfsRequest; import com.github.tobato.fastdfs.domain.proto.OtherConstants; import com.github.tobato.fastdfs.domain.proto.ProtoHead; import com.github.tobato.fastdfs.domain.proto.mapper.DynamicFieldType; import com.github.tobato.fastdfs.domain.proto.mapper.FdfsColumn; import org.apache.commons.lang3.Validate; /** * 获取源服务器 * * @author tobato */ public class TrackerGetFetchStorageRequest extends FdfsRequest { private static final byte fetchCmd = CmdConstants.TRACKER_PROTO_CMD_SERVICE_QUERY_FETCH_ONE; private static final byte updateCmd = CmdConstants.TRACKER_PROTO_CMD_SERVICE_QUERY_UPDATE; /** * 组名 */ @FdfsColumn(index = 0, max = OtherConstants.FDFS_GROUP_NAME_MAX_LEN) private String groupName; /** * 路径名 */ @FdfsColumn(index = 1, dynamicField = DynamicFieldType.allRestByte) private String path; /** * 获取文件源服务器 * * @param groupName * @param path */ public TrackerGetFetchStorageRequest(String groupName, String path, boolean toUpdate) { Validate.notBlank(groupName, "分组不能为空"); Validate.notBlank(path, "文件路径不能为空"); this.groupName = groupName; this.path = path; if (toUpdate) { head = new ProtoHead(updateCmd); } else { head = new ProtoHead(fetchCmd); } } public String getGroupName() { return groupName; } public String getPath() { return path; } }
/*******************************************************************************
 *     ___                  _   ____  ____
 *    / _ \ _   _  ___  ___| |_|  _ \| __ )
 *   | | | | | | |/ _ \/ __| __| | | |  _ \
 *   | |_| | |_| |  __/\__ \ |_| |_| | |_) |
 *    \__\_\\__,_|\___||___/\__|____/|____/
 *
 *  Copyright (c) 2014-2019 Appsicle
 *  Copyright (c) 2019-2022 QuestDB
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 ******************************************************************************/

package io.questdb.cairo.sql;

import io.questdb.cairo.SymbolMapReader;
import org.jetbrains.annotations.Nullable;

import java.io.Closeable;

/**
 * Cursor over a sequence of page frames. Obtain frames with {@link #next()}
 * until it returns {@code null}; {@link #toTop()} rewinds for another pass.
 */
public interface PageFrameCursor extends Closeable {

    /**
     * Releases resources held by the cursor.
     */
    @Override
    void close(); // we don't throw IOException

    /**
     * Advances to the next page frame.
     *
     * @return the next frame, or {@code null} when the cursor is exhausted
     */
    @Nullable
    PageFrame next();

    /**
     * Return the cursor to the beginning of the page frame.
     * Sets page address to first column.
     */
    void toTop();

    /**
     * @return size of page in bytes
     */
    long size();

    /**
     * @param columnIndex index of a symbol column
     * @return reader that maps the column's symbol keys to their string values
     */
    SymbolMapReader getSymbolMapReader(int columnIndex);
}
/*
 * Copyright 2021 Shulie Technology, Co.Ltd
 * Email: shulie@shulie.io
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.shulie.tro.web.common.vo.blacklist;

import lombok.Data;

/**
 * View object describing one blacklist entry.
 *
 * @author 无涯
 * @Package io.shulie.tro.web.data.result.blacklist
 * @date 2021/4/6 2:25 下午
 */
@Data
public class BlacklistVO {

    /**
     * Primary key id.
     */
    private Long blistId;

    /**
     * Blacklist type.
     */
    private Integer type;

    /**
     * Redis key of the blacklisted entry (NOTE(review): original comment
     * duplicated the "type" description; presumably this is the key — confirm).
     */
    private String redisKey;

    /**
     * Application id.
     */
    private Long applicationId;

    /**
     * Creation time.
     */
    private String gmtCreate;

    /**
     * Last-modified time.
     */
    private String gmtModified;

    /**
     * Enabled flag (0 = not started, 1 = started, 2 = enabled but unverified).
     */
    private Integer useYn;

    /**
     * Tenant id.
     */
    private Long customerId;

    /**
     * User id.
     */
    private Long userId;

    // UI capability flags, all enabled by default.
    private Boolean canEdit = true;

    private Boolean canRemove = true;

    private Boolean canEnableDisable = true;
}
package com.wg.gpm.parser;

import com.google.common.collect.Lists;
import org.junit.Before;
import org.junit.Test;

import com.wg.gpm.message.PostDetails;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Optional;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@code ImageLineParser}: lines carrying an [img]/[ex-img] tag are
 * expanded to centered, linked HTML images; plain lines are left alone.
 *
 * Created by aidan on 05/11/16.
 */
public class ImageLineParserTest {

    private OptionalLineParser parser =
            new ImageLineParser("http://url.github.io", Optional.of("500"), Optional.empty());
    private ParserContext context;

    @Before
    public void setUp() {
        // Three known post images the parser can resolve short names against.
        PostDetails postDetails = new PostDetails("title",
                Lists.newArrayList("/img/img1.jpg", "/img/img2.png", "/img/img3.gif"));
        context = new DefaultParserContext(postDetails);
    }

    /** A line with no image reference at all is not consumed. */
    @Test
    public void testNoImage() {
        Deque<String> stack = new ArrayDeque<>();
        boolean result = parser.parseLine(context, buildStack("Normal line."), stack);
        assertFalse(result);
    }

    /** Mentioning an image name without the [img] tag must not trigger expansion. */
    @Test
    public void testTextWithImageNameButNoTag() {
        Deque<String> stack = new ArrayDeque<>();
        boolean result = parser.parseLine(context, buildStack("Normal line. img1 and img2.png"), stack);
        assertFalse(result);
    }

    /** [img]name:alt resolves the short name against the post's image list. */
    @Test
    public void testWithImgTag() {
        Deque<String> stack = new ArrayDeque<>();
        boolean result = parser.parseLine(context, buildStack("[img]img1:alt image text"), stack);
        assertTrue(result);
        assertEquals("<p align=\"center\">\n"
                + "<a href=\"http://url.github.io/img/img1.jpg\"><img src=\"http://url.github.io/img/img1.jpg\" alt=\"alt image text\" width=\"500\"></a>"
                + "\n</p>"
                + "\n"
                + "<p align=\"center\"><i>alt image text</i></p>", stack.pop());
    }

    /** [ex-img]full-name:alt uses the exact file name as given. */
    @Test
    public void testWithExistingImgTag() {
        Deque<String> stack = new ArrayDeque<>();
        boolean result = parser.parseLine(context, buildStack("[ex-img]img2.png:alt image text"), stack);
        assertTrue(result);
        assertEquals("<p align=\"center\">\n"
                + "<a href=\"http://url.github.io/img/img2.png\"><img src=\"http://url.github.io/img/img2.png\" alt=\"alt image text\" width=\"500\"></a>"
                + "\n</p>"
                + "\n"
                + "<p align=\"center\"><i>alt image text</i></p>", stack.pop());
    }

    /** Wraps a single input line in a fresh deque, as the parser API expects. */
    private Deque<String> buildStack(String line) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push(line);
        return stack;
    }
}
package fruitymod.cards; import com.megacrit.cardcrawl.actions.common.GainBlockAction; import com.megacrit.cardcrawl.actions.common.MakeTempCardInDrawPileAction; import com.megacrit.cardcrawl.cards.AbstractCard; import com.megacrit.cardcrawl.cards.status.Dazed; import com.megacrit.cardcrawl.characters.AbstractPlayer; import com.megacrit.cardcrawl.core.CardCrawlGame; import com.megacrit.cardcrawl.dungeons.AbstractDungeon; import com.megacrit.cardcrawl.localization.CardStrings; import com.megacrit.cardcrawl.monsters.AbstractMonster; import basemod.abstracts.CustomCard; import fruitymod.FruityMod; import fruitymod.patches.AbstractCardEnum; public class DisruptionField extends CustomCard { public static final String ID = "DisruptionField"; private static final CardStrings cardStrings = CardCrawlGame.languagePack.getCardStrings(ID); public static final String NAME = cardStrings.NAME; public static final String DESCRIPTION = cardStrings.DESCRIPTION; private static final int COST = 1; private static final int BLOCK = 9; private static final int BLOCK_UPGRADE = 3; private static final int DAZED_COUNT = 1; public DisruptionField() { super(ID, NAME, FruityMod.makePath(FruityMod.FLUX_SHIELD), COST, DESCRIPTION, AbstractCard.CardType.SKILL, AbstractCardEnum.SEEKER_PURPLE, AbstractCard.CardRarity.COMMON, AbstractCard.CardTarget.SELF); this.block = this.baseBlock = BLOCK; } @Override public void use(AbstractPlayer p, AbstractMonster m) { AbstractDungeon.actionManager.addToBottom(new GainBlockAction(p, p, this.block)); AbstractDungeon.actionManager.addToBottom(new MakeTempCardInDrawPileAction(new Dazed(), DAZED_COUNT, true, true)); } @Override public AbstractCard makeCopy() { return new DisruptionField(); } @Override public void upgrade() { if (!this.upgraded) { this.upgradeName(); this.upgradeBlock(BLOCK_UPGRADE); } } }
package net.minecraft.client.model;

import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.passive.EntityHorse;
import net.minecraft.util.MathHelper;

/**
 * Obfuscated (MCP/SRG-named) horse model. Field names are compiler-generated;
 * from the box sizes and render grouping they presumably map to head, body,
 * legs, tail, ears/mule-ears, mane, saddle/bridle/reins and chest bags —
 * TODO confirm against a deobfuscation mapping before renaming anything.
 */
public class ModelHorse extends ModelBase {
    private ModelRenderer field_110709_a;
    private ModelRenderer field_178711_b;
    private ModelRenderer field_178712_c;
    private ModelRenderer field_110705_d;
    private ModelRenderer field_110706_e;
    private ModelRenderer field_110703_f;
    private ModelRenderer field_110704_g;
    private ModelRenderer field_110716_h;
    private ModelRenderer field_110717_i;
    private ModelRenderer field_110714_j;
    private ModelRenderer field_110715_k;
    private ModelRenderer field_110712_l;
    private ModelRenderer field_110713_m;
    private ModelRenderer field_110710_n;
    private ModelRenderer field_110711_o;
    private ModelRenderer field_110719_v;
    private ModelRenderer field_110718_w;
    private ModelRenderer field_110722_x;
    private ModelRenderer field_110721_y;
    private ModelRenderer field_110720_z;
    private ModelRenderer field_110688_A;
    private ModelRenderer field_110689_B;
    private ModelRenderer field_110690_C;
    private ModelRenderer field_110684_D;
    private ModelRenderer field_110685_E;
    private ModelRenderer field_110686_F;
    private ModelRenderer field_110687_G;
    private ModelRenderer field_110695_H;
    private ModelRenderer field_110696_I;
    private ModelRenderer field_110697_J;
    private ModelRenderer field_110698_K;
    private ModelRenderer field_110691_L;
    private ModelRenderer field_110692_M;
    private ModelRenderer field_110693_N;
    private ModelRenderer field_110694_O;
    private ModelRenderer field_110700_P;
    private ModelRenderer field_110699_Q;
    private ModelRenderer field_110702_R;
    private ModelRenderer field_110701_S;
    private static final String __OBFID = "CL_00000846";

    /**
     * Builds every model box. Texture sheet is 128x128; each part gets its
     * texture offset, box dimensions, rotation point, and (via
     * func_110682_a) initial rotation angles.
     */
    public ModelHorse() {
        // 128x128 texture sheet.
        this.field_78090_t = 128;
        this.field_78089_u = 128;
        // 10x10x24 box — presumably the torso.
        this.field_110715_k = new ModelRenderer(this, 0, 34);
        this.field_110715_k.func_78789_a(-5.0F, -8.0F, -19.0F, 10, 10, 24);
        this.field_110715_k.func_78793_a(0.0F, 11.0F, 9.0F);
        // Small boxes anchored at the rear and tilted back — presumably tail segments.
        this.field_110712_l = new ModelRenderer(this, 44, 0);
        this.field_110712_l.func_78789_a(-1.0F, -1.0F, 0.0F, 2, 2, 3);
        this.field_110712_l.func_78793_a(0.0F, 3.0F, 14.0F);
        this.func_110682_a(this.field_110712_l, -1.134464F, 0.0F, 0.0F);
        this.field_110713_m = new ModelRenderer(this, 38, 7);
        this.field_110713_m.func_78789_a(-1.5F, -2.0F, 3.0F, 3, 4, 7);
        this.field_110713_m.func_78793_a(0.0F, 3.0F, 14.0F);
        this.func_110682_a(this.field_110713_m, -1.134464F, 0.0F, 0.0F);
        this.field_110710_n = new ModelRenderer(this, 24, 3);
        this.field_110710_n.func_78789_a(-1.5F, -4.5F, 9.0F, 3, 4, 7);
        this.field_110710_n.func_78793_a(0.0F, 3.0F, 14.0F);
        this.func_110682_a(this.field_110710_n, -1.40215F, 0.0F, 0.0F);
        // Four leg chains (upper/lower/hoof), rear pair then front pair.
        this.field_110711_o = new ModelRenderer(this, 78, 29);
        this.field_110711_o.func_78789_a(-2.5F, -2.0F, -2.5F, 4, 9, 5);
        this.field_110711_o.func_78793_a(4.0F, 9.0F, 11.0F);
        this.field_110719_v = new ModelRenderer(this, 78, 43);
        this.field_110719_v.func_78789_a(-2.0F, 0.0F, -1.5F, 3, 5, 3);
        this.field_110719_v.func_78793_a(4.0F, 16.0F, 11.0F);
        this.field_110718_w = new ModelRenderer(this, 78, 51);
        this.field_110718_w.func_78789_a(-2.5F, 5.1F, -2.0F, 4, 3, 4);
        this.field_110718_w.func_78793_a(4.0F, 16.0F, 11.0F);
        this.field_110722_x = new ModelRenderer(this, 96, 29);
        this.field_110722_x.func_78789_a(-1.5F, -2.0F, -2.5F, 4, 9, 5);
        this.field_110722_x.func_78793_a(-4.0F, 9.0F, 11.0F);
        this.field_110721_y = new ModelRenderer(this, 96, 43);
        this.field_110721_y.func_78789_a(-1.0F, 0.0F, -1.5F, 3, 5, 3);
        this.field_110721_y.func_78793_a(-4.0F, 16.0F, 11.0F);
        this.field_110720_z = new ModelRenderer(this, 96, 51);
        this.field_110720_z.func_78789_a(-1.5F, 5.1F, -2.0F, 4, 3, 4);
        this.field_110720_z.func_78793_a(-4.0F, 16.0F, 11.0F);
        this.field_110688_A = new ModelRenderer(this, 44, 29);
        this.field_110688_A.func_78789_a(-1.9F, -1.0F, -2.1F, 3, 8, 4);
        this.field_110688_A.func_78793_a(4.0F, 9.0F, -8.0F);
        this.field_110689_B = new ModelRenderer(this, 44, 41);
        this.field_110689_B.func_78789_a(-1.9F, 0.0F, -1.6F, 3, 5, 3);
        this.field_110689_B.func_78793_a(4.0F, 16.0F, -8.0F);
        this.field_110690_C = new ModelRenderer(this, 44, 51);
        this.field_110690_C.func_78789_a(-2.4F, 5.1F, -2.1F, 4, 3, 4);
        this.field_110690_C.func_78793_a(4.0F, 16.0F, -8.0F);
        this.field_110684_D = new ModelRenderer(this, 60, 29);
        this.field_110684_D.func_78789_a(-1.1F, -1.0F, -2.1F, 3, 8, 4);
        this.field_110684_D.func_78793_a(-4.0F, 9.0F, -8.0F);
        this.field_110685_E = new ModelRenderer(this, 60, 41);
        this.field_110685_E.func_78789_a(-1.1F, 0.0F, -1.6F, 3, 5, 3);
        this.field_110685_E.func_78793_a(-4.0F, 16.0F, -8.0F);
        this.field_110686_F = new ModelRenderer(this, 60, 51);
        this.field_110686_F.func_78789_a(-1.6F, 5.1F, -2.1F, 4, 3, 4);
        this.field_110686_F.func_78793_a(-4.0F, 16.0F, -8.0F);
        // Head group, tilted forward ~30° (0.5235988 rad); two mouth boxes are
        // attached as children of the head so they move with it.
        this.field_110709_a = new ModelRenderer(this, 0, 0);
        this.field_110709_a.func_78789_a(-2.5F, -10.0F, -1.5F, 5, 5, 7);
        this.field_110709_a.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110709_a, 0.5235988F, 0.0F, 0.0F);
        this.field_178711_b = new ModelRenderer(this, 24, 18);
        this.field_178711_b.func_78789_a(-2.0F, -10.0F, -7.0F, 4, 3, 6);
        this.field_178711_b.func_78793_a(0.0F, 3.95F, -10.0F);
        this.func_110682_a(this.field_178711_b, 0.5235988F, 0.0F, 0.0F);
        this.field_178712_c = new ModelRenderer(this, 24, 27);
        this.field_178712_c.func_78789_a(-2.0F, -7.0F, -6.5F, 4, 2, 5);
        this.field_178712_c.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_178712_c, 0.5235988F, 0.0F, 0.0F);
        this.field_110709_a.func_78792_a(this.field_178711_b);
        this.field_110709_a.func_78792_a(this.field_178712_c);
        // Two small ears and two tall ears (the tall pair is drawn for
        // donkey/mule variants — see var14 in the render method).
        this.field_110705_d = new ModelRenderer(this, 0, 0);
        this.field_110705_d.func_78789_a(0.45F, -12.0F, 4.0F, 2, 3, 1);
        this.field_110705_d.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110705_d, 0.5235988F, 0.0F, 0.0F);
        this.field_110706_e = new ModelRenderer(this, 0, 0);
        this.field_110706_e.func_78789_a(-2.45F, -12.0F, 4.0F, 2, 3, 1);
        this.field_110706_e.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110706_e, 0.5235988F, 0.0F, 0.0F);
        this.field_110703_f = new ModelRenderer(this, 0, 12);
        this.field_110703_f.func_78789_a(-2.0F, -16.0F, 4.0F, 2, 7, 1);
        this.field_110703_f.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110703_f, 0.5235988F, 0.0F, 0.2617994F);
        this.field_110704_g = new ModelRenderer(this, 0, 12);
        this.field_110704_g.func_78789_a(0.0F, -16.0F, 4.0F, 2, 7, 1);
        this.field_110704_g.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110704_g, 0.5235988F, 0.0F, -0.2617994F);
        // Neck.
        this.field_110716_h = new ModelRenderer(this, 0, 12);
        this.field_110716_h.func_78789_a(-2.05F, -9.8F, -2.0F, 4, 14, 8);
        this.field_110716_h.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110716_h, 0.5235988F, 0.0F, 0.0F);
        // Side bags, rotated 90° to hang on the flanks (chested-horse only).
        this.field_110687_G = new ModelRenderer(this, 0, 34);
        this.field_110687_G.func_78789_a(-3.0F, 0.0F, 0.0F, 8, 8, 3);
        this.field_110687_G.func_78793_a(-7.5F, 3.0F, 10.0F);
        this.func_110682_a(this.field_110687_G, 0.0F, 1.5707964F, 0.0F);
        this.field_110695_H = new ModelRenderer(this, 0, 47);
        this.field_110695_H.func_78789_a(-3.0F, 0.0F, 0.0F, 8, 8, 3);
        this.field_110695_H.func_78793_a(4.5F, 3.0F, 10.0F);
        this.func_110682_a(this.field_110695_H, 0.0F, 1.5707964F, 0.0F);
        // Saddle/tack pieces (rendered only when saddled — var12 in render).
        this.field_110696_I = new ModelRenderer(this, 80, 0);
        this.field_110696_I.func_78789_a(-5.0F, 0.0F, -3.0F, 10, 1, 8);
        this.field_110696_I.func_78793_a(0.0F, 2.0F, 2.0F);
        this.field_110697_J = new ModelRenderer(this, 106, 9);
        this.field_110697_J.func_78789_a(-1.5F, -1.0F, -3.0F, 3, 1, 2);
        this.field_110697_J.func_78793_a(0.0F, 2.0F, 2.0F);
        this.field_110698_K = new ModelRenderer(this, 80, 9);
        this.field_110698_K.func_78789_a(-4.0F, -1.0F, 3.0F, 8, 1, 2);
        this.field_110698_K.func_78793_a(0.0F, 2.0F, 2.0F);
        // Stirrup straps and stirrups, one pair per side.
        this.field_110692_M = new ModelRenderer(this, 74, 0);
        this.field_110692_M.func_78789_a(-0.5F, 6.0F, -1.0F, 1, 2, 2);
        this.field_110692_M.func_78793_a(5.0F, 3.0F, 2.0F);
        this.field_110691_L = new ModelRenderer(this, 70, 0);
        this.field_110691_L.func_78789_a(-0.5F, 0.0F, -0.5F, 1, 6, 1);
        this.field_110691_L.func_78793_a(5.0F, 3.0F, 2.0F);
        this.field_110694_O = new ModelRenderer(this, 74, 4);
        this.field_110694_O.func_78789_a(-0.5F, 6.0F, -1.0F, 1, 2, 2);
        this.field_110694_O.func_78793_a(-5.0F, 3.0F, 2.0F);
        this.field_110693_N = new ModelRenderer(this, 80, 0);
        this.field_110693_N.func_78789_a(-0.5F, 0.0F, -0.5F, 1, 6, 1);
        this.field_110693_N.func_78793_a(-5.0F, 3.0F, 2.0F);
        // Bit pieces on the head, plus two zero-width quads — presumably the reins.
        this.field_110700_P = new ModelRenderer(this, 74, 13);
        this.field_110700_P.func_78789_a(1.5F, -8.0F, -4.0F, 1, 2, 2);
        this.field_110700_P.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110700_P, 0.5235988F, 0.0F, 0.0F);
        this.field_110699_Q = new ModelRenderer(this, 74, 13);
        this.field_110699_Q.func_78789_a(-2.5F, -8.0F, -4.0F, 1, 2, 2);
        this.field_110699_Q.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110699_Q, 0.5235988F, 0.0F, 0.0F);
        this.field_110702_R = new ModelRenderer(this, 44, 10);
        this.field_110702_R.func_78789_a(2.6F, -6.0F, -6.0F, 0, 3, 16);
        this.field_110702_R.func_78793_a(0.0F, 4.0F, -10.0F);
        this.field_110701_S = new ModelRenderer(this, 44, 5);
        this.field_110701_S.func_78789_a(-2.6F, -6.0F, -6.0F, 0, 3, 16);
        this.field_110701_S.func_78793_a(0.0F, 4.0F, -10.0F);
        // Mane.
        this.field_110714_j = new ModelRenderer(this, 58, 0);
        this.field_110714_j.func_78789_a(-1.0F, -11.5F, 5.0F, 2, 16, 4);
        this.field_110714_j.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110714_j, 0.5235988F, 0.0F, 0.0F);
        // Slightly inflated (0.2F) overlay on the head — presumably the bridle.
        this.field_110717_i = new ModelRenderer(this, 80, 12);
        this.field_110717_i.func_78790_a(-2.5F, -10.1F, -7.0F, 5, 5, 12, 0.2F);
        this.field_110717_i.func_78793_a(0.0F, 4.0F, -10.0F);
        this.func_110682_a(this.field_110717_i, 0.5235988F, 0.0F, 0.0F);
    }

    /**
     * Renders the model (ModelBase render hook). Tack is drawn only when
     * saddled; legs/body/head are drawn inside scale+translate pushes when the
     * horse is a baby (var11 false), so the child model shrinks smoothly.
     *
     * @param p_78088_7_ model scale passed to each part's render call
     */
    public void func_78088_a(Entity p_78088_1_, float p_78088_2_, float p_78088_3_, float p_78088_4_, float p_78088_5_, float p_78088_6_, float p_78088_7_) {
        EntityHorse var8 = (EntityHorse)p_78088_1_;
        int var9 = var8.func_110265_bP();          // horse variant/type id
        float var10 = var8.func_110258_o(0.0F);
        boolean var11 = var8.func_110228_bR();     // adult flag — presumably; confirm
        boolean var12 = var11 && var8.func_110257_ck(); // adult and saddled
        boolean var13 = var11 && var8.func_110261_ca(); // adult and chested
        boolean var14 = var9 == 1 || var9 == 2;    // variant with long ears
        float var15 = var8.func_110254_bY();       // child scale factor
        boolean var16 = var8.field_70153_n != null; // has a rider
        if(var12) {
            // Saddle, straps, stirrups, bit.
            this.field_110717_i.func_78785_a(p_78088_7_);
            this.field_110696_I.func_78785_a(p_78088_7_);
            this.field_110697_J.func_78785_a(p_78088_7_);
            this.field_110698_K.func_78785_a(p_78088_7_);
            this.field_110691_L.func_78785_a(p_78088_7_);
            this.field_110692_M.func_78785_a(p_78088_7_);
            this.field_110693_N.func_78785_a(p_78088_7_);
            this.field_110694_O.func_78785_a(p_78088_7_);
            this.field_110700_P.func_78785_a(p_78088_7_);
            this.field_110699_Q.func_78785_a(p_78088_7_);
            if(var16) {
                // Reins only when someone is riding.
                this.field_110702_R.func_78785_a(p_78088_7_);
                this.field_110701_S.func_78785_a(p_78088_7_);
            }
        }
        if(!var11) {
            // Baby: squash legs with a flatter Y scale.
            GlStateManager.func_179094_E();
            GlStateManager.func_179152_a(var15, 0.5F + var15 * 0.5F, var15);
            GlStateManager.func_179109_b(0.0F, 0.95F * (1.0F - var15), 0.0F);
        }
        this.field_110711_o.func_78785_a(p_78088_7_);
        this.field_110719_v.func_78785_a(p_78088_7_);
        this.field_110718_w.func_78785_a(p_78088_7_);
        this.field_110722_x.func_78785_a(p_78088_7_);
        this.field_110721_y.func_78785_a(p_78088_7_);
        this.field_110720_z.func_78785_a(p_78088_7_);
        this.field_110688_A.func_78785_a(p_78088_7_);
        this.field_110689_B.func_78785_a(p_78088_7_);
        this.field_110690_C.func_78785_a(p_78088_7_);
        this.field_110684_D.func_78785_a(p_78088_7_);
        this.field_110685_E.func_78785_a(p_78088_7_);
        this.field_110686_F.func_78785_a(p_78088_7_);
        if(!var11) {
            // Baby: uniform scale for the body group.
            GlStateManager.func_179121_F();
            GlStateManager.func_179094_E();
            GlStateManager.func_179152_a(var15, var15, var15);
            GlStateManager.func_179109_b(0.0F, 1.35F * (1.0F - var15), 0.0F);
        }
        this.field_110715_k.func_78785_a(p_78088_7_);
        this.field_110712_l.func_78785_a(p_78088_7_);
        this.field_110713_m.func_78785_a(p_78088_7_);
        this.field_110710_n.func_78785_a(p_78088_7_);
        this.field_110716_h.func_78785_a(p_78088_7_);
        this.field_110714_j.func_78785_a(p_78088_7_);
        if(!var11) {
            // Baby head scale, with an extra offset blended by var10.
            GlStateManager.func_179121_F();
            GlStateManager.func_179094_E();
            float var17 = 0.5F + var15 * var15 * 0.5F;
            GlStateManager.func_179152_a(var17, var17, var17);
            if(var10 <= 0.0F) {
                GlStateManager.func_179109_b(0.0F, 1.35F * (1.0F - var15), 0.0F);
            } else {
                GlStateManager.func_179109_b(0.0F, 0.9F * (1.0F - var15) * var10 + 1.35F * (1.0F - var15) * (1.0F - var10), 0.15F * (1.0F - var15) * var10);
            }
        }
        if(var14) {
            // Long-eared variants (donkey/mule — presumably).
            this.field_110703_f.func_78785_a(p_78088_7_);
            this.field_110704_g.func_78785_a(p_78088_7_);
        } else {
            this.field_110705_d.func_78785_a(p_78088_7_);
            this.field_110706_e.func_78785_a(p_78088_7_);
        }
        this.field_110709_a.func_78785_a(p_78088_7_);
        if(!var11) {
            GlStateManager.func_179121_F();
        }
        if(var13) {
            // Chest bags.
            this.field_110687_G.func_78785_a(p_78088_7_);
            this.field_110695_H.func_78785_a(p_78088_7_);
        }
    }

    /**
     * Helper: sets a part's X/Y/Z rotation angles in one call.
     */
    private void func_110682_a(ModelRenderer p_110682_1_, float p_110682_2_, float p_110682_3_, float p_110682_4_) {
        p_110682_1_.field_78795_f = p_110682_2_;
        p_110682_1_.field_78796_g = p_110682_3_;
        p_110682_1_.field_78808_h = p_110682_4_;
    }

    /**
     * Interpolates between two angles (degrees) by factor p_110683_3_, first
     * wrapping the difference into [-180, 180).
     */
    private float func_110683_a(float p_110683_1_, float p_110683_2_, float p_110683_3_) {
        float var4;

        // Wrap the delta up into range; the loop body is intentionally empty.
        for(var4 = p_110683_2_ - p_110683_1_; var4 < -180.0F; var4 += 360.0F) {
            ;
        }

        // Wrap the delta down into range.
        while(var4 >= 180.0F) {
            var4 -= 360.0F;
        }

        return p_110683_1_ + p_110683_3_ * var4;
    }

    /**
     * Per-frame animation (setRotationAngles). Blends between standing,
     * rearing (var12) and grazing (var11) poses, swings the legs from the limb
     * swing inputs, and positions the tack to follow the body/head.
     * Statement order matters: many parts copy another part's just-computed
     * position, so do not reorder assignments.
     */
    public void func_78086_a(EntityLivingBase p_78086_1_, float p_78086_2_, float p_78086_3_, float p_78086_4_) {
        super.func_78086_a(p_78086_1_, p_78086_2_, p_78086_3_, p_78086_4_);
        // Interpolated body yaw, head yaw, and pitch for this render tick.
        float var5 = this.func_110683_a(p_78086_1_.field_70760_ar, p_78086_1_.field_70761_aq, p_78086_4_);
        float var6 = this.func_110683_a(p_78086_1_.field_70758_at, p_78086_1_.field_70759_as, p_78086_4_);
        float var7 = p_78086_1_.field_70127_C + (p_78086_1_.field_70125_A - p_78086_1_.field_70127_C) * p_78086_4_;
        float var8 = var6 - var5; // head yaw relative to body, clamped to ±20°
        float var9 = var7 / 57.295776F; // pitch in radians

        if(var8 > 20.0F) {
            var8 = 20.0F;
        }

        if(var8 < -20.0F) {
            var8 = -20.0F;
        }

        if(p_78086_3_ > 0.2F) {
            // Head bob while moving.
            var9 += MathHelper.func_76134_b(p_78086_2_ * 0.4F) * 0.15F * p_78086_3_;
        }

        EntityHorse var10 = (EntityHorse)p_78086_1_;
        float var11 = var10.func_110258_o(p_78086_4_);  // grazing blend 0..1 — presumably
        float var12 = var10.func_110223_p(p_78086_4_);  // rearing blend 0..1 — presumably
        float var13 = 1.0F - var12;
        float var14 = var10.func_110201_q(p_78086_4_);  // mouth-open blend — presumably
        boolean var15 = var10.field_110278_bp != 0;     // tail-swish active
        boolean var16 = var10.func_110257_ck();         // saddled
        boolean var17 = var10.field_70153_n != null;    // has rider
        float var18 = (float)p_78086_1_.field_70173_aa + p_78086_4_; // age-based clock
        float var19 = MathHelper.func_76134_b(p_78086_2_ * 0.6662F + 3.1415927F); // limb swing wave
        float var20 = var19 * 0.8F * p_78086_3_;
        // Reset the pose-sensitive positions to their standing defaults.
        this.field_110709_a.field_78797_d = 4.0F;
        this.field_110709_a.field_78798_e = -10.0F;
        this.field_110712_l.field_78797_d = 3.0F;
        this.field_110713_m.field_78798_e = 14.0F;
        this.field_110695_H.field_78797_d = 3.0F;
        this.field_110695_H.field_78798_e = 10.0F;
        this.field_110715_k.field_78795_f = 0.0F;
        // Head angles: base, then blended toward rearing/grazing poses.
        this.field_110709_a.field_78795_f = 0.5235988F + var9;
        this.field_110709_a.field_78796_g = var8 / 57.295776F;
        this.field_110709_a.field_78795_f = var12 * (0.2617994F + var9) + var11 * 2.18166F + (1.0F - Math.max(var12, var11)) * this.field_110709_a.field_78795_f;
        this.field_110709_a.field_78796_g = var12 * var8 / 57.295776F + (1.0F - Math.max(var12, var11)) * this.field_110709_a.field_78796_g;
        this.field_110709_a.field_78797_d = var12 * -6.0F + var11 * 11.0F + (1.0F - Math.max(var12, var11)) * this.field_110709_a.field_78797_d;
        this.field_110709_a.field_78798_e = var12 * -1.0F + var11 * -10.0F + (1.0F - Math.max(var12, var11)) * this.field_110709_a.field_78798_e;
        this.field_110712_l.field_78797_d = var12 * 9.0F + var13 * this.field_110712_l.field_78797_d;
        this.field_110713_m.field_78798_e = var12 * 18.0F + var13 * this.field_110713_m.field_78798_e;
        this.field_110695_H.field_78797_d = var12 * 5.5F + var13 * this.field_110695_H.field_78797_d;
        this.field_110695_H.field_78798_e = var12 * 15.0F + var13 * this.field_110695_H.field_78798_e;
        this.field_110715_k.field_78795_f = var12 * -45.0F / 57.295776F + var13 * this.field_110715_k.field_78795_f;
        // Propagate head position to every head-attached part (ears, neck, mane, mouth).
        this.field_110705_d.field_78797_d = this.field_110709_a.field_78797_d;
        this.field_110706_e.field_78797_d = this.field_110709_a.field_78797_d;
        this.field_110703_f.field_78797_d = this.field_110709_a.field_78797_d;
        this.field_110704_g.field_78797_d = this.field_110709_a.field_78797_d;
        this.field_110716_h.field_78797_d = this.field_110709_a.field_78797_d;
        this.field_178711_b.field_78797_d = 0.02F;
        this.field_178712_c.field_78797_d = 0.0F;
        this.field_110714_j.field_78797_d = this.field_110709_a.field_78797_d;
        this.field_110705_d.field_78798_e = this.field_110709_a.field_78798_e;
        this.field_110706_e.field_78798_e = this.field_110709_a.field_78798_e;
        this.field_110703_f.field_78798_e = this.field_110709_a.field_78798_e;
        this.field_110704_g.field_78798_e = this.field_110709_a.field_78798_e;
        this.field_110716_h.field_78798_e = this.field_110709_a.field_78798_e;
        this.field_178711_b.field_78798_e = 0.02F - var14 * 1.0F;
        this.field_178712_c.field_78798_e = 0.0F + var14 * 1.0F;
        this.field_110714_j.field_78798_e = this.field_110709_a.field_78798_e;
        this.field_110705_d.field_78795_f = this.field_110709_a.field_78795_f;
        this.field_110706_e.field_78795_f = this.field_110709_a.field_78795_f;
        this.field_110703_f.field_78795_f = this.field_110709_a.field_78795_f;
        this.field_110704_g.field_78795_f = this.field_110709_a.field_78795_f;
        this.field_110716_h.field_78795_f = this.field_110709_a.field_78795_f;
        this.field_178711_b.field_78795_f = 0.0F - 0.09424778F * var14;
        this.field_178712_c.field_78795_f = 0.0F + 0.15707964F * var14;
        this.field_110714_j.field_78795_f = this.field_110709_a.field_78795_f;
        this.field_110705_d.field_78796_g = this.field_110709_a.field_78796_g;
        this.field_110706_e.field_78796_g = this.field_110709_a.field_78796_g;
        this.field_110703_f.field_78796_g = this.field_110709_a.field_78796_g;
        this.field_110704_g.field_78796_g = this.field_110709_a.field_78796_g;
        this.field_110716_h.field_78796_g = this.field_110709_a.field_78796_g;
        this.field_178711_b.field_78796_g = 0.0F;
        this.field_178712_c.field_78796_g = 0.0F;
        this.field_110714_j.field_78796_g = this.field_110709_a.field_78796_g;
        // Bags sway opposite to each other with the walk cycle.
        this.field_110687_G.field_78795_f = var20 / 5.0F;
        this.field_110695_H.field_78795_f = -var20 / 5.0F;
        float var21 = 1.5707964F;   // NOTE(review): var21 is reassigned below; var22/var23 are unused
        float var22 = 4.712389F;
        float var23 = -1.0471976F;
        float var24 = 0.2617994F * var12;
        float var25 = MathHelper.func_76134_b(var18 * 0.6F + 3.1415927F);
        // Front legs lift while rearing; lower segments follow via sin/cos offsets.
        this.field_110688_A.field_78797_d = -2.0F * var12 + 9.0F * var13;
        this.field_110688_A.field_78798_e = -2.0F * var12 + -8.0F * var13;
        this.field_110684_D.field_78797_d = this.field_110688_A.field_78797_d;
        this.field_110684_D.field_78798_e = this.field_110688_A.field_78798_e;
        this.field_110719_v.field_78797_d = this.field_110711_o.field_78797_d + MathHelper.func_76126_a(1.5707964F + var24 + var13 * -var19 * 0.5F * p_78086_3_) * 7.0F;
        this.field_110719_v.field_78798_e = this.field_110711_o.field_78798_e + MathHelper.func_76134_b(4.712389F + var24 + var13 * -var19 * 0.5F * p_78086_3_) * 7.0F;
        this.field_110721_y.field_78797_d = this.field_110722_x.field_78797_d + MathHelper.func_76126_a(1.5707964F + var24 + var13 * var19 * 0.5F * p_78086_3_) * 7.0F;
        this.field_110721_y.field_78798_e = this.field_110722_x.field_78798_e + MathHelper.func_76134_b(4.712389F + var24 + var13 * var19 * 0.5F * p_78086_3_) * 7.0F;
        float var26 = (-1.0471976F + var25) * var12 + var20 * var13;
        float var27 = (-1.0471976F + -var25) * var12 + -var20 * var13;
        this.field_110689_B.field_78797_d = this.field_110688_A.field_78797_d + MathHelper.func_76126_a(1.5707964F + var26) * 7.0F;
        this.field_110689_B.field_78798_e = this.field_110688_A.field_78798_e + MathHelper.func_76134_b(4.712389F + var26) * 7.0F;
        this.field_110685_E.field_78797_d = this.field_110684_D.field_78797_d + MathHelper.func_76126_a(1.5707964F + var27) * 7.0F;
        this.field_110685_E.field_78798_e = this.field_110684_D.field_78798_e + MathHelper.func_76134_b(4.712389F + var27) * 7.0F;
        this.field_110711_o.field_78795_f = var24 + -var19 * 0.5F * p_78086_3_ * var13;
        this.field_110719_v.field_78795_f = -0.08726646F * var12 + (-var19 * 0.5F * p_78086_3_ - Math.max(0.0F, var19 * 0.5F * p_78086_3_)) * var13;
        this.field_110718_w.field_78795_f = this.field_110719_v.field_78795_f;
        this.field_110722_x.field_78795_f = var24 + var19 * 0.5F * p_78086_3_ * var13;
        this.field_110721_y.field_78795_f = -0.08726646F * var12 + (var19 * 0.5F * p_78086_3_ - Math.max(0.0F, -var19 * 0.5F * p_78086_3_)) * var13;
        this.field_110720_z.field_78795_f = this.field_110721_y.field_78795_f;
        this.field_110688_A.field_78795_f = var26;
        this.field_110689_B.field_78795_f = (this.field_110688_A.field_78795_f + 3.1415927F * Math.max(0.0F, 0.2F + var25 * 0.2F)) * var12 + (var20 + Math.max(0.0F, var19 * 0.5F * p_78086_3_)) * var13;
        this.field_110690_C.field_78795_f = this.field_110689_B.field_78795_f;
        this.field_110684_D.field_78795_f = var27;
        this.field_110685_E.field_78795_f = (this.field_110684_D.field_78795_f + 3.1415927F * Math.max(0.0F, 0.2F - var25 * 0.2F)) * var12 + (-var20 + Math.max(0.0F, -var19 * 0.5F * p_78086_3_)) * var13;
        this.field_110686_F.field_78795_f = this.field_110685_E.field_78795_f;
        // Hooves follow their lower-leg segments exactly.
        this.field_110718_w.field_78797_d = this.field_110719_v.field_78797_d;
        this.field_110718_w.field_78798_e = this.field_110719_v.field_78798_e;
        this.field_110720_z.field_78797_d = this.field_110721_y.field_78797_d;
        this.field_110720_z.field_78798_e = this.field_110721_y.field_78798_e;
        this.field_110690_C.field_78797_d = this.field_110689_B.field_78797_d;
        this.field_110690_C.field_78798_e = this.field_110689_B.field_78798_e;
        this.field_110686_F.field_78797_d = this.field_110685_E.field_78797_d;
        this.field_110686_F.field_78798_e = this.field_110685_E.field_78798_e;

        if(var16) {
            // Position all saddle/bridle/rein parts to follow body and head.
            this.field_110696_I.field_78797_d = var12 * 0.5F + var13 * 2.0F;
            this.field_110696_I.field_78798_e = var12 * 11.0F + var13 * 2.0F;
            this.field_110697_J.field_78797_d = this.field_110696_I.field_78797_d;
            this.field_110698_K.field_78797_d = this.field_110696_I.field_78797_d;
            this.field_110691_L.field_78797_d = this.field_110696_I.field_78797_d;
            this.field_110693_N.field_78797_d = this.field_110696_I.field_78797_d;
            this.field_110692_M.field_78797_d = this.field_110696_I.field_78797_d;
            this.field_110694_O.field_78797_d = this.field_110696_I.field_78797_d;
            this.field_110687_G.field_78797_d = this.field_110695_H.field_78797_d;
            this.field_110697_J.field_78798_e = this.field_110696_I.field_78798_e;
            this.field_110698_K.field_78798_e = this.field_110696_I.field_78798_e;
            this.field_110691_L.field_78798_e = this.field_110696_I.field_78798_e;
            this.field_110693_N.field_78798_e = this.field_110696_I.field_78798_e;
            this.field_110692_M.field_78798_e = this.field_110696_I.field_78798_e;
            this.field_110694_O.field_78798_e = this.field_110696_I.field_78798_e;
            this.field_110687_G.field_78798_e = this.field_110695_H.field_78798_e;
            this.field_110696_I.field_78795_f = this.field_110715_k.field_78795_f;
            this.field_110697_J.field_78795_f = this.field_110715_k.field_78795_f;
            this.field_110698_K.field_78795_f = this.field_110715_k.field_78795_f;
            this.field_110702_R.field_78797_d = this.field_110709_a.field_78797_d;
            this.field_110701_S.field_78797_d = this.field_110709_a.field_78797_d;
            this.field_110717_i.field_78797_d = this.field_110709_a.field_78797_d;
            this.field_110700_P.field_78797_d = this.field_110709_a.field_78797_d;
            this.field_110699_Q.field_78797_d = this.field_110709_a.field_78797_d;
            this.field_110702_R.field_78798_e = this.field_110709_a.field_78798_e;
            this.field_110701_S.field_78798_e = this.field_110709_a.field_78798_e;
            this.field_110717_i.field_78798_e = this.field_110709_a.field_78798_e;
            this.field_110700_P.field_78798_e = this.field_110709_a.field_78798_e;
            this.field_110699_Q.field_78798_e = this.field_110709_a.field_78798_e;
            this.field_110702_R.field_78795_f = var9;
            this.field_110701_S.field_78795_f = var9;
            this.field_110717_i.field_78795_f = this.field_110709_a.field_78795_f;
            this.field_110700_P.field_78795_f = this.field_110709_a.field_78795_f;
            this.field_110699_Q.field_78795_f = this.field_110709_a.field_78795_f;
            this.field_110717_i.field_78796_g = this.field_110709_a.field_78796_g;
            this.field_110700_P.field_78796_g = this.field_110709_a.field_78796_g;
            this.field_110702_R.field_78796_g = this.field_110709_a.field_78796_g;
            this.field_110699_Q.field_78796_g = this.field_110709_a.field_78796_g;
            this.field_110701_S.field_78796_g = this.field_110709_a.field_78796_g;

            if(var17) {
                // Rider present: stirrups hang straight down.
                this.field_110691_L.field_78795_f = -1.0471976F;
                this.field_110692_M.field_78795_f = -1.0471976F;
                this.field_110693_N.field_78795_f = -1.0471976F;
                this.field_110694_O.field_78795_f = -1.0471976F;
                this.field_110691_L.field_78808_h = 0.0F;
                this.field_110692_M.field_78808_h = 0.0F;
                this.field_110693_N.field_78808_h = 0.0F;
                this.field_110694_O.field_78808_h = 0.0F;
            } else {
                // No rider: stirrups swing with the walk cycle.
                this.field_110691_L.field_78795_f = var20 / 3.0F;
                this.field_110692_M.field_78795_f = var20 / 3.0F;
                this.field_110693_N.field_78795_f = var20 / 3.0F;
                this.field_110694_O.field_78795_f = var20 / 3.0F;
                this.field_110691_L.field_78808_h = var20 / 5.0F;
                this.field_110692_M.field_78808_h = var20 / 5.0F;
                this.field_110693_N.field_78808_h = -var20 / 5.0F;
                this.field_110694_O.field_78808_h = -var20 / 5.0F;
            }
        }

        // Tail pitch: lifts with movement speed, capped at 0; yaw swishes when active.
        var21 = -1.3089F + p_78086_3_ * 1.5F;

        if(var21 > 0.0F) {
            var21 = 0.0F;
        }

        if(var15) {
            this.field_110712_l.field_78796_g = MathHelper.func_76134_b(var18 * 0.7F);
            var21 = 0.0F;
        } else {
            this.field_110712_l.field_78796_g = 0.0F;
        }

        // Remaining tail segments follow the first.
        this.field_110713_m.field_78796_g = this.field_110712_l.field_78796_g;
        this.field_110710_n.field_78796_g = this.field_110712_l.field_78796_g;
        this.field_110713_m.field_78797_d = this.field_110712_l.field_78797_d;
        this.field_110710_n.field_78797_d = this.field_110712_l.field_78797_d;
        this.field_110713_m.field_78798_e = this.field_110712_l.field_78798_e;
        this.field_110710_n.field_78798_e = this.field_110712_l.field_78798_e;
        this.field_110712_l.field_78795_f = var21;
        this.field_110713_m.field_78795_f = var21;
        this.field_110710_n.field_78795_f = -0.2618F + var21;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.openstack.neutron.v2.functions; import static com.google.common.base.Preconditions.checkNotNull; import javax.inject.Inject; import org.jclouds.collect.IterableWithMarker; import org.jclouds.collect.internal.Arg0ToPagedIterable; import org.jclouds.openstack.neutron.v2.NeutronApi; import org.jclouds.openstack.neutron.v2.domain.FirewallPolicy; import org.jclouds.openstack.neutron.v2.extensions.FWaaSApi; import com.google.common.base.Function; import com.google.common.base.Optional; /** * Ensures FirewallRule works as PagedIterable. */ public class FirewallPolicyToPagedIterable extends Arg0ToPagedIterable.FromCaller<FirewallPolicy, FirewallPolicyToPagedIterable> { private final NeutronApi api; @Inject protected FirewallPolicyToPagedIterable(NeutronApi api) { this.api = checkNotNull(api, "api"); } @Override protected Function<Object, IterableWithMarker<FirewallPolicy>> markerToNextForArg0(Optional<Object> arg0) { String region = arg0.isPresent() ? 
arg0.get().toString() : null; final FWaaSApi firewallApi = api.getFWaaSApi(region).get(); return new Function<Object, IterableWithMarker<FirewallPolicy>>() { @SuppressWarnings("unchecked") @Override public IterableWithMarker<FirewallPolicy> apply(Object input) { return IterableWithMarker.class.cast(firewallApi.listFirewallPolicies()); } @Override public String toString() { return "listFirewallPolicies()"; } }; } }
package com.beloo.widget.chipslayoutmanager.layouter;

import android.support.annotation.IntRange;

import java.util.Iterator;

/**
 * Base iterator over adapter positions. Subclasses supply the traversal
 * direction ({@code hasNext()}/{@code next()}); this class holds the current
 * position and the total item count and supports repositioning via
 * {@link #move(int)}.
 */
public abstract class AbstractPositionIterator implements Iterator<Integer> {
    // current adapter position the iterator points at
    int pos;
    // total number of items; positions are clamped to this upper bound
    int itemCount;

    /**
     * @param itemCount total adapter item count, must be non-negative
     * @throws IllegalArgumentException if {@code itemCount} is negative
     */
    AbstractPositionIterator(@IntRange(from = 0) int itemCount) {
        if (itemCount < 0)
            throw new IllegalArgumentException("item count couldn't be negative");
        this.itemCount = itemCount;
    }

    /**
     * Repositions the iterator, clamping to {@code itemCount} when the
     * requested position is beyond the end.
     *
     * @param pos target position, must be non-negative
     * @throws IllegalArgumentException if {@code pos} is negative
     */
    public void move(@IntRange(from = 0) int pos) {
        if (pos < 0)
            throw new IllegalArgumentException("can't move to negative position");
        // BUG FIX: the original assigned this.pos = itemCount when
        // pos >= itemCount, but then fell through and unconditionally
        // overwrote it with the raw pos, making the clamp dead code.
        // Clamp for real so the iterator never points past the end.
        this.pos = Math.min(pos, itemCount);
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException("removing not supported in position iterator");
    }
}
/*
 * Copyright 2011-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simpleworkflow.model.transform;

import java.io.ByteArrayInputStream;
import java.util.Collections;
import java.util.Map;
import java.util.List;
import java.util.regex.Pattern;

import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.simpleworkflow.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.BinaryUtils;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.IdempotentUtils;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.protocol.json.*;

/**
 * RequestCancelWorkflowExecutionRequest Marshaller
 *
 * Converts a {@code RequestCancelWorkflowExecutionRequest} into an HTTP
 * {@code Request} carrying the SWF JSON wire protocol: a POST with an
 * {@code X-Amz-Target} header and a JSON body containing the non-null
 * domain / workflowId / runId fields.
 */
public class RequestCancelWorkflowExecutionRequestMarshaller implements Marshaller<Request<RequestCancelWorkflowExecutionRequest>, RequestCancelWorkflowExecutionRequest> {

    // Factory supplying the JSON generator and content-type for this protocol version.
    private final SdkJsonProtocolFactory protocolFactory;

    public RequestCancelWorkflowExecutionRequestMarshaller(SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given model object into an HTTP request.
     *
     * @param requestCancelWorkflowExecutionRequest the model to serialize; must not be null
     * @return the fully populated HTTP request
     * @throws AmazonClientException if the argument is null or JSON serialization fails
     */
    public Request<RequestCancelWorkflowExecutionRequest> marshall(RequestCancelWorkflowExecutionRequest requestCancelWorkflowExecutionRequest) {

        if (requestCancelWorkflowExecutionRequest == null) {
            throw new AmazonClientException("Invalid argument passed to marshall(...)");
        }

        Request<RequestCancelWorkflowExecutionRequest> request = new DefaultRequest<RequestCancelWorkflowExecutionRequest>(
                requestCancelWorkflowExecutionRequest, "AmazonSimpleWorkflow");

        // The SWF JSON protocol routes by target header, not by resource path.
        request.addHeader("X-Amz-Target", "SimpleWorkflowService.RequestCancelWorkflowExecution");

        request.setHttpMethod(HttpMethodName.POST);

        request.setResourcePath("");

        try {
            final StructuredJsonGenerator jsonGenerator = protocolFactory.createGenerator();

            jsonGenerator.writeStartObject();

            // Only non-null fields are written, so absent optionals are omitted from the body.
            if (requestCancelWorkflowExecutionRequest.getDomain() != null) {
                jsonGenerator.writeFieldName("domain").writeValue(requestCancelWorkflowExecutionRequest.getDomain());
            }
            if (requestCancelWorkflowExecutionRequest.getWorkflowId() != null) {
                jsonGenerator.writeFieldName("workflowId").writeValue(requestCancelWorkflowExecutionRequest.getWorkflowId());
            }
            if (requestCancelWorkflowExecutionRequest.getRunId() != null) {
                jsonGenerator.writeFieldName("runId").writeValue(requestCancelWorkflowExecutionRequest.getRunId());
            }

            jsonGenerator.writeEndObject();

            // Attach the serialized body and the headers that describe it.
            byte[] content = jsonGenerator.getBytes();
            request.setContent(new ByteArrayInputStream(content));
            request.addHeader("Content-Length", Integer.toString(content.length));
            request.addHeader("Content-Type", protocolFactory.getContentType());
        } catch (Throwable t) {
            // Generated-code convention: wrap any serialization failure, preserving the cause.
            throw new AmazonClientException("Unable to marshall request to JSON: " + t.getMessage(), t);
        }

        return request;
    }

}
/* * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.elasticloadbalancing.model; /** * <p> * The HealthCheck data type. * </p> */ public class HealthCheck { /** * Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> */ private String target; /** * Specifies the approximate interval, in seconds, between health checks * of an individual instance. 
* <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> */ private Integer interval; /** * Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. </note> * <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> */ private Integer timeout; /** * Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> */ private Integer unhealthyThreshold; /** * Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> */ private Integer healthyThreshold; /** * Default constructor for a new HealthCheck object. Callers should use the * setter or fluent setter (with...) methods to initialize this object after creating it. */ public HealthCheck() {} /** * Constructs a new HealthCheck object. * Callers should use the setter or fluent setter (with...) methods to * initialize any additional object members. * * @param target Specifies the instance being checked. The protocol is * either TCP, HTTP, HTTPS, or SSL. The range of valid ports is one (1) * through 65535. <note> <p> TCP is the default, specified as a TCP: port * pair, for example "TCP:5000". In this case a healthcheck simply * attempts to open a TCP connection to the instance on the specified * port. Failure to connect within the configured timeout is considered * unhealthy. <p>SSL is also specified as SSL: port pair, for example, * SSL:5000. <p> For HTTP or HTTPS protocol, the situation is different. * You have to include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. 
Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> * @param interval Specifies the approximate interval, in seconds, * between health checks of an individual instance. * @param timeout Specifies the amount of time, in seconds, during which * no response means a failed health probe. <note> This value must be * less than the <i>Interval</i> value. </note> * @param unhealthyThreshold Specifies the number of consecutive health * probe failures required before moving the instance to the * <i>Unhealthy</i> state. * @param healthyThreshold Specifies the number of consecutive health * probe successes required before moving the instance to the * <i>Healthy</i> state. */ public HealthCheck(String target, Integer interval, Integer timeout, Integer unhealthyThreshold, Integer healthyThreshold) { this.target = target; this.interval = interval; this.timeout = timeout; this.unhealthyThreshold = unhealthyThreshold; this.healthyThreshold = healthyThreshold; } /** * Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. Any answer other * than "200 OK" within the timeout period is considered unhealthy. 
<p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> * * @return Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> */ public String getTarget() { return target; } /** * Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. 
Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> * * @param target Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> */ public void setTarget(String target) { this.target = target; } /** * Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". 
In this case, a HTTP GET request is * issued to the instance on the given port and path. Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> * <p> * Returns a reference to this object so that method calls can be chained together. * * @param target Specifies the instance being checked. The protocol is either TCP, * HTTP, HTTPS, or SSL. The range of valid ports is one (1) through * 65535. <note> <p> TCP is the default, specified as a TCP: port pair, * for example "TCP:5000". In this case a healthcheck simply attempts to * open a TCP connection to the instance on the specified port. Failure * to connect within the configured timeout is considered unhealthy. * <p>SSL is also specified as SSL: port pair, for example, SSL:5000. <p> * For HTTP or HTTPS protocol, the situation is different. You have to * include a ping path in the string. HTTP is specified as a * HTTP:port;/;PathToPing; grouping, for example * "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is * issued to the instance on the given port and path. Any answer other * than "200 OK" within the timeout period is considered unhealthy. <p> * The total length of the HTTP ping target needs to be 1024 16-bit * Unicode characters or less. </note> * * @return A reference to this updated object so that method calls can be chained * together. */ public HealthCheck withTarget(String target) { this.target = target; return this; } /** * Specifies the approximate interval, in seconds, between health checks * of an individual instance. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> * * @return Specifies the approximate interval, in seconds, between health checks * of an individual instance. */ public Integer getInterval() { return interval; } /** * Specifies the approximate interval, in seconds, between health checks * of an individual instance. 
* <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> * * @param interval Specifies the approximate interval, in seconds, between health checks * of an individual instance. */ public void setInterval(Integer interval) { this.interval = interval; } /** * Specifies the approximate interval, in seconds, between health checks * of an individual instance. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> * * @param interval Specifies the approximate interval, in seconds, between health checks * of an individual instance. * * @return A reference to this updated object so that method calls can be chained * together. */ public HealthCheck withInterval(Integer interval) { this.interval = interval; return this; } /** * Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. </note> * <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> * * @return Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. </note> */ public Integer getTimeout() { return timeout; } /** * Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. </note> * <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> * * @param timeout Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. </note> */ public void setTimeout(Integer timeout) { this.timeout = timeout; } /** * Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. 
</note> * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>1 - 300<br/> * * @param timeout Specifies the amount of time, in seconds, during which no response * means a failed health probe. <note> This value must be less than the * <i>Interval</i> value. </note> * * @return A reference to this updated object so that method calls can be chained * together. */ public HealthCheck withTimeout(Integer timeout) { this.timeout = timeout; return this; } /** * Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> * * @return Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. */ public Integer getUnhealthyThreshold() { return unhealthyThreshold; } /** * Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> * * @param unhealthyThreshold Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. */ public void setUnhealthyThreshold(Integer unhealthyThreshold) { this.unhealthyThreshold = unhealthyThreshold; } /** * Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> * * @param unhealthyThreshold Specifies the number of consecutive health probe failures required * before moving the instance to the <i>Unhealthy</i> state. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public HealthCheck withUnhealthyThreshold(Integer unhealthyThreshold) { this.unhealthyThreshold = unhealthyThreshold; return this; } /** * Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> * * @return Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. */ public Integer getHealthyThreshold() { return healthyThreshold; } /** * Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> * * @param healthyThreshold Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. */ public void setHealthyThreshold(Integer healthyThreshold) { this.healthyThreshold = healthyThreshold; } /** * Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>2 - 10<br/> * * @param healthyThreshold Specifies the number of consecutive health probe successes required * before moving the instance to the <i>Healthy</i> state. * * @return A reference to this updated object so that method calls can be chained * together. */ public HealthCheck withHealthyThreshold(Integer healthyThreshold) { this.healthyThreshold = healthyThreshold; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (target != null) sb.append("Target: " + target + ", "); if (interval != null) sb.append("Interval: " + interval + ", "); if (timeout != null) sb.append("Timeout: " + timeout + ", "); if (unhealthyThreshold != null) sb.append("UnhealthyThreshold: " + unhealthyThreshold + ", "); if (healthyThreshold != null) sb.append("HealthyThreshold: " + healthyThreshold + ", "); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getTarget() == null) ? 0 : getTarget().hashCode()); hashCode = prime * hashCode + ((getInterval() == null) ? 0 : getInterval().hashCode()); hashCode = prime * hashCode + ((getTimeout() == null) ? 0 : getTimeout().hashCode()); hashCode = prime * hashCode + ((getUnhealthyThreshold() == null) ? 0 : getUnhealthyThreshold().hashCode()); hashCode = prime * hashCode + ((getHealthyThreshold() == null) ? 
0 : getHealthyThreshold().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof HealthCheck == false) return false; HealthCheck other = (HealthCheck)obj; if (other.getTarget() == null ^ this.getTarget() == null) return false; if (other.getTarget() != null && other.getTarget().equals(this.getTarget()) == false) return false; if (other.getInterval() == null ^ this.getInterval() == null) return false; if (other.getInterval() != null && other.getInterval().equals(this.getInterval()) == false) return false; if (other.getTimeout() == null ^ this.getTimeout() == null) return false; if (other.getTimeout() != null && other.getTimeout().equals(this.getTimeout()) == false) return false; if (other.getUnhealthyThreshold() == null ^ this.getUnhealthyThreshold() == null) return false; if (other.getUnhealthyThreshold() != null && other.getUnhealthyThreshold().equals(this.getUnhealthyThreshold()) == false) return false; if (other.getHealthyThreshold() == null ^ this.getHealthyThreshold() == null) return false; if (other.getHealthyThreshold() != null && other.getHealthyThreshold().equals(this.getHealthyThreshold()) == false) return false; return true; } }
package e.a.a.a.y0.d.b;

import e.x.c.i;
import kotlin.jvm.internal.DefaultConstructorMarker;

/**
 * Decompiled (JADX) and obfuscated class. NOTE(review): names are
 * ProGuard-mangled; judging only by the constructor argument names visible in
 * the null checks ("elementType", "internalName"), this presumably models a
 * JVM type descriptor with array / named-object / third variants — TODO
 * confirm against the original (pre-obfuscation) source.
 */
public abstract class h {
    /**
     * Variant wrapping another {@code h}; the wrapped value is checked with
     * the message name "elementType" — presumably an array type.
     */
    public static final class a extends h {
        // The wrapped element type (field name obfuscated to 'a').
        public final h a;

        /* JADX INFO: super call moved to the top of the method (can break code semantics) */
        public a(h hVar) {
            super((DefaultConstructorMarker) null);
            // Kotlin intrinsics null check; "elementType" is the parameter's original name.
            i.e(hVar, "elementType");
            this.a = hVar;
        }
    }

    /**
     * Variant holding a String checked with the message name "internalName" —
     * presumably a JVM internal class name (e.g. "java/lang/String").
     */
    public static final class b extends h {
        // The internal name string (field name obfuscated to 'b' -> 'a').
        public final String a;

        /* JADX INFO: super call moved to the top of the method (can break code semantics) */
        public b(String str) {
            super((DefaultConstructorMarker) null);
            // Kotlin intrinsics null check; "internalName" is the parameter's original name.
            i.e(str, "internalName");
            this.a = str;
        }
    }

    /**
     * Variant holding an {@code e.a.a.a.y0.j.w.c}; no null check is emitted,
     * so the value may be nullable. NOTE(review): semantics of the wrapped
     * type are not visible here.
     */
    public static final class c extends h {
        public final e.a.a.a.y0.j.w.c a;

        public c(e.a.a.a.y0.j.w.c cVar) {
            super((DefaultConstructorMarker) null);
            this.a = cVar;
        }
    }

    // Public no-arg constructor (kept by the decompiler).
    public h() {
    }

    // Synthetic Kotlin constructor; the marker parameter is unused by design.
    public h(DefaultConstructorMarker defaultConstructorMarker) {
    }

    /**
     * Delegates rendering to {@code j.a.c(this)} — a same-package obfuscated
     * helper not visible in this file.
     */
    public String toString() {
        return j.a.c(this);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.dataframe.transforms;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;

/**
 * Round-trip xContent tests for {@code DataFrameTransformCheckpointingInfo}.
 */
public class DataFrameTransformCheckpointingInfoTests extends ESTestCase {

    /** Builds a random checkpointing-info instance from random checkpoint stats. */
    public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
        return new DataFrameTransformCheckpointingInfo(
            DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
            DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
            randomLongBetween(0, 10000));
    }

    /**
     * Serializes {@code info}; checkpoint stats are emitted only when their
     * timestamp is positive, matching the server-side rendering.
     */
    public static void toXContent(DataFrameTransformCheckpointingInfo info, XContentBuilder builder) throws IOException {
        builder.startObject();
        DataFrameTransformCheckpointStats last = info.getLast();
        if (last.getTimestampMillis() > 0) {
            builder.field(DataFrameTransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName());
            DataFrameTransformCheckpointStatsTests.toXContent(last, builder);
        }
        DataFrameTransformCheckpointStats next = info.getNext();
        if (next.getTimestampMillis() > 0) {
            builder.field(DataFrameTransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName());
            DataFrameTransformCheckpointStatsTests.toXContent(next, builder);
        }
        builder.field(DataFrameTransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind());
        builder.endObject();
    }

    /** Serialize -> parse round trip; unknown fields are rejected. */
    public void testFromXContent() throws IOException {
        xContentTester(
                this::createParser,
                DataFrameTransformCheckpointingInfoTests::randomDataFrameTransformCheckpointingInfo,
                DataFrameTransformCheckpointingInfoTests::toXContent,
                DataFrameTransformCheckpointingInfo::fromXContent)
            .supportsUnknownFields(false)
            .test();
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.containerregistry.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

/** The properties of the import pipeline source. */
@Fluent
public final class ImportPipelineSourceProperties {
    // Excluded from JSON serialization; used only by validate().
    @JsonIgnore private final ClientLogger logger = new ClientLogger(ImportPipelineSourceProperties.class);

    /*
     * The type of source for the import pipeline.
     */
    @JsonProperty(value = "type")
    private PipelineSourceType type;

    /*
     * The source uri of the import pipeline.
     * When 'AzureStorageBlob':
     * "https://accountName.blob.core.windows.net/containerName/blobName"
     * When 'AzureStorageBlobContainer':
     * "https://accountName.blob.core.windows.net/containerName"
     */
    @JsonProperty(value = "uri")
    private String uri;

    /*
     * The key vault secret uri to obtain the source storage SAS token.
     * Required: enforced by validate(), not by the serializer.
     */
    @JsonProperty(value = "keyVaultUri", required = true)
    private String keyVaultUri;

    /**
     * Get the type property: The type of source for the import pipeline.
     *
     * @return the type value.
     */
    public PipelineSourceType type() {
        return this.type;
    }

    /**
     * Set the type property: The type of source for the import pipeline.
     *
     * @param type the type value to set.
     * @return the ImportPipelineSourceProperties object itself.
     */
    public ImportPipelineSourceProperties withType(PipelineSourceType type) {
        this.type = type;
        return this;
    }

    /**
     * Get the uri property: The source uri of the import pipeline. When 'AzureStorageBlob':
     * "https://accountName.blob.core.windows.net/containerName/blobName" When 'AzureStorageBlobContainer':
     * "https://accountName.blob.core.windows.net/containerName".
     *
     * @return the uri value.
     */
    public String uri() {
        return this.uri;
    }

    /**
     * Set the uri property: The source uri of the import pipeline. When 'AzureStorageBlob':
     * "https://accountName.blob.core.windows.net/containerName/blobName" When 'AzureStorageBlobContainer':
     * "https://accountName.blob.core.windows.net/containerName".
     *
     * @param uri the uri value to set.
     * @return the ImportPipelineSourceProperties object itself.
     */
    public ImportPipelineSourceProperties withUri(String uri) {
        this.uri = uri;
        return this;
    }

    /**
     * Get the keyVaultUri property: The key vault secret uri to obtain the source storage SAS token.
     *
     * @return the keyVaultUri value.
     */
    public String keyVaultUri() {
        return this.keyVaultUri;
    }

    /**
     * Set the keyVaultUri property: The key vault secret uri to obtain the source storage SAS token.
     *
     * @param keyVaultUri the keyVaultUri value to set.
     * @return the ImportPipelineSourceProperties object itself.
     */
    public ImportPipelineSourceProperties withKeyVaultUri(String keyVaultUri) {
        this.keyVaultUri = keyVaultUri;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (keyVaultUri() == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        "Missing required property keyVaultUri in model ImportPipelineSourceProperties"));
        }
    }
}
/*
 * Copyright 2018 ARP Network
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.arpnetwork.arpdevice.contracts.api;

import org.arpnetwork.arpdevice.config.Config;
import org.arpnetwork.arpdevice.ui.wallet.Wallet;

import org.web3j.crypto.Credentials;
import org.web3j.crypto.RawTransaction;
import org.web3j.crypto.TransactionEncoder;
import org.web3j.protocol.core.DefaultBlockParameterName;
import org.web3j.protocol.core.methods.request.Transaction;
import org.web3j.protocol.core.methods.response.EthEstimateGas;
import org.web3j.protocol.core.methods.response.EthGetTransactionCount;
import org.web3j.protocol.core.methods.response.EthTransaction;
import org.web3j.protocol.core.methods.response.TransactionReceipt;
import org.web3j.protocol.exceptions.TransactionException;
import org.web3j.tx.response.PollingTransactionReceiptProcessor;
import org.web3j.utils.Numeric;

import java.io.IOException;
import java.math.BigInteger;
import java.util.concurrent.ExecutionException;

/**
 * Helpers around web3j for building, signing and tracking Ethereum
 * transactions used by the ARP device.
 */
public class TransactionAPI {
    // Fallback gas limit used when eth_estimateGas fails or returns an error.
    private static final String DEFAULT_GAS_LIMIT = "400000";

    /**
     * Builds and signs a raw contract transaction, returning its hex encoding.
     *
     * @param gasPrice        gas price in wei
     * @param gasLimit        gas limit for the transaction
     * @param contractAddress target contract address
     * @param data            ABI-encoded call data
     * @param credentials     signing credentials
     * @return hex string of the signed transaction, ready for eth_sendRawTransaction
     */
    public static String getRawTransaction(BigInteger gasPrice, BigInteger gasLimit,
            String contractAddress, String data, Credentials credentials) {
        RawTransaction transaction = getTransaction(gasPrice, gasLimit, contractAddress, data, credentials);
        byte[] signedMessage = TransactionEncoder.signMessage(transaction, credentials);
        return Numeric.toHexString(signedMessage);
    }

    /**
     * Estimates the gas limit for {@code transaction}, falling back to
     * {@link #DEFAULT_GAS_LIMIT} when the node reports an error.
     */
    public static BigInteger getTransactionGasLimit(Transaction transaction) throws IOException {
        EthEstimateGas gas = EtherAPI.getWeb3J().ethEstimateGas(transaction).send();
        return gas.hasError() ? new BigInteger(DEFAULT_GAS_LIMIT) : gas.getAmountUsed();
    }

    /**
     * Asynchronous variant of {@link #getTransactionGasLimit(Transaction)};
     * best-effort: any failure yields {@link #DEFAULT_GAS_LIMIT}.
     */
    public static BigInteger getAsyncTransactionGasLimit(Transaction transaction) {
        EthEstimateGas gas = null;
        try {
            gas = EtherAPI.getWeb3J().ethEstimateGas(transaction).sendAsync().get();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag instead of silently swallowing it,
            // so callers further up the stack can observe the interruption.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ignored) {
            // Best-effort: fall through to the default gas limit below.
        }
        return (gas == null || gas.hasError()) ? new BigInteger(DEFAULT_GAS_LIMIT) : gas.getAmountUsed();
    }

    /** Estimates the gas limit for calling {@code functionString} on {@code contractAddress} from the local wallet. */
    public static BigInteger estimateFunctionGasLimit(String functionString, String contractAddress) {
        String ownerAddress = Wallet.get().getAddress();
        Transaction transaction = Transaction.createEthCallTransaction(ownerAddress, contractAddress, functionString);
        return TransactionAPI.getAsyncTransactionGasLimit(transaction);
    }

    /**
     * Returns {@code true} when the transaction has not been mined yet
     * (no block number on the node's view of it).
     */
    public static boolean isTransactionPending(String transactionHash) throws IOException {
        EthTransaction transaction = EtherAPI.getWeb3J().ethGetTransactionByHash(transactionHash).send();
        return transaction.getTransaction().getBlockNumberRaw() == null;
    }

    /** Blocks (polling) until a receipt for {@code transactionHash} is available. */
    public static TransactionReceipt pollingTransaction(final String transactionHash)
            throws IOException, TransactionException {
        PollingTransactionReceiptProcessor processor = new PollingTransactionReceiptProcessor(
                EtherAPI.getWeb3J(),
                Config.DEFAULT_POLLING_FREQUENCY,
                Config.DEFAULT_POLLING_ATTEMPTS_PER_TX_HASH);
        return processor.waitForTransactionReceipt(transactionHash);
    }

    /**
     * Interprets a receipt status field. Pre-Byzantium receipts have no status
     * (null), which is treated as success.
     */
    public static boolean isStatusOK(String status) {
        if (null == status) {
            return true;
        }
        BigInteger statusQuantity = Numeric.decodeQuantity(status);
        return BigInteger.ONE.equals(statusQuantity);
    }

    /**
     * Fetches the pending-nonce for {@code address}.
     *
     * @return the nonce, or {@code null} if the node could not be reached
     */
    private static BigInteger getNonce(String address) {
        try {
            EthGetTransactionCount transactionCount = EtherAPI.getWeb3J()
                    .ethGetTransactionCount(address, DefaultBlockParameterName.PENDING).send();
            return transactionCount.getTransactionCount();
        } catch (IOException e) {
            e.printStackTrace();
            // FIX: previously this returned getTransactionCount() on a blank,
            // default-constructed response, which throws NullPointerException.
            return null;
        }
    }

    /** Async variant of {@link #getNonce(String)}; propagates failures to the caller. */
    private static BigInteger getNonceAsync(String address) throws ExecutionException, InterruptedException {
        EthGetTransactionCount transactionCount = EtherAPI.getWeb3J()
                .ethGetTransactionCount(address, DefaultBlockParameterName.PENDING).sendAsync().get();
        return transactionCount.getTransactionCount();
    }

    /** Builds an unsigned contract transaction with the wallet's current pending nonce. */
    private static RawTransaction getTransaction(BigInteger gasPrice, BigInteger gasLimit,
            String contractAddress, String data, Credentials credentials) {
        BigInteger nonce = null;
        try {
            nonce = getNonceAsync(credentials.getAddress());
        } catch (ExecutionException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
            // FIX: preserve the interrupt status for callers.
            Thread.currentThread().interrupt();
        }
        // NOTE(review): on failure nonce stays null and is passed through to
        // createTransaction, matching the original behavior — confirm callers
        // handle the resulting downstream error.
        return RawTransaction.createTransaction(nonce, gasPrice, gasLimit, contractAddress, data);
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.xml; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.annotation.ElementType; /** * Annotates methods with return values that should be merged with <b>AND</b> semantics by {@link ModelMerger} * * @author Gregory.Shrago */ @Target({ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) public @interface Intersect { }
package com.semantalytics.jena.function.console;

import org.apache.jena.sparql.expr.NodeValue;
import org.apache.jena.sparql.function.FunctionBase0;

import static org.apache.jena.sparql.expr.NodeValue.*;
import static org.fusesource.jansi.Ansi.Attribute.*;
import static org.fusesource.jansi.Ansi.ansi;

/**
 * Zero-argument SPARQL function producing the ANSI escape sequence that
 * switches console text blinking off.
 */
public class BlinkOff extends FunctionBase0 {

    /** Function IRI this implementation is registered under. */
    public static final String name = ConsoleVocabulary.blinkOff.stringValue();

    @Override
    public NodeValue exec() {
        // Render the BLINK_OFF attribute as its raw escape string, then wrap
        // it as an xsd:string literal.
        final String escapeSequence = ansi().a(BLINK_OFF).toString();
        return makeString(escapeSequence);
    }
}
/*
 * Copyright 2000-2016 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.client.connectors.grid;

import com.vaadin.shared.ui.Connect;
import com.vaadin.shared.ui.grid.renderers.LocalDateRendererState;

/**
 * A connector for LocalDateRenderer.
 * <p>
 * The server-side Renderer operates on {@code LocalDate}s, but the data is
 * serialized as a string, and displayed as-is on the client side. This is to be
 * able to support the server's locale.
 *
 * @since 8.1
 * @author Vaadin Ltd
 */
@Connect(com.vaadin.ui.renderers.LocalDateRenderer.class)
public class LocalDateRendererConnector extends TextRendererConnector {
    @Override
    public LocalDateRendererState getState() {
        // Covariant narrowing of the shared state: the framework guarantees
        // the state instance for this connector is a LocalDateRendererState.
        return (LocalDateRendererState) super.getState();
    }
}
package Pages;

import java.util.Scanner;

/**
 * Console home page: shows the main menu in a loop and dispatches to login,
 * sign-up, query listing, or exit.
 */
public class _0_Home {

    // Shared scanner over System.in. Intentionally never closed: closing it
    // would close System.in for the rest of the application.
    static Scanner sc;

    /**
     * Runs the home-page menu loop until the user chooses Exit.
     *
     * @throws Exception propagated from the pages this menu dispatches to
     */
    public static void run() throws Exception {
        // FIX: create the Scanner once instead of allocating a new
        // Scanner(System.in) on every loop iteration.
        sc = new Scanner(System.in);
        while (true) {
            System.out.println("###### Home Page #######");
            menu();
            System.out.print("please select an option: ");
            // Robustness: previously sc.nextInt() threw InputMismatchException
            // on non-numeric input; discard the bad token and re-prompt instead.
            if (!sc.hasNextInt()) {
                sc.next();
                System.out.println("option not available, select options from 1-4");
                continue;
            }
            int choice = sc.nextInt();
            switch (choice) {
                case 1: login(); break;
                case 2: signUp(); break;
                case 3: showQueries(); break;
                case 4: exit(); break;
                default:
                    // FIX: corrected "avaliable" -> "available" in the message.
                    System.out.println("option not available, select options from 1-4");
            }
        }
    }

    /** Prints the menu choices. */
    static void menu() {
        System.out.println("1. Login\n" +
                "2. Sign Up\n" +
                "3. showQueries\n" +
                "4. Exit");
    }

    /** Dispatches to the login page. */
    static void login() throws Exception {
        _0_Login.run();
    }

    /** Dispatches to the user-type selection page for sign-up. */
    static void signUp() throws Exception {
        _3_User_Type.run();
    }

    /** Placeholder: listing of stored queries is not implemented yet. */
    static void showQueries() {
        //Code to show the queries we have
        return;
    }

    /** Prints a goodbye message and terminates the JVM. */
    static void exit() {
        System.out.println("Exiting");
        System.exit(0);
    }
}
package com.ust.calc.calculadora.clients; public interface EmployeeDeleteClient { /** * Call other service where is the datasource with employees */ public void deleteEmployee(String id); }
package com.example.lin.a415.model; /** * Created by lin on 2018/4/15. */ public class SwitchVideoModel { private String url; private String name; public SwitchVideoModel(String name, String url) { this.name = name; this.url = url; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public String getName() { return name; } public void setName(String name) { this.name = name; } @Override public String toString() { return this.name; } }
/*
 * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package java.nio.channels;

// SDK stub: only the API surface is real. Every concrete method body throws
// RuntimeException("Stub!"); the platform supplies the actual implementation
// at runtime. Do not edit behavior here — it is compile-time scaffolding.
public abstract class ServerSocketChannel extends java.nio.channels.spi.AbstractSelectableChannel
        implements java.nio.channels.NetworkChannel {

    // Stub artifact: the provider argument is ignored and null is forwarded to
    // the superclass; the real implementation passes the provider through.
    protected ServerSocketChannel(java.nio.channels.spi.SelectorProvider provider) {
        super((java.nio.channels.spi.SelectorProvider)null);
        throw new RuntimeException("Stub!");
    }

    public static java.nio.channels.ServerSocketChannel open() throws java.io.IOException {
        throw new RuntimeException("Stub!");
    }

    public final int validOps() {
        throw new RuntimeException("Stub!");
    }

    public final java.nio.channels.ServerSocketChannel bind(java.net.SocketAddress local) throws java.io.IOException {
        throw new RuntimeException("Stub!");
    }

    public abstract java.nio.channels.ServerSocketChannel bind(java.net.SocketAddress local, int backlog) throws java.io.IOException;

    public abstract <T> java.nio.channels.ServerSocketChannel setOption(java.net.SocketOption<T> name, T value) throws java.io.IOException;

    public abstract java.net.ServerSocket socket();

    public abstract java.nio.channels.SocketChannel accept() throws java.io.IOException;

    public abstract java.net.SocketAddress getLocalAddress() throws java.io.IOException;
}
package io.github.isanwenyu.demo;

import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumentation test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() throws Exception {
        // Context of the app under test. getTargetContext() returns the
        // application-under-test context, so its package name is the app id.
        Context appContext = InstrumentationRegistry.getTargetContext();

        assertEquals("io.github.isanwenyu.demo", appContext.getPackageName());
    }
}
/*
 * Copyright 2006-2008 Sxip Identity Corporation
 */
package org.openid4java.server;

import org.openid4java.association.Association;
import org.openid4java.association.AssociationException;

import java.util.*;

import org.springframework.jdbc.core.support.JdbcDaoSupport;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.IncorrectResultSizeDataAccessException;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * JDBC implementation for the ServerAssociationStore interface.
 * <p>
 * The JdbcServerAssociation store requires a javax.sql.DataSource to be
 * configured and passed in to it with the setDataSource setter method.
 * The table name also needs to be specified, either through the constructor,
 * or through the setTableName setter.
 * <p>
 * The specified table must have the following structure:
 * <ul>
 * <li>handle : string : primary key</li>
 * <li>type : string</li>
 * <li>mackey : string</li>
 * <li>expdate : date</li>
 * </ul>
 *
 * @author Marius Scurtescu, Johnny Bufu
 */
public class JdbcServerAssociationStore extends JdbcDaoSupport
        implements ServerAssociationStore {
    private static Log _log = LogFactory.getLog(JdbcServerAssociationStore.class);
    private static final boolean DEBUG = _log.isDebugEnabled();

    // NOTE(review): java.util.Random is not cryptographically strong. If
    // association handles must be unpredictable, consider SecureRandom.
    private static Random _random = new Random(System.currentTimeMillis());

    private static final int CLEANUP_INTERVAL = 60 * 1000; // 1 min in millis
    // Timestamp of the last expired-row sweep; shared across instances.
    // NOTE(review): not volatile, so concurrent callers may occasionally run
    // an extra cleanup — harmless duplicate work, not a correctness issue.
    private static long _lastCleanup = 0;

    private String _tableName;

    public JdbcServerAssociationStore() {
    }

    public JdbcServerAssociationStore(String tableName) {
        _tableName = tableName;
    }

    public String getTableName() {
        return _tableName;
    }

    public void setTableName(String tableName) {
        this._tableName = tableName;
    }

    /**
     * Creates, persists and returns a new association of the given type.
     * Retries up to 5 times on insert failure (e.g. a random handle collision
     * on the primary key).
     *
     * @param type     association type (HMAC-SHA1 / HMAC-SHA256)
     * @param expiryIn lifetime in seconds
     * @throws AssociationException if no association could be stored
     */
    public Association generate(String type, int expiryIn)
            throws AssociationException {
        // Opportunistic, rate-limited purge of expired rows.
        cleanupExpired();

        String sql = "INSERT INTO " + _tableName +
                " (handle, type, mackey, expdate) VALUES (?,?,?,?)";

        JdbcTemplate jdbcTemplate = getJdbcTemplate();

        int attemptsLeft = 5;

        while (attemptsLeft > 0) {
            try {
                String handle = Long.toHexString(_random.nextLong());

                Association association =
                        Association.generate(type, handle, expiryIn);

                // The MAC key is stored Base64-encoded.
                int cnt = jdbcTemplate.update(sql,
                        new Object[]{
                                association.getHandle(),
                                association.getType(),
                                new String(Base64.encodeBase64(
                                        association.getMacKey().getEncoded())),
                                association.getExpiry()
                        });

                if (cnt == 1) {
                    if (DEBUG)
                        _log.debug("Generated association, handle: " + handle +
                                " type: " + type +
                                " expires in: " + expiryIn + " seconds.");

                    return association;
                }
            } catch (DataAccessException e) {
                _log.error("Error generating association; attempts left: "
                        + (attemptsLeft - 1), e);
            }

            attemptsLeft--;
        }

        throw new AssociationException(
                "JDBCServerAssociationStore: Error generating association.");
    }

    /**
     * Loads the association stored under {@code handle}, or {@code null} when
     * it is missing or its row is unreadable.
     */
    public Association load(String handle) {
        try {
            String sql = "SELECT type,mackey,expdate FROM " + _tableName +
                    " WHERE handle=?";

            JdbcTemplate jdbcTemplate = getJdbcTemplate();

            Map res = jdbcTemplate.queryForMap(sql, new Object[]{handle});

            String type = (String) res.get("type");
            String macKey = (String) res.get("mackey");
            Date expDate = (Date) res.get("expdate");

            if (type == null || macKey == null || expDate == null)
                // FIX: corrected "retrived" -> "retrieved" in the message.
                throw new AssociationException("Invalid association data " +
                        "retrieved from database; cannot create Association " +
                        "object for handle: " + handle);

            Association assoc;

            if (Association.TYPE_HMAC_SHA1.equals(type))
                assoc = Association.createHmacSha1(handle,
                        Base64.decodeBase64(macKey.getBytes()), expDate);
            else if (Association.TYPE_HMAC_SHA256.equals(type))
                assoc = Association.createHmacSha256(handle,
                        Base64.decodeBase64(macKey.getBytes()), expDate);
            else
                throw new AssociationException("Invalid association type " +
                        "retrieved from database: " + type);

            if (DEBUG)
                _log.debug("Retrieved association for handle: " + handle +
                        " from table: " + _tableName);

            return assoc;
        } catch (AssociationException ase) {
            _log.error("Error retrieving association from table: " +
                    _tableName, ase);
            return null;
        } catch (IncorrectResultSizeDataAccessException rse) {
            _log.warn("Association not found for handle: " + handle +
                    " in the table: " + _tableName);
            return null;
        } catch (DataAccessException dae) {
            // FIX: added the missing space before "from table:" in the message.
            _log.error("Error retrieving association for handle: " + handle +
                    " from table: " + _tableName, dae);
            return null;
        }
    }

    /** Deletes the association stored under {@code handle}; logs anomalies. */
    public void remove(String handle) {
        try {
            String sql = "DELETE FROM " + _tableName + " WHERE handle=?";

            JdbcTemplate jdbcTemplate = getJdbcTemplate();

            int cnt = jdbcTemplate.update(sql, new Object[]{handle});

            if (cnt == 1 && DEBUG)
                _log.debug("Removed association, handle: " + handle +
                        " from table: " + _tableName);

            if (cnt != 1)
                _log.warn("Trying to remove handle: " + handle +
                        " from table: " + _tableName +
                        "; affected entries: " + cnt);
        } catch (Exception e) {
            _log.error("Error removing association from table: " +
                    _tableName, e);
        }
    }

    /**
     * Deletes expired rows, at most once per {@link #CLEANUP_INTERVAL};
     * called from {@link #generate}.
     */
    private void cleanupExpired() {
        if (System.currentTimeMillis() - _lastCleanup < CLEANUP_INTERVAL)
            return;

        try {
            String sql = "DELETE FROM " + _tableName + " WHERE expdate<?";

            JdbcTemplate jdbcTemplate = getJdbcTemplate();

            Date now = new Date();

            int cnt = jdbcTemplate.update(sql, new Object[]{now});

            _log.debug("Cleaned " + cnt + " expired associations from table: "
                    + _tableName);

            _lastCleanup = System.currentTimeMillis();
        } catch (Exception e) {
            _log.error("Error removing expired associations from table: " +
                    _tableName, e);
        }
    }
}
package com.udacity.jdnd.course3.critter.entity;

import com.udacity.jdnd.course3.critter.pet.PetType;
import org.hibernate.annotations.ManyToAny; // NOTE(review): unused import — safe to delete.
import org.hibernate.annotations.Nationalized;

import javax.persistence.*;
import java.time.LocalDate;
import java.util.List;

/**
 * JPA entity representing a customer's pet.
 */
@Entity
public class Pet {

    @Id
    @GeneratedValue
    private Long id;

    // NOTE(review): no @Enumerated here — if PetType is an enum, JPA defaults
    // to ORDINAL mapping, which breaks if enum constants are reordered;
    // confirm STRING mapping was not intended.
    private PetType type;

    // Stored as a national-character (NVARCHAR) column to support Unicode names.
    @Nationalized
    private String name;

    // Owning side of the pet->customer relation; FK column customer_id,
    // loaded lazily.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "customer_id")
    private Customer customer;

    private LocalDate birthDate;

    private String notes;

    // Inverse side: Schedule.pets owns the many-to-many join table.
    @ManyToMany(mappedBy = "pets")
    private List<Schedule> schedules;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public PetType getType() {
        return type;
    }

    public void setType(PetType type) {
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Customer getCustomer() {
        return customer;
    }

    public void setCustomer(Customer customer) {
        this.customer = customer;
    }

    public LocalDate getBirthDate() {
        return birthDate;
    }

    public void setBirthDate(LocalDate birthDate) {
        this.birthDate = birthDate;
    }

    public String getNotes() {
        return notes;
    }

    public void setNotes(String notes) {
        this.notes = notes;
    }

    public List<Schedule> getSchedules() {
        return schedules;
    }

    public void setSchedules(List<Schedule> schedules) {
        this.schedules = schedules;
    }
}
/*
 * Copyright 2008-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.hasor.dbvisitor.dal.session;
import net.hasor.cobble.CollectionUtils;

import java.util.*;
import java.util.function.BiConsumer;

/**
 * Presents several Maps merged as a single Map view.
 * (original note: 可以将多个Map合并成一个Map对象进行操作 — merge multiple Maps into one.)
 * <p>
 * Lookup order: {@code unmerged} first, then each appended map in insertion
 * order. Maps appended with {@code keyLocked == true} must keep their key set:
 * "removing" an entry from a locked map nulls its value instead of deleting
 * the key (see {@link #remove} and {@link #clear}).
 *
 * @version : 2016-07-17
 * @author 赵永春 (zyc@hasor.net)
 */
class MergedMap<K, T> extends AbstractMap<K, T> {
    // Writes for keys not present in any appended map land here.
    private final Map<K, T>       unmerged  = new HashMap<>();
    // Appended maps, in lookup order; parallel to keyLocked.
    private final List<Map<K, T>> merged    = new ArrayList<>();
    // keyLocked.get(i) == true -> merged.get(i) must not lose keys.
    private final List<Boolean>   keyLocked = new ArrayList<>();

    @Override
    public int size() {
        // Sum over all backing maps; duplicated keys are counted per map.
        int size = this.unmerged.size();
        for (Map<K, T> item : this.merged) {
            size += item.size();
        }
        return size;
    }

    @Override
    public boolean isEmpty() {
        if (!this.unmerged.isEmpty()) {
            return false;
        }
        for (Map<K, T> item : this.merged) {
            if (!item.isEmpty()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean containsValue(Object value) {
        if (this.unmerged.containsValue(value)) {
            return true;
        }
        for (Map<K, T> item : this.merged) {
            if (item.containsValue(value)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public boolean containsKey(Object key) {
        if (this.unmerged.containsKey(key)) {
            return true;
        }
        for (Map<K, T> item : this.merged) {
            if (item.containsKey(key)) {
                return true;
            }
        }
        return false;
    }

    /** Returns the value from the first backing map that contains the key. */
    @Override
    public T get(Object key) {
        if (this.unmerged.containsKey(key)) {
            return this.unmerged.get(key);
        }
        for (Map<K, T> item : this.merged) {
            if (item.containsKey(key)) {
                return item.get(key);
            }
        }
        return null;
    }

    /**
     * Updates the first backing map already containing the key; keys unknown
     * to every appended map are stored in {@code unmerged}.
     */
    @Override
    public T put(K key, T value) {
        if (this.unmerged.containsKey(key)) {
            return this.unmerged.put(key, value);
        }
        for (Map<K, T> item : this.merged) {
            if (item.containsKey(key)) {
                return item.put(key, value);
            }
        }
        return this.unmerged.put(key, value);
    }

    /**
     * Removes the mapping from the first backing map containing the key.
     * For a key-locked map the value is set to {@code null} instead, so the
     * locked map's key set never shrinks (same contract as {@link #clear}).
     */
    @Override
    @SuppressWarnings("unchecked")
    public T remove(Object key) {
        if (this.unmerged.containsKey(key)) {
            return this.unmerged.remove(key);
        }
        for (int i = 0; i < this.merged.size(); i++) {
            Map<K, T> item = this.merged.get(i);
            boolean locked = this.keyLocked.get(i);
            if (item.containsKey(key)) {
                // FIX: the old guard was (containsKey && !locked), which
                // skipped locked maps entirely — the locked branch below was
                // unreachable and a key held only by a locked map silently
                // fell through to unmerged.remove(key). Locked maps now have
                // their value nulled, matching clear().
                if (locked) {
                    return item.put((K) key, null);
                } else {
                    return item.remove(key);
                }
            }
        }
        return this.unmerged.remove(key);
    }

    @Override
    public void putAll(Map<? extends K, ? extends T> m) {
        // Route every entry through put() so merged-map precedence applies.
        m.forEach((BiConsumer<K, T>) this::put);
    }

    @Override
    public void clear() {
        this.unmerged.clear();
        for (int i = 0; i < this.merged.size(); i++) {
            Map<K, T> item = this.merged.get(i);
            boolean locked = this.keyLocked.get(i);
            if (locked) {
                // Locked maps keep their keys; only the values are wiped.
                Set<K> keys = item.keySet();
                for (K key : keys) {
                    item.put(key, null);
                }
            } else {
                item.clear();
            }
        }
    }

    @Override
    public Set<K> keySet() {
        Set<K> keySet = new HashSet<>(this.unmerged.keySet());
        for (Map<K, T> item : this.merged) {
            keySet.addAll(item.keySet());
        }
        return keySet;
    }

    @Override
    public Collection<T> values() {
        ArrayList<T> values = new ArrayList<>(this.unmerged.values());
        for (Map<K, T> item : this.merged) {
            values.addAll(item.values());
        }
        return values;
    }

    /**
     * Returns a view backed by a one-shot chained iterator: the returned
     * set can be iterated only once per call to this method, and its size is
     * fixed at creation time.
     */
    @Override
    public Set<Entry<K, T>> entrySet() {
        Iterator<Entry<K, T>> basic = this.unmerged.entrySet().iterator();
        for (Map<K, T> item : this.merged) {
            basic = CollectionUtils.mergeIterator(basic, item.entrySet().iterator());
        }
        final int size = size();
        Iterator<Entry<K, T>> finalBasic = basic;
        return new AbstractSet<Entry<K, T>>() {
            @Override
            public Iterator<Entry<K, T>> iterator() {
                return finalBasic;
            }

            @Override
            public int size() {
                return size;
            }
        };
    }

    /**
     * Appends a backing map to the lookup chain.
     *
     * @param object    the map to append; ignored when {@code null}
     * @param keyLocked when {@code true}, the map's key set is preserved by
     *                  {@link #remove} and {@link #clear} (values are nulled)
     */
    @SuppressWarnings("unchecked")
    public void appendMap(Map<? extends K, ? extends T> object, boolean keyLocked) {
        if (object != null) {
            this.merged.add((Map<K, T>) object);
            this.keyLocked.add(keyLocked);
        }
    }
}
package xyz.flysium.dao.entity;

import java.io.Serializable;
import java.util.Date;

/**
 * Data object binding a user's account book to a category, with auditing
 * columns (creator/updater, create/update time) and a soft-delete flag.
 */
public class UserAccountBookCategoryDO implements Serializable {

    private static final long serialVersionUID = 1L;

    private Long id;
    private Long uid;
    private Long gid;
    private Long cid;
    private Long creator;
    private Long updater;
    private Date createTime;
    private Date updateTime;
    private String remark;
    private Byte isDeleted;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getUid() {
        return uid;
    }

    public void setUid(Long uid) {
        this.uid = uid;
    }

    public Long getGid() {
        return gid;
    }

    public void setGid(Long gid) {
        this.gid = gid;
    }

    public Long getCid() {
        return cid;
    }

    public void setCid(Long cid) {
        this.cid = cid;
    }

    public Long getCreator() {
        return creator;
    }

    public void setCreator(Long creator) {
        this.creator = creator;
    }

    public Long getUpdater() {
        return updater;
    }

    public void setUpdater(Long updater) {
        this.updater = updater;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    public Byte getIsDeleted() {
        return isDeleted;
    }

    public void setIsDeleted(Byte isDeleted) {
        this.isDeleted = isDeleted;
    }

    @Override
    public String toString() {
        // Keep the exact generated layout: "SimpleName [Hash = h, field=..., serialVersionUID=...]".
        return getClass().getSimpleName()
                + " [" + "Hash = " + hashCode()
                + ", id=" + id
                + ", uid=" + uid
                + ", gid=" + gid
                + ", cid=" + cid
                + ", creator=" + creator
                + ", updater=" + updater
                + ", createTime=" + createTime
                + ", updateTime=" + updateTime
                + ", remark=" + remark
                + ", isDeleted=" + isDeleted
                + ", serialVersionUID=" + serialVersionUID
                + "]";
    }

    /** All identity-relevant fields, in a fixed order shared by equals/hashCode. */
    private Object[] state() {
        return new Object[] { getId(), getUid(), getGid(), getCid(), getCreator(),
                getUpdater(), getCreateTime(), getUpdateTime(), getRemark(), getIsDeleted() };
    }

    /** Null-safe equality check. */
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object that) {
        if (this == that) {
            return true;
        }
        if (that == null || getClass() != that.getClass()) {
            return false;
        }
        UserAccountBookCategoryDO other = (UserAccountBookCategoryDO) that;
        Object[] mine = state();
        Object[] theirs = other.state();
        for (int i = 0; i < mine.length; i++) {
            if (!eq(mine[i], theirs[i])) {
                return false;
            }
        }
        return true;
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (seed 1, null -> 0) as the conventional
        // generated form, so hash values are unchanged.
        int result = 1;
        for (Object field : state()) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }
}
package com.ddhigh.halia.chat.client.io.packet;

import com.ddhigh.halia.chat.client.io.protocol.MsgType;
import com.ddhigh.halia.chat.client.io.protocol.Opcode;
import com.ddhigh.halia.chat.client.io.protocol.Packet;
import io.netty.buffer.ByteBuf;

/**
 * Public-chat request packet: one message-type byte followed by a
 * short-length-prefixed string. {@link #read} and {@link #write} are exact
 * mirrors of each other, so a written packet round-trips through read.
 */
@Packet(opcode = Opcode.PublicChatReq)
public class PublicChatReq extends AbstractPacket {
    private MsgType msgType;
    private String message;

    public PublicChatReq(MsgType msgType, String message) {
        this.msgType = msgType;
        this.message = message;
    }

    // No-arg constructor — presumably required by the packet decoder to
    // instantiate before calling read(); verify against the registry code.
    public PublicChatReq() {
    }

    @Override
    public void read(ByteBuf buf) {
        // Wire order: 1 byte msgType, then short-string payload.
        msgType = MsgType.valueOf(buf.readByte());
        message = readShortString(buf);
    }

    @Override
    public void write(ByteBuf buf) {
        buf.writeByte(msgType.value());
        writeShortString(buf, message);
    }

    public MsgType getMsgType() {
        return msgType;
    }

    public String getMessage() {
        return message;
    }

    @Override
    public String toString() {
        return "PublicChatReq{" +
                "msgType=" + msgType +
                ", message='" + message + '\'' +
                '}';
    }
}
/*
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 */

package com.microsoft.azure.toolkit.intellij.mysql;

import com.microsoft.azure.toolkit.intellij.common.AzureComboBox;
import com.microsoft.azure.toolkit.lib.Azure;
import com.microsoft.azure.toolkit.lib.common.operation.AzureOperation;
import com.microsoft.azure.toolkit.lib.mysql.AzureMySql;
import com.microsoft.azuretools.azurecommons.helpers.NotNull;

import java.util.List;

/**
 * Combo box listing the MySQL server versions supported by Azure Database
 * for MySQL, populated asynchronously by the AzureComboBox infrastructure.
 */
public class VersionComboBox extends AzureComboBox<String> {

    /** Loads the supported version strings from the Azure MySQL service. */
    @NotNull
    @Override
    @AzureOperation(
        name = "mysql|version.list.supported",
        type = AzureOperation.Type.SERVICE
    )
    protected List<? extends String> loadItems() {
        return Azure.az(AzureMySql.class).listSupportedVersions();
    }
}
package lk.ijse.pos.dao.custom.impl;

import lk.ijse.pos.dao.CrudDAOImpl;
import lk.ijse.pos.dao.custom.OrderDAO;
import lk.ijse.pos.entity.Order;

/**
 * Hibernate-backed DAO for {@link Order} entities.
 */
public class OrderDAOImpl extends CrudDAOImpl<Order, Integer> implements OrderDAO {

    /**
     * Returns the id of the most recently inserted order.
     *
     * @return the highest order id, or 0 when the Order table is empty
     * @throws Exception on any underlying persistence error
     */
    @Override
    public int getLastOrderId() throws Exception {
        // uniqueResult() is null when the table is empty, and the concrete numeric
        // type a native query returns varies by driver (Integer, Long, BigInteger).
        // The previous direct (int) cast NPE'd on empty tables and could
        // ClassCastException on non-Integer results; go through Number instead.
        Object lastId = session
                .createNativeQuery("SELECT id FROM `Order` ORDER BY id DESC LIMIT 1")
                .uniqueResult();
        return lastId == null ? 0 : ((Number) lastId).intValue();
    }
}
package com.prowidesoftware.swift.model.mx;

import com.prowidesoftware.swift.model.mx.dic.*;
import com.prowidesoftware.swift.model.mx.AbstractMX;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import com.prowidesoftware.swift.model.MxSwiftMessage;
import com.prowidesoftware.swift.model.mx.AbstractMX;
import com.prowidesoftware.swift.model.mx.MxRead;
import com.prowidesoftware.swift.model.mx.MxReadImpl;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

/**
 * Class for fxtr.030.001.04 ISO 20022 message.
 *
 */
// NOTE(review): this class follows the Prowide generated-message pattern
// (fixed BUSINESS_PROCESS/FUNCTIONALITY/VARIANT/VERSION constants plus a JAXB
// context class list), so it is presumably code-generated — confirm before
// editing logic by hand.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Document", propOrder = {
    "fxTradBlkStsNtfctn"
})
@XmlRootElement(name = "Document", namespace = "urn:iso:std:iso:20022:tech:xsd:fxtr.030.001.04")
public class MxFxtr03000104 extends AbstractMX {

    // The single payload element of the Document root: the FX trade bulk
    // status notification.
    @XmlElement(name = "FXTradBlkStsNtfctn", required = true)
    protected ForeignExchangeTradeBulkStatusNotificationV04 fxTradBlkStsNtfctn;

    // Identifiers decomposing the message type "fxtr.030.001.04".
    public final static transient String BUSINESS_PROCESS = "fxtr";
    public final static transient int FUNCTIONALITY = 30;
    public final static transient int VARIANT = 1;
    public final static transient int VERSION = 4;

    // Classes that make up the JAXB context used when parsing/serializing
    // this message type (see parse(...) and getClasses()).
    @SuppressWarnings("rawtypes")
    public final static transient Class[] _classes = new Class[] {
        AddressType2Code.class, AllocationIndicator1Code.class, ClearingBrokerIdentification1.class,
        ClearingSystemIdentification2Choice.class, CollateralisationIndicator1Code.class,
        CorporateSectorIdentifier1Code.class, CounterpartySideTransactionReporting1.class,
        DateAndDateTimeChoice.class, ForeignExchangeTradeBulkStatusNotificationV04.class,
        MxFxtr03000104.class, NameAndAddress8.class, Pagination.class, PartyIdentification44.class,
        PartyIdentification59.class, PartyIdentification73Choice.class, PostalAddress1.class,
        RegulatoryReporting4.class, SideIndicator1Code.class, Status27Choice.class,
        Status28Choice.class, StatusAndSubStatus2.class, StatusSubType2Code.class,
        SupplementaryData1.class, SupplementaryDataEnvelope1.class, TradeData11.class,
        TradeData12.class, TradeStatus6Code.class, TradeStatus7Code.class,
        TradingSideTransactionReporting1.class, UnderlyingProductIdentifier1Code.class,
        UniqueTransactionIdentifier2.class
    };

    public final static transient String NAMESPACE = "urn:iso:std:iso:20022:tech:xsd:fxtr.030.001.04";

    public MxFxtr03000104() {
        super();
    }

    /**
     * Creates the MX object parsing the parameter String with the XML content
     *
     */
    public MxFxtr03000104(final String xml) {
        this();
        // Parse into a temporary instance, then copy the payload over.
        MxFxtr03000104 tmp = parse(xml);
        fxTradBlkStsNtfctn = tmp.getFXTradBlkStsNtfctn();
    }

    /**
     * Creates the MX object parsing the raw content from the parameter MxSwiftMessage
     *
     */
    public MxFxtr03000104(final MxSwiftMessage mxSwiftMessage) {
        this(mxSwiftMessage.message());
    }

    /**
     * Gets the value of the fxTradBlkStsNtfctn property.
     *
     * @return
     *     possible object is
     *     {@link ForeignExchangeTradeBulkStatusNotificationV04 }
     *
     */
    public ForeignExchangeTradeBulkStatusNotificationV04 getFXTradBlkStsNtfctn() {
        return fxTradBlkStsNtfctn;
    }

    /**
     * Sets the value of the fxTradBlkStsNtfctn property.
     *
     * @param value
     *     allowed object is
     *     {@link ForeignExchangeTradeBulkStatusNotificationV04 }
     *
     */
    public MxFxtr03000104 setFXTradBlkStsNtfctn(ForeignExchangeTradeBulkStatusNotificationV04 value) {
        this.fxTradBlkStsNtfctn = value;
        return this;
    }

    // toString/equals/hashCode are reflection-based (commons-lang3), covering
    // all fields of this class.
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
    }

    @Override
    public boolean equals(Object that) {
        return EqualsBuilder.reflectionEquals(this, that);
    }

    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    @Override
    public String getBusinessProcess() {
        return BUSINESS_PROCESS;
    }

    @Override
    public int getFunctionality() {
        return FUNCTIONALITY;
    }

    @Override
    public int getVariant() {
        return VARIANT;
    }

    @Override
    public int getVersion() {
        return VERSION;
    }

    /**
     * Creates the MX object parsing the raw content from the parameter XML
     *
     */
    public static MxFxtr03000104 parse(String xml) {
        return ((MxFxtr03000104) MxReadImpl.parse(MxFxtr03000104.class, xml, _classes));
    }

    /**
     * Creates the MX object parsing the raw content from the parameter XML with injected read implementation
     * @since 9.0.1
     *
     * @param parserImpl an MX unmarshall implementation
     */
    public static MxFxtr03000104 parse(String xml, MxRead parserImpl) {
        return ((MxFxtr03000104) parserImpl.read(MxFxtr03000104.class, xml, _classes));
    }

    @Override
    public String getNamespace() {
        return NAMESPACE;
    }

    @Override
    @SuppressWarnings("rawtypes")
    public Class[] getClasses() {
        return _classes;
    }

    /**
     * Creates an MxFxtr03000104 messages from its JSON representation.
     * <p>
     * For generic conversion of JSON into the corresponding MX instance
     * see {@link AbstractMX#fromJson(String)}
     *
     * @since 7.10.2
     *
     * @param json a JSON representation of an MxFxtr03000104 message
     * @return
     *     a new instance of MxFxtr03000104
     */
    public final static MxFxtr03000104 fromJson(String json) {
        return AbstractMX.fromJson(json, MxFxtr03000104.class);
    }
}
package com.thoughtworks.xstream.persistence;

import java.util.AbstractList;

/**
 * A persistent list implementation backed on a XmlMap.
 *
 * <p>Elements are stored in the map under their decimal index as a String key,
 * so insert/remove must shift the affected key range.</p>
 *
 * @author Guilherme Silveira
 */
public class XmlArrayList extends AbstractList {

    private final XmlMap map;

    public XmlArrayList(StreamStrategy streamStrategy) {
        this.map = new XmlMap(streamStrategy);
    }

    public int size() {
        return map.size();
    }

    /**
     * Replaces the element at {@code index}, returning the previous value.
     */
    public Object set(int index, Object element) {
        rangeCheck(index);
        Object value = get(index);
        map.put(String.valueOf(index), element);
        return value;
    }

    /**
     * Inserts {@code element} at {@code index}, shifting subsequent elements right.
     *
     * @throws IndexOutOfBoundsException if {@code index} is not in {@code [0, size]}
     */
    public void add(int index, Object element) {
        int size = size();
        if (index >= (size + 1) || index < 0) {
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: "
                    + size);
        }
        // Shift elements [index, size-1] one slot to the right, starting from
        // the end. The previous loop began at the non-existent slot 'size'
        // (and wrote its missing value to key 'size + 1'), inserting a
        // spurious entry; copying i-1 -> i avoids reading past the end.
        for (int i = size; i > index; i--) {
            map.put(String.valueOf(i), map.get(String.valueOf(i - 1)));
        }
        map.put(String.valueOf(index), element);
    }

    // Validates index for read/replace/remove operations (exclusive upper bound).
    private void rangeCheck(int index) {
        int size = size();
        if (index >= size || index < 0) {
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: "
                    + size);
        }
    }

    public Object get(int index) {
        rangeCheck(index);
        return map.get(String.valueOf(index));
    }

    /**
     * Removes and returns the element at {@code index}, shifting subsequent
     * elements left and dropping the now-unused last key.
     */
    public Object remove(int index) {
        int size = size();
        rangeCheck(index);
        Object value = map.get(String.valueOf(index));
        for (int i = index; i < size - 1; i++) {
            map.put(String.valueOf(i), map.get(String.valueOf(i + 1)));
        }
        map.remove(String.valueOf(size - 1));
        return value;
    }
}
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.logging;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.SetMultimap;
import com.google.common.flogger.backend.Tags;

/**
 * Mutable holder of logging tags, kept in sync with a pre-built flogger
 * {@link Tags} instance so {@link #getTags()} stays cheap.
 */
public class MutableTags {
  private final SetMultimap<String, String> tagMap =
      MultimapBuilder.hashKeys().hashSetValues().build();
  private Tags tags = Tags.empty();

  /** Returns the current tags as a pre-built flogger {@link Tags} instance. */
  public Tags getTags() {
    return tags;
  }

  /**
   * Adds a tag unless an identical name/value pair is already present.
   *
   * @param name the name of the tag
   * @param value the value of the tag
   * @return {@code true} if the tag was added, {@code false} if it already existed
   */
  public boolean add(String name, String value) {
    requireNonNull(name, "tag name is required");
    requireNonNull(value, "tag value is required");
    if (!tagMap.put(name, value)) {
      return false;
    }
    buildTags();
    return true;
  }

  /**
   * Removes the tag with the given name and value, if present.
   *
   * @param name the name of the tag
   * @param value the value of the tag
   */
  public void remove(String name, String value) {
    requireNonNull(name, "tag name is required");
    requireNonNull(value, "tag value is required");
    boolean removed = tagMap.remove(name, value);
    if (removed) {
      buildTags();
    }
  }

  /**
   * Tells whether any tags are currently set.
   *
   * @return {@code true} if there are no tags, otherwise {@code false}
   */
  public boolean isEmpty() {
    return tagMap.isEmpty();
  }

  /** Drops all tags and resets the cached {@link Tags} instance. */
  public void clear() {
    tagMap.clear();
    tags = Tags.empty();
  }

  /**
   * Returns an immutable snapshot of the tags as a Multimap.
   *
   * @return the tags as Multimap
   */
  public ImmutableSetMultimap<String, String> asMap() {
    return ImmutableSetMultimap.copyOf(tagMap);
  }

  /**
   * Replaces all existing tags with the provided tags.
   *
   * @param tags the tags that should be set.
   */
  void set(ImmutableSetMultimap<String, String> tags) {
    tagMap.clear();
    tags.forEach((name, value) -> tagMap.put(name, value));
    buildTags();
  }

  // Rebuilds the cached Tags instance from tagMap after any mutation.
  private void buildTags() {
    if (!tagMap.isEmpty()) {
      Tags.Builder builder = Tags.builder();
      tagMap.forEach(builder::addTag);
      tags = builder.build();
      return;
    }
    // Map is empty: only reset the cache if it is not already empty.
    if (!tags.isEmpty()) {
      tags = Tags.empty();
    }
  }
}
// Copyright 2018 Sebastian Kuerten // // This file is part of OpenMetroMaps. // // OpenMetroMaps is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // OpenMetroMaps is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with OpenMetroMaps. If not, see <http://www.gnu.org/licenses/>. package org.openmetromaps.rawstations.xml; import java.io.InputStream; import org.openmetromaps.rawstations.RawStationModel; import org.openmetromaps.rawstations.xml.XmlStationReader; import de.topobyte.xml.domabstraction.desktopimpl.DesktopDocumentFactory; import de.topobyte.xml.domabstraction.iface.ParsingException; public class DesktopXmlStationReader { public static RawStationModel read(InputStream is) throws ParsingException { return XmlStationReader.read(new DesktopDocumentFactory(), is); } }
/*
 * 3D City Database - The Open Source CityGML Database
 * https://www.3dcitydb.org/
 *
 * Copyright 2013 - 2021
 * Chair of Geoinformatics
 * Technical University of Munich, Germany
 * https://www.lrg.tum.de/gis/
 *
 * The 3D City Database is jointly developed with the following
 * cooperation partners:
 *
 * Virtual City Systems, Berlin <https://vc.systems/>
 * M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.citydb.core.operation.common.cache.model;

import org.citydb.core.database.adapter.AbstractSQLAdapter;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Cache table model for texture coordinate lists (id, gmlid, texparam gmlid,
 * texture coordinates, target id).
 */
public class CacheTableTextureCoordList extends AbstractCacheTableModel {
	public static CacheTableTextureCoordList instance = null;

	/** Lazily creates and returns the shared singleton instance. */
	public synchronized static CacheTableTextureCoordList getInstance() {
		if (instance == null) {
			instance = new CacheTableTextureCoordList();
		}

		return instance;
	}

	/**
	 * Creates the three lookup indexes (ID, GMLID, TARGET_ID) on the given
	 * cache table.
	 */
	@Override
	public void createIndexes(Connection conn, String tableName, String properties) throws SQLException {
		String[] indexStatements = {
				"create index idx_" + tableName + " on " + tableName + " (ID) " + properties,
				"create index idx2_" + tableName + " on " + tableName + " (GMLID) " + properties,
				"create index idx3_" + tableName + " on " + tableName + " (TARGET_ID) " + properties
		};

		try (Statement stmt = conn.createStatement()) {
			for (String sql : indexStatements) {
				stmt.executeUpdate(sql);
			}
		}
	}

	@Override
	public CacheTableModel getType() {
		return CacheTableModel.TEXTURE_COORD_LIST;
	}

	/** Builds the column definition list using database-specific SQL types. */
	@Override
	protected String getColumns(AbstractSQLAdapter sqlAdapter) {
		StringBuilder columns = new StringBuilder("(")
				.append("ID ").append(sqlAdapter.getInteger()).append(", ")
				.append("GMLID ").append(sqlAdapter.getCharacterVarying(256)).append(", ")
				.append("TEXPARAM_GMLID ").append(sqlAdapter.getCharacterVarying(256)).append(", ")
				.append("TEXTURE_COORDINATES ").append(sqlAdapter.getPolygon2D()).append(", ")
				.append("TARGET_ID ").append(sqlAdapter.getInteger())
				.append(")");
		return columns.toString();
	}
}
package mil.dds.anet.search;

import mil.dds.anet.beans.AuthorizationGroup;
import mil.dds.anet.beans.lists.AnetBeanList;
import mil.dds.anet.beans.search.AuthorizationGroupSearchQuery;
import mil.dds.anet.beans.search.ISearchQuery.SortOrder;
import mil.dds.anet.database.mappers.AuthorizationGroupMapper;
import mil.dds.anet.utils.DaoUtils;
import ru.vyarus.guicey.jdbi3.tx.InTransaction;

/**
 * Base searcher for {@link AuthorizationGroup} beans. Builds the common SQL
 * (select, filters, ordering) and leaves the database-specific full-text
 * matching to subclasses via {@link #addTextQuery}.
 */
public abstract class AbstractAuthorizationGroupSearcher extends
    AbstractSearcher<AuthorizationGroup, AuthorizationGroupSearchQuery>
    implements IAuthorizationGroupSearcher {

  public AbstractAuthorizationGroupSearcher(
      AbstractSearchQueryBuilder<AuthorizationGroup, AuthorizationGroupSearchQuery> qb) {
    super(qb);
  }

  /**
   * Builds and executes the search, mapping rows to {@link AuthorizationGroup}.
   * Runs inside a transaction.
   */
  @InTransaction
  @Override
  public AnetBeanList<AuthorizationGroup> runSearch(AuthorizationGroupSearchQuery query) {
    buildQuery(query);
    return qb.buildAndRun(getDbHandle(), query, new AuthorizationGroupMapper());
  }

  // Assembles the shared portion of the SQL query from the search parameters.
  @Override
  protected void buildQuery(AuthorizationGroupSearchQuery query) {
    qb.addSelectClause("\"authorizationGroups\".*");
    qb.addTotalCount();
    qb.addFromClause("\"authorizationGroups\"");

    if (query.isTextPresent()) {
      // Database-specific free-text matching supplied by the subclass.
      addTextQuery(query);
    }

    qb.addEqualsClause("status", "\"authorizationGroups\".status", query.getStatus());

    if (query.getPositionUuid() != null) {
      // Search for authorization groups related to a given position
      qb.addWhereClause(
          "\"authorizationGroups\".uuid IN (SELECT ap.\"authorizationGroupUuid\" FROM \"authorizationGroupPositions\" ap"
              + " WHERE ap.\"positionUuid\" = :positionUuid)");
      qb.addSqlArg("positionUuid", query.getPositionUuid());
    }

    if (Boolean.TRUE.equals(query.isInMyReports())) {
      // Restrict to groups referenced by the user's own reports; the derived
      // table also exposes MAX(createdAt) so RECENT ordering can use it.
      qb.addFromClause("JOIN ("
          + " SELECT \"reportAuthorizationGroups\".\"authorizationGroupUuid\" AS uuid, MAX(reports.\"createdAt\") AS max"
          + " FROM reports"
          + " JOIN \"reportAuthorizationGroups\" ON reports.uuid = \"reportAuthorizationGroups\".\"reportUuid\""
          + " WHERE reports.\"authorUuid\" = :userUuid"
          + " GROUP BY \"reportAuthorizationGroups\".\"authorizationGroupUuid\""
          + ") \"inMyReports\" ON \"authorizationGroups\".uuid = \"inMyReports\".uuid");
      qb.addSqlArg("userUuid", DaoUtils.getUuid(query.getUser()));
    }

    addOrderByClauses(qb, query);
  }

  /** Hook for database-specific full-text search clauses. */
  protected abstract void addTextQuery(AuthorizationGroupSearchQuery query);

  // Appends ORDER BY clauses matching the requested sort, always ending with
  // uuid ASC as a stable tie-breaker.
  protected void addOrderByClauses(AbstractSearchQueryBuilder<?, ?> qb,
      AuthorizationGroupSearchQuery query) {
    switch (query.getSortBy()) {
      case CREATED_AT:
        qb.addAllOrderByClauses(
            getOrderBy(query.getSortOrder(), "\"authorizationGroups\"", "\"createdAt\""));
        break;
      case RECENT:
        if (Boolean.TRUE.equals(query.isInMyReports())) {
          // Otherwise the JOIN won't exist
          qb.addAllOrderByClauses(getOrderBy(query.getSortOrder(), "\"inMyReports\"", "max"));
        }
        break;
      case NAME:
      default:
        qb.addAllOrderByClauses(
            getOrderBy(query.getSortOrder(), "\"authorizationGroups\"", "name"));
        break;
    }
    qb.addAllOrderByClauses(getOrderBy(SortOrder.ASC, "\"authorizationGroups\"", "uuid"));
  }
}
package speedith.core.reasoning.rules.transformers.copTrans;

import speedith.core.lang.*;
import speedith.core.lang.cop.Arrow;
import speedith.core.lang.cop.COPDiagram;
import speedith.core.lang.cop.Cardinality;
import speedith.core.lang.cop.CompleteCOPDiagram;
import speedith.core.reasoning.ApplyStyle;
import speedith.core.reasoning.args.copArgs.ArrowArg;

import java.util.*;

/**
 * Removes arrows from a diagram; the diagram has to be a COP diagram. This rule doesn't
 * operate on null and compound diagrams.
 *
 * @author Zohreh Shams [zs315@cam.ac.uk]
 */
public class RemoveArrowsTransformer extends IdTransformer {

  private final ApplyStyle applyStyle;
  private final List<ArrowArg> targetArrows;

  public RemoveArrowsTransformer(List<ArrowArg> targetArrows, ApplyStyle applyStyle) {
    this.targetArrows = targetArrows;
    this.applyStyle = applyStyle;
  }

  /**
   * Returns a copy of the target diagram with the selected arrows (and, for
   * complete COP diagrams, their cardinalities) removed. Diagrams other than
   * the one at the arrows' sub-diagram index are returned unchanged.
   *
   * @throws TransformationException if the target diagram is not a COP diagram
   *         or does not contain all arrows to be removed
   */
  @Override
  public SpiderDiagram transform(PrimarySpiderDiagram psd, int diagramIndex,
      ArrayList<CompoundSpiderDiagram> parents, ArrayList<Integer> childIndices) {
    if (targetArrows.isEmpty()) {
      return psd;
    }

    int subDiagramIndex = targetArrows.get(0).getSubDiagramIndex();
    if (diagramIndex != subDiagramIndex) {
      // Not the diagram the arrows belong to; leave it untouched.
      return psd;
    }

    if (!(psd instanceof COPDiagram)) {
      throw new TransformationException("The rule is not applicable to this diagram.");
    }
    COPDiagram cop = (COPDiagram) psd;

    List<Arrow> arrowsToRemove = getTargetArrows();
    if (!cop.getArrows().containsAll(arrowsToRemove)) {
      throw new TransformationException("The arrows to be removed do not exist in the target diagram");
    }

    TreeSet<Arrow> newArrows = new TreeSet<>();
    newArrows.addAll(cop.getArrows());
    newArrows.removeAll(arrowsToRemove);

    if (psd instanceof CompleteCOPDiagram) {
      CompleteCOPDiagram compCop = (CompleteCOPDiagram) psd;

      // Arrow cardinalities belong to the removed arrows; drop them too.
      TreeMap<Arrow, Cardinality> newCardinalities =
          new TreeMap<>(compCop.getArrowCardinalities());
      for (Arrow arrow : arrowsToRemove) {
        newCardinalities.remove(arrow);
      }

      return SpiderDiagrams.createCompleteCOPDiagram(compCop.getSpiders(), compCop.getHabitats(),
          compCop.getShadedZones(), compCop.getPresentZones(), newArrows,
          compCop.getSpiderLabels(), compCop.getCurveLabels(), newCardinalities,
          compCop.getSpiderComparators());
    }

    return COPDiagram.createCOPDiagram(cop.getSpiders(), cop.getHabitats(),
        cop.getShadedZones(), cop.getPresentZones(), newArrows);
  }

  // Extracts the plain Arrow objects from the ArrowArg wrappers.
  private List<Arrow> getTargetArrows() {
    ArrayList<Arrow> arrows = new ArrayList<>();
    for (ArrowArg targetArrow : targetArrows) {
      arrows.add(targetArrow.getArrow());
    }
    return arrows;
  }
}
/*
 * #%L
 * GwtMaterial
 * %%
 * Copyright (C) 2015 - 2017 GwtMaterialDesign
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package gwt.material.design.client.resources;

import com.google.gwt.core.client.GWT;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.TextResource;

/**
 * Client bundle exposing the unminified (debug) JavaScript resources used by
 * GwtMaterial. Each method returns the raw text of one bundled script.
 */
public interface MaterialDebugResources extends ClientBundle {

    // Shared singleton created via GWT deferred binding.
    MaterialDebugResources INSTANCE = GWT.create(MaterialDebugResources.class);

    /** Unminified Materialize CSS framework script (version 0.97.5). */
    @Source("js/materialize-0.97.5.js")
    TextResource materializeJsDebug();

    /** Unminified animation helper script. */
    @Source("js/animation.js")
    TextResource animationJsDebug();

    /** Clipboard helper script. */
    @Source("js/clipboard.js")
    TextResource clipboardJs();

    /** Unminified app-installer helper script. */
    @Source("js/app-installer.js")
    TextResource appInstallerJsDebug();

    /** Unminified focus-visible polyfill script. */
    @Source("js/focus-visible.js")
    TextResource focusVisibleJsDebug();
}
/**
 * <a href="http://www.openolat.org">
 * OpenOLAT - Online Learning and Training</a><br>
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at the
 * <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Initial code contributed and copyrighted by<br>
 * frentix GmbH, http://www.frentix.com
 * <p>
 */
package org.olat.course.archiver;

import java.util.List;

import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.SingleSelection;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormLayoutContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.util.StringHelper;
import org.olat.course.nodes.CourseNode;
import org.olat.group.BusinessGroup;

/**
 * Form controller that lets the user pick either "all groups" or a single
 * business group related to the given course nodes (used by the archiver).
 *
 * @author srosse, stephane.rosse@frentix.com, http://www.frentix.com
 *
 */
public class ChooseGroupController extends FormBasicController {

	// Course nodes the selection applies to; exposed via getCourseNodes().
	private final List<CourseNode> courseNodes;
	// Candidate groups shown in the dropdown, in display order.
	private final List<BusinessGroup> relatedGroups;

	private SingleSelection selectGroupEl;

	public ChooseGroupController(UserRequest ureq, WindowControl wControl,
			List<CourseNode> courseNodes, List<BusinessGroup> relatedGroups) {
		super(ureq, wControl);
		this.courseNodes = courseNodes;
		this.relatedGroups = relatedGroups;
		initForm(ureq);
	}

	// Builds the dropdown: key "all" at position 0, then one entry per group
	// keyed by its index in relatedGroups.
	@Override
	protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
		setFormDescription("select.group.desc");

		String[] theKeys = new String[1 + relatedGroups.size()];
		theKeys[0] = "all";
		String[] theValues = new String[1 + relatedGroups.size()];
		theValues[0] = translate("select.group.all");
		for(int i=0; i<relatedGroups.size(); i++) {
			theKeys[i+1] = Integer.toString(i);
			// Escape group names since they are rendered as HTML.
			theValues[i+1] = StringHelper.escapeHtml(relatedGroups.get(i).getName());
		}

		selectGroupEl = uifactory.addDropdownSingleselect("select.group", formLayout, theKeys, theValues, null);

		FormLayoutContainer buttonLayout = FormLayoutContainer.createButtonLayout("buttonLayout", getTranslator());
		formLayout.add(buttonLayout);
		buttonLayout.setRootForm(mainForm);
		uifactory.addFormCancelButton("cancel", buttonLayout, ureq, getWindowControl());
		uifactory.addFormSubmitButton("ok", buttonLayout);
	}

	/**
	 * Returns the selected group, or {@code null} when nothing is selected or
	 * the "all groups" entry (index 0) is chosen.
	 */
	public BusinessGroup getSelectedGroup() {
		if(!selectGroupEl.isOneSelected() || selectGroupEl.isSelected(0)) {
			return null;
		}

		// Keys other than "all" are list indexes into relatedGroups.
		String key = selectGroupEl.getSelectedKey();
		if(StringHelper.isLong(key)) {
			int pos = Integer.parseInt(key);
			if(pos >= 0 && pos <relatedGroups.size()) {
				return relatedGroups.get(pos);
			}
		}
		return null;
	}

	public List<CourseNode> getCourseNodes() {
		return courseNodes;
	}

	@Override
	protected void formOK(UserRequest ureq) {
		fireEvent(ureq, Event.DONE_EVENT);
	}

	@Override
	protected void formCancelled(UserRequest ureq) {
		fireEvent(ureq, Event.CANCELLED_EVENT);
	}
}