gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.limit;
import org.apache.drill.categories.OperatorTest;
import org.apache.drill.exec.physical.config.Limit;
import org.apache.drill.exec.physical.impl.MockRecordBatch;
import org.apache.drill.exec.physical.impl.BaseTestOpBatchEmitOutcome;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.physical.rowSet.RowSet;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category(OperatorTest.class)
public class TestLimitBatchEmitOutcome extends BaseTestOpBatchEmitOutcome {

  /**
   * Test to show empty batch with both OK_NEW_SCHEMA and EMIT outcome is not ignored by Limit and is passed through
   * to the downstream operator.
   * @throws Throwable
   */
  @Test
  public void testLimitEmptyBatchEmitOutcome() throws Throwable {
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());

    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);

    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
      inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
    // Only set for this Test class
    mockInputBatch.useUnnestKillHandlingForLimit(true);

    final Limit limitConf = new Limit(null, 0, 1);
    final LimitRecordBatch limitBatch = new LimitRecordBatch(limitConf, operatorFixture.getFragmentContext(),
      mockInputBatch);

    // assertEquals(expected, actual) reports both outcomes on failure,
    // unlike assertTrue(a == b) which only says "expected true"
    assertEquals(RecordBatch.IterOutcome.OK_NEW_SCHEMA, limitBatch.next());
    outputRecordCount += limitBatch.getRecordCount();
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    outputRecordCount += limitBatch.getRecordCount();
    assertEquals(0, outputRecordCount);
  }

  /**
   * Test to validate limit considers all the data until it sees EMIT outcome and return output batch with data that
   * meets the limit criteria.
   * @throws Throwable
   */
  @Test
  public void testLimitNonEmptyBatchEmitOutcome() throws Throwable {
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(nonEmptyInputRowSet.container());

    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);

    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
      inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
    // Only set for this Test class
    mockInputBatch.useUnnestKillHandlingForLimit(true);

    final Limit limitConf = new Limit(null, 0, 1);
    final LimitRecordBatch limitBatch = new LimitRecordBatch(limitConf, operatorFixture.getFragmentContext(),
      mockInputBatch);

    assertEquals(RecordBatch.IterOutcome.OK_NEW_SCHEMA, limitBatch.next());
    outputRecordCount += limitBatch.getRecordCount();
    assertEquals(0, outputRecordCount);
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    outputRecordCount += limitBatch.getRecordCount();
    assertEquals(1, outputRecordCount);
  }

  /**
   * Test to show that once a limit number of records is produced using first set of batches then on getting a batch
   * with EMIT outcome, the limit state is again refreshed and applied to next set of batches with data.
   * @throws Throwable
   */
  @Test
  public void testLimitResetsAfterFirstEmitOutcome() throws Throwable {
    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
      .addRow(2, 20, "item2")
      .addRow(3, 30, "item3")
      .build();

    inputContainer.add(nonEmptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(nonEmptyInputRowSet2.container());

    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
    inputOutcomes.add(RecordBatch.IterOutcome.OK);

    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
      inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
    // Only set for this Test class
    mockInputBatch.useUnnestKillHandlingForLimit(true);

    final Limit limitConf = new Limit(null, 0, 1);
    final LimitRecordBatch limitBatch = new LimitRecordBatch(limitConf, operatorFixture.getFragmentContext(),
      mockInputBatch);

    assertEquals(RecordBatch.IterOutcome.OK_NEW_SCHEMA, limitBatch.next());
    assertEquals(1, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    // State refresh happens and limit again works on new data batches
    assertEquals(0, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.OK, limitBatch.next());
    assertEquals(1, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.NONE, limitBatch.next());

    // Release the buffers of the locally-built row set (consistent with the other tests)
    nonEmptyInputRowSet2.clear();
  }

  /**
   * Test to show that when the limit number of records is found with first incoming batch, then next empty incoming
   * batch with OK outcome is ignored, but the empty EMIT outcome batch is not ignored. Empty incoming batch with
   * EMIT outcome produces empty output batch with EMIT outcome.
   * @throws Throwable
   */
  @Test
  public void testLimitNonEmptyFirst_EmptyOKEmitOutcome() throws Throwable {
    inputContainer.add(nonEmptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());

    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
    inputOutcomes.add(RecordBatch.IterOutcome.OK);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
    inputOutcomes.add(RecordBatch.IterOutcome.NONE);

    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
      inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
    // Only set for this Test class
    mockInputBatch.useUnnestKillHandlingForLimit(true);

    final Limit limitConf = new Limit(null, 0, 1);
    final LimitRecordBatch limitBatch = new LimitRecordBatch(limitConf, operatorFixture.getFragmentContext(),
      mockInputBatch);

    assertEquals(RecordBatch.IterOutcome.OK_NEW_SCHEMA, limitBatch.next());
    assertEquals(1, limitBatch.getRecordCount());
    // OK will not be received since it was accompanied with an empty batch
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    assertEquals(0, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.NONE, limitBatch.next());
  }

  /**
   * Test to show that limit refreshes it's state after seeing first EMIT outcome and works on data batches following
   * it as new set's of incoming batch and apply the limits rule from fresh on those. So for first set of batches with
   * OK_NEW_SCHEMA and EMIT outcome but total number of records received being less than limit condition, it still
   * produces an output with that many records (in this case 1 even though limit number of records is 2).
   *
   * After seeing EMIT, it refreshes it's state and operate on next input batches to again return limit number of
   * records. So for 3rd batch with 2 records but with EMIT outcome it produces an output batch with 2 records not
   * with 1 since state is refreshed.
   * @throws Throwable
   */
  @Test
  public void testMultipleLimitWithEMITOutcome() throws Throwable {
    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
      .addRow(2, 20, "item2")
      .addRow(3, 30, "item3")
      .build();

    inputContainer.add(nonEmptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(nonEmptyInputRowSet2.container());
    inputContainer.add(emptyInputRowSet.container());

    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);

    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
      inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
    // Only set for this Test class
    mockInputBatch.useUnnestKillHandlingForLimit(true);

    final Limit limitConf = new Limit(null, 0, 2);
    final LimitRecordBatch limitBatch = new LimitRecordBatch(limitConf, operatorFixture.getFragmentContext(),
      mockInputBatch);

    // first limit evaluation
    assertEquals(RecordBatch.IterOutcome.OK_NEW_SCHEMA, limitBatch.next());
    assertEquals(1, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    assertEquals(0, limitBatch.getRecordCount());

    // After seeing EMIT limit will refresh it's state and again evaluate limit on next set of input batches
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    assertEquals(2, limitBatch.getRecordCount());

    // Since limit is hit it will return NONE
    assertEquals(RecordBatch.IterOutcome.NONE, limitBatch.next());

    // Release the buffers of the locally-built row set (consistent with the other tests)
    nonEmptyInputRowSet2.clear();
  }

  /**
   * Test shows that limit operates on multiple input batches until it finds limit number of records or it sees an
   * EMIT outcome to refresh it's state.
   * @throws Throwable
   */
  @Test
  public void testLimitNonEmptyFirst_NonEmptyOK_EmptyBatchEmitOutcome() throws Throwable {
    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
      .addRow(2, 20, "item2")
      .build();

    inputContainer.add(nonEmptyInputRowSet.container());
    inputContainer.add(emptyInputRowSet.container());
    inputContainer.add(nonEmptyInputRowSet2.container());
    inputContainer.add(emptyInputRowSet.container());

    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
    inputOutcomes.add(RecordBatch.IterOutcome.OK);
    inputOutcomes.add(RecordBatch.IterOutcome.OK);
    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);

    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
      inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
    // Only set for this Test class
    mockInputBatch.useUnnestKillHandlingForLimit(true);

    final Limit limitConf = new Limit(null, 0, 2);
    final LimitRecordBatch limitBatch = new LimitRecordBatch(limitConf, operatorFixture.getFragmentContext(),
      mockInputBatch);

    assertEquals(RecordBatch.IterOutcome.OK_NEW_SCHEMA, limitBatch.next());
    assertEquals(1, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.OK, limitBatch.next());
    assertEquals(1, limitBatch.getRecordCount());
    assertEquals(RecordBatch.IterOutcome.EMIT, limitBatch.next());
    assertEquals(0, limitBatch.getRecordCount());

    nonEmptyInputRowSet2.clear();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import org.apache.camel.Exchange;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Message;
import org.apache.camel.TypeConverter;
/**
* A base class for implementation inheritance providing the core
* {@link Message} body handling features but letting the derived class deal
* with headers.
*
* Unless a specific provider wishes to do something particularly clever with
* headers you probably want to just derive from {@link DefaultMessage}
*
* @version
*/
public abstract class MessageSupport implements Message {
    private Exchange exchange;
    private Object body;
    private String messageId;

    @Override
    public String toString() {
        // do not output information about the message as it may contain sensitive information
        String id = messageId == null ? "" : messageId;
        return String.format("Message[%s]", id);
    }

    /**
     * Returns the body, lazily created on first access via {@link #createBody()}.
     */
    public Object getBody() {
        if (body == null) {
            body = createBody();
        }
        return body;
    }

    public <T> T getBody(Class<T> type) {
        return getBody(type, getBody());
    }

    /**
     * Returns the body, failing with {@link InvalidPayloadException} when none is present.
     */
    public Object getMandatoryBody() throws InvalidPayloadException {
        Object current = getBody();
        if (current == null) {
            throw new InvalidPayloadException(getExchange(), Object.class, this);
        }
        return current;
    }

    /**
     * Converts the given body to the requested type, trying the body first and then
     * the message itself, returning {@code null} when no conversion is possible.
     */
    protected <T> T getBody(Class<T> type, Object body) {
        // cheap same-type check first so we skip the type converter entirely
        // when the body is already of the requested type
        if (type.isInstance(body)) {
            return type.cast(body);
        }
        Exchange ex = getExchange();
        if (ex == null) {
            // without an exchange there is no type converter available
            return null;
        }
        TypeConverter converter = ex.getContext().getTypeConverter();
        // try the body itself first; for types such as InputStream vs Reader the
        // direct transformation is more efficient than going through the message
        T converted = converter.convertTo(type, ex, body);
        if (converted == null) {
            // fallback and try the message itself (e.g. used in camel-http)
            converted = converter.tryConvertTo(type, ex, this);
        }
        return converted;
    }

    /**
     * Converts the body to the requested type, throwing {@link InvalidPayloadException}
     * when the conversion is not possible.
     */
    public <T> T getMandatoryBody(Class<T> type) throws InvalidPayloadException {
        // cheap same-type check first so we skip the type converter entirely
        // when the body is already of the requested type
        if (type.isInstance(body)) {
            return type.cast(body);
        }
        Exchange ex = getExchange();
        if (ex == null) {
            throw new InvalidPayloadException(ex, type, this);
        }
        TypeConverter converter = ex.getContext().getTypeConverter();
        try {
            return converter.mandatoryConvertTo(type, ex, getBody());
        } catch (Exception cause) {
            throw new InvalidPayloadException(ex, type, this, cause);
        }
    }

    public void setBody(Object body) {
        this.body = body;
    }

    /**
     * Sets the body, converting it to the given type first when a converter is available;
     * when the conversion yields nothing the original value is stored unchanged.
     */
    public <T> void setBody(Object value, Class<T> type) {
        Exchange ex = getExchange();
        if (ex != null) {
            T converted = ex.getContext().getTypeConverter().convertTo(type, ex, value);
            if (converted != null) {
                value = converted;
            }
        }
        setBody(value);
    }

    /**
     * Creates a fresh instance and copies this message's state onto it.
     */
    public Message copy() {
        Message target = newInstance();
        target.copyFrom(this);
        return target;
    }

    public void copyFrom(Message that) {
        if (that == this) {
            // copying onto ourselves is a no-op
            return;
        }
        setMessageId(that.getMessageId());
        setBody(that.getBody());
        setFault(that.isFault());

        // Guard against source and target sharing the very same header map, which can
        // happen when the end user mistakenly sets the OUT message with the same header
        // instance as the IN message etc.
        boolean sharedHeaders = hasHeaders() && that.hasHeaders() && getHeaders() == that.getHeaders();
        if (!sharedHeaders) {
            if (hasHeaders()) {
                // okay its safe to clear the headers
                getHeaders().clear();
            }
            if (that.hasHeaders()) {
                getHeaders().putAll(that.getHeaders());
            }
        }

        copyAttachments(that);
    }

    public Exchange getExchange() {
        return exchange;
    }

    public void setExchange(Exchange exchange) {
        this.exchange = exchange;
    }

    public void copyAttachments(Message that) {
        // Guard against source and target sharing the very same attachment map, which can
        // happen when the end user mistakenly sets the OUT message with the same attachment
        // instance as the IN message etc.
        boolean sharedAttachments = hasAttachments() && that.hasAttachments()
                && getAttachments() == that.getAttachments();
        if (!sharedAttachments) {
            if (hasAttachments()) {
                // okay its safe to clear the attachments
                getAttachments().clear();
            }
            if (that.hasAttachments()) {
                getAttachments().putAll(that.getAttachments());
            }
        }
    }

    /**
     * Returns a new instance
     */
    public abstract Message newInstance();

    /**
     * A factory method to allow a provider to lazily create the message body
     * for inbound messages from other sources
     *
     * @return the value of the message body or null if there is no value
     *         available
     */
    protected Object createBody() {
        return null;
    }

    public String getMessageId() {
        if (messageId == null) {
            messageId = createMessageId();
        }
        return this.messageId;
    }

    public void setMessageId(String messageId) {
        this.messageId = messageId;
    }

    /**
     * Allow implementations to auto-create a messageId
     */
    protected String createMessageId() {
        if (exchange != null) {
            String uuid = exchange.getContext().getUuidGenerator().generateUuid();
            if (uuid != null) {
                return uuid;
            }
        }
        // fall back to the simple UUID generator
        return new SimpleUuidGenerator().generateUuid();
    }
}
| |
package org.easycluster.easylock.lifecycle;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.easycluster.easylock.DistributedLock;
import org.easycluster.easylock.LockStatus;
import org.easycluster.easylock.LockUpdateCallback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractLifecycle implements CoreLifecycle,
		LockUpdateCallback {

	private static final Logger LOGGER = LoggerFactory
			.getLogger(AbstractLifecycle.class);

	// Current lifecycle state; all reads/writes go through stateLock.
	private LifecycleState state = LifecycleState.INITIAL;

	// Guards 'state'. Final: the lock instance must never be replaced,
	// otherwise readers and writers could end up synchronizing on
	// different locks.
	private final ReentrantReadWriteLock stateLock = new ReentrantReadWriteLock();

	// Optional. When set, activation/suspension is driven by distributed
	// lock acquisition callbacks (updateLockState) instead of directly.
	private DistributedLock distributedLock;

	/**
	 * {@inheritDoc}
	 *
	 * Valid transitions: INITIAL -> PREPARED -> ACTIVATED, and back one
	 * step at a time (ACTIVATED -> PREPARED -> INITIAL).
	 */
	@Override
	public boolean isAllowTo(LifecycleState newState) {
		stateLock.readLock().lock();
		try {
			switch (newState) {
			case INITIAL:
				if (state == LifecycleState.PREPARED) {
					return true;
				}
				break;
			case PREPARED:
				if (state == LifecycleState.INITIAL
						|| state == LifecycleState.ACTIVATED) {
					return true;
				}
				break;
			case ACTIVATED:
				if (state == LifecycleState.PREPARED) {
					return true;
				}
				break;
			}
			// now there is a problem
			if (LOGGER.isDebugEnabled()) {
				LOGGER.debug("Cannot change to state " + newState
						+ " from state " + state);
			}
		} finally {
			stateLock.readLock().unlock();
		}
		return false;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Walks the lifecycle one step at a time towards the requested state,
	 * waiting up to waitTime ms for each asynchronous transition.
	 */
	@Override
	public LifecycleState updateState(LifecycleState newState, long waitTime) {
		if (newState == LifecycleState.INITIAL) {
			// need to downgrade
			if (getState() == LifecycleState.ACTIVATED) {
				suspend();
				waitState(this, LifecycleState.PREPARED, waitTime);
			}
			if (getState() == LifecycleState.PREPARED) {
				release();
			}
		} else if (newState == LifecycleState.PREPARED) {
			if (getState() == LifecycleState.ACTIVATED) {
				// need to downgrade
				suspend();
				waitState(this, LifecycleState.PREPARED, waitTime);
			}
			if (getState() == LifecycleState.INITIAL) {
				// need to upgrade
				prepare();
			}
		} else if (newState == LifecycleState.ACTIVATED) {
			// need to upgrade
			if (getState() == LifecycleState.INITIAL) {
				prepare();
			}
			if (getState() == LifecycleState.PREPARED) {
				activate();
				waitState(this, LifecycleState.ACTIVATED, waitTime);
			}
		}
		return getState();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void setState(LifecycleState state) {
		LifecycleState oldState = null;
		// Assume the caller has check with the isAllowTo method
		// for implementing state transition control
		stateLock.writeLock().lock();
		try {
			oldState = this.state;
			this.state = state;
		} finally {
			stateLock.writeLock().unlock();
		}
		// lifecycle is updated
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Changed state from " + oldState + " to " + state
					+ " in component " + this);
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public LifecycleState getState() {
		stateLock.readLock().lock();
		try {
			return state;
		} finally {
			stateLock.readLock().unlock();
		}
	}

	/**
	 * {@inheritDoc}
	 */
	public final void prepare() {
		if (isAllowTo(LifecycleState.PREPARED)) {
			doPrepare();
			setState(LifecycleState.PREPARED);
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * When a distributed lock is configured, activation is deferred until
	 * the lock is granted (see updateLockState); otherwise it happens
	 * synchronously.
	 */
	public final void activate() {
		if (isAllowTo(LifecycleState.ACTIVATED)) {
			if (distributedLock != null) {
				distributedLock.lock(this);
			} else {
				doActivate();
				setState(LifecycleState.ACTIVATED);
			}
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * When a distributed lock is configured, suspension is triggered by
	 * releasing the lock; the state change follows via updateLockState.
	 */
	public final void suspend() {
		if (isAllowTo(LifecycleState.PREPARED)) {
			if (distributedLock != null) {
				distributedLock.unlock();
			} else {
				doSuspend();
				setState(LifecycleState.PREPARED);
			}
		}
	}

	/**
	 * {@inheritDoc}
	 */
	public final void release() {
		if (isAllowTo(LifecycleState.INITIAL)) {
			doRelease();
			setState(LifecycleState.INITIAL);
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Callback from the distributed lock: MASTER activates the component
	 * (releasing the lock again on failure), STANDBY suspends it (releasing
	 * the lock on success so another node may take over).
	 * Note: lockId is currently unused; the status alone drives the transition.
	 */
	@Override
	public void updateLockState(String lockId, LockStatus lockStatus) {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Lock update callback on lockResource "
					+ distributedLock.getResource() + " lockStatus "
					+ lockStatus);
		}
		LifecycleState state = getState();
		if (lockStatus == LockStatus.MASTER) {
			boolean isActivated = true;
			if (state != LifecycleState.ACTIVATED) {
				if (isAllowTo(LifecycleState.ACTIVATED)) {
					isActivated = doActivate();
					if (isActivated) {
						setState(LifecycleState.ACTIVATED);
					} else {
						// failed to activate the component
						if (LOGGER.isWarnEnabled()) {
							LOGGER.warn("Failed to activate component " + this);
						}
					}
				} else {
					if (LOGGER.isWarnEnabled()) {
						LOGGER.warn("Unable to activate the component " + this
								+ " with state " + state + " with lockStatus "
								+ lockStatus);
					}
					isActivated = false;
				}
			} else {
				if (LOGGER.isInfoEnabled()) {
					LOGGER.info("The component " + this
							+ " is already activated");
				}
			}
			// activation failed, so we need to release the lock so another
			// candidate can become master
			if (!isActivated && distributedLock != null) {
				distributedLock.unlock();
			}
		} else if (lockStatus == LockStatus.STANDBY) {
			// Now change the lifecycle to suspended
			boolean isSuspended = true;
			if (state == LifecycleState.ACTIVATED) {
				if (isAllowTo(LifecycleState.PREPARED)) {
					isSuspended = doSuspend();
					if (isSuspended) {
						setState(LifecycleState.PREPARED);
					} else {
						// failed to suspend the component
						if (LOGGER.isWarnEnabled()) {
							LOGGER.warn("Failed to suspend component " + this);
						}
					}
				} else {
					// the component cannot be suspended
					if (LOGGER.isWarnEnabled()) {
						LOGGER.warn("Unable to suspend the component " + this
								+ " with state " + state + " with lockStatus "
								+ lockStatus);
					}
					isSuspended = false;
				}
				// allow it to release the lock
				if (isSuspended && distributedLock != null) {
					// as long it is suspended, we need to release the lock
					distributedLock.unlock();
				}
			}
		} else {
			// unknown lock status
			if (LOGGER.isInfoEnabled()) {
				LOGGER.info("The component " + this
						+ " received unknown lock status " + lockStatus);
			}
		}
	}

	/**
	 * Polls the lifecycle every 100ms until it reaches the target state or
	 * waitTime ms elapse.
	 *
	 * @return the state observed when the wait ended (may differ from
	 *         targetState on timeout or interruption)
	 */
	LifecycleState waitState(CoreLifecycle lifecycle,
			LifecycleState targetState, long waitTime) {
		long targetTime = System.currentTimeMillis() + waitTime;
		LifecycleState currentState = lifecycle.getState();
		while (currentState != targetState) {
			try {
				Thread.sleep(100);
			} catch (InterruptedException e) {
				// Restore the interrupt flag and stop waiting, so callers can
				// observe the interruption instead of it being swallowed.
				Thread.currentThread().interrupt();
				break;
			}
			currentState = lifecycle.getState();
			if (System.currentTimeMillis() > targetTime) {
				break;
			}
		}
		return currentState;
	}

	/**
	 * Prepare the component now.
	 *
	 * @return true when preparation succeeded
	 */
	protected boolean doPrepare() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Component " + this + " is preparing now");
		}
		return true;
	}

	/**
	 * Activate the component now.
	 *
	 * @return true when activation succeeded
	 */
	protected boolean doActivate() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Component " + this + " is activating now");
		}
		return true;
	}

	/**
	 * Suspend the component now.
	 *
	 * @return true when suspension succeeded
	 */
	protected boolean doSuspend() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Component " + this + " is suspending now");
		}
		return true;
	}

	/**
	 * Disengage the component now.
	 *
	 * @return true when release succeeded
	 */
	protected boolean doRelease() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Component " + this + " is releasing now");
		}
		return true;
	}

	public void setDistributedLock(DistributedLock distributedLock) {
		this.distributedLock = distributedLock;
	}

	@Override
	public String toString() {
		StringBuilder builder = new StringBuilder(super.toString());
		builder.append(",state=").append(getState());
		return builder.toString();
	}
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.android.webview.chromium;
import android.app.Application;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.os.Trace;
import android.util.SparseArray;
import android.view.View;
import java.lang.reflect.Method;
/**
* Factory class for {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate}s.
*
* <p>{@link WebViewDelegate com.android.webview.chromium.WebViewDelegate}s provide the same
* interface as {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate} but without
* a dependency on the webkit class. Defining our own
* {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} in frameworks/webview
* allows the WebView apk to be binary compatible with the API 21 version of the framework, in
* which {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate} had not yet been
* introduced.
*
* <p>The {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} interface and this
* factory class can be removed once we don't longer need to support WebView apk updates to devices
* running the API 21 version of the framework. At that point, we should use
* {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate} directly instead.
*/
class WebViewDelegateFactory {
    /**
     * Copy of {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate}'s interface.
     * See {@link WebViewDelegateFactory} for the reasons why this copy is needed.
     */
    interface WebViewDelegate {
        /** @see android.webkit.WebViewDelegate.OnTraceEnabledChangeListener */
        interface OnTraceEnabledChangeListener {
            void onTraceEnabledChange(boolean enabled);
        }

        /** @see android.webkit.WebViewDelegate#setOnTraceEnabledChangeListener */
        void setOnTraceEnabledChangeListener(final OnTraceEnabledChangeListener listener);

        /** @see android.webkit.WebViewDelegate#isTraceTagEnabled */
        boolean isTraceTagEnabled();

        /** @see android.webkit.WebViewDelegate#canInvokeDrawGlFunctor */
        boolean canInvokeDrawGlFunctor(View containerView);

        /** @see android.webkit.WebViewDelegate#invokeDrawGlFunctor */
        void invokeDrawGlFunctor(
                View containerView, long nativeDrawGLFunctor, boolean waitForCompletion);

        /** @see android.webkit.WebViewDelegate#callDrawGlFunction */
        void callDrawGlFunction(Canvas canvas, long nativeDrawGLFunctor);

        /** @see android.webkit.WebViewDelegate#detachDrawGlFunctor */
        void detachDrawGlFunctor(View containerView, long nativeDrawGLFunctor);

        /** @see android.webkit.WebViewDelegate#getPackageId */
        int getPackageId(Resources resources, String packageName);

        /** @see android.webkit.WebViewDelegate#getApplication */
        Application getApplication();

        /** @see android.webkit.WebViewDelegate#getErrorString */
        String getErrorString(Context context, int errorCode);

        /** @see android.webkit.WebViewDelegate#addWebViewAssetPath */
        void addWebViewAssetPath(Context context);
    }
    /**
     * Creates a {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} that proxies
     * requests to the given {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate}.
     *
     * @param delegate the framework delegate to forward all calls to
     * @return the created delegate
     */
    static WebViewDelegate createProxyDelegate(android.webkit.WebViewDelegate delegate) {
        return new ProxyDelegate(delegate);
    }
    /**
     * Creates a {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} compatible
     * with the API 21 version of the framework in which
     * {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate} had not yet been
     * introduced. The returned delegate reaches the equivalent hidden framework
     * APIs via reflection instead.
     *
     * @return the created delegate
     */
    static WebViewDelegate createApi21CompatibilityDelegate() {
        return new Api21CompatibilityDelegate();
    }
/**
* A {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} that proxies requests
* to a {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate}.
*/
private static class ProxyDelegate implements WebViewDelegate {
android.webkit.WebViewDelegate mDelegate;
ProxyDelegate(android.webkit.WebViewDelegate delegate) {
mDelegate = delegate;
}
@Override
public void setOnTraceEnabledChangeListener(final OnTraceEnabledChangeListener listener) {
mDelegate.setOnTraceEnabledChangeListener(
new android.webkit.WebViewDelegate.OnTraceEnabledChangeListener() {
@Override
public void onTraceEnabledChange(boolean enabled) {
listener.onTraceEnabledChange(enabled);
}
});
}
@Override
public boolean isTraceTagEnabled() {
return mDelegate.isTraceTagEnabled();
}
@Override
public boolean canInvokeDrawGlFunctor(View containerView) {
return mDelegate.canInvokeDrawGlFunctor(containerView);
}
@Override
public void invokeDrawGlFunctor(
View containerView, long nativeDrawGLFunctor, boolean waitForCompletion) {
mDelegate.invokeDrawGlFunctor(containerView, nativeDrawGLFunctor, waitForCompletion);
}
@Override
public void callDrawGlFunction(Canvas canvas, long nativeDrawGLFunctor) {
mDelegate.callDrawGlFunction(canvas, nativeDrawGLFunctor);
}
@Override
public void detachDrawGlFunctor(View containerView, long nativeDrawGLFunctor) {
mDelegate.detachDrawGlFunctor(containerView, nativeDrawGLFunctor);
}
@Override
public int getPackageId(Resources resources, String packageName) {
return mDelegate.getPackageId(resources, packageName);
}
@Override
public Application getApplication() {
return mDelegate.getApplication();
}
@Override
public String getErrorString(Context context, int errorCode) {
return mDelegate.getErrorString(context, errorCode);
}
@Override
public void addWebViewAssetPath(Context context) {
mDelegate.addWebViewAssetPath(context);
}
}
/**
* A {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} compatible with the
* API 21 version of the framework in which
* {@link android.webkit.WebViewDelegate android.webkit.WebViewDelegate} had not yet been
* introduced.
*
* <p>This class implements the
* {@link WebViewDelegate com.android.webview.chromium.WebViewDelegate} functionality by using
* reflection to call into hidden frameworks APIs released in the API-21 version of the
* framework.
*/
private static class Api21CompatibilityDelegate implements WebViewDelegate {
/** Copy of Trace.TRACE_TAG_WEBVIEW */
private static final long TRACE_TAG_WEBVIEW = 1L << 4;
/** Hidden APIs released in the API 21 version of the framework */
private final Method mIsTagEnabledMethod;
private final Method mAddChangeCallbackMethod;
private final Method mGetViewRootImplMethod;
private final Method mInvokeFunctorMethod;
private final Method mCallDrawGLFunctionMethod;
private final Method mDetachFunctorMethod;
private final Method mGetAssignedPackageIdentifiersMethod;
private final Method mAddAssetPathMethod;
private final Method mCurrentApplicationMethod;
private final Method mGetStringMethod;
private final Method mGetLoadedPackageInfoMethod;
Api21CompatibilityDelegate() {
try {
// Important: This reflection essentially defines a snapshot of some hidden APIs
// at version 21 of the framework for compatibility reasons, and the reflection
// should not be changed even if those hidden APIs change in future releases.
mIsTagEnabledMethod = Trace.class.getMethod("isTagEnabled", long.class);
mAddChangeCallbackMethod = Class.forName("android.os.SystemProperties")
.getMethod("addChangeCallback", Runnable.class);
mGetViewRootImplMethod = View.class.getMethod("getViewRootImpl");
mInvokeFunctorMethod =
Class.forName("android.view.ViewRootImpl")
.getMethod("invokeFunctor", long.class, boolean.class);
mDetachFunctorMethod = Class.forName("android.view.ViewRootImpl")
.getMethod("detachFunctor", long.class);
mCallDrawGLFunctionMethod = Class.forName("android.view.HardwareCanvas")
.getMethod("callDrawGLFunction", long.class);
mGetAssignedPackageIdentifiersMethod =
AssetManager.class.getMethod("getAssignedPackageIdentifiers");
mAddAssetPathMethod = AssetManager.class.getMethod("addAssetPath", String.class);
mCurrentApplicationMethod =
Class.forName("android.app.ActivityThread").getMethod("currentApplication");
mGetStringMethod = Class.forName("android.net.http.ErrorStrings")
.getMethod("getString", int.class, Context.class);
mGetLoadedPackageInfoMethod = Class.forName("android.webkit.WebViewFactory")
.getMethod("getLoadedPackageInfo");
} catch (Exception e) {
throw new RuntimeException("Invalid reflection", e);
}
}
@Override
public void setOnTraceEnabledChangeListener(final OnTraceEnabledChangeListener listener) {
    // Registers a system-property change callback that re-queries the webview
    // trace-tag state and forwards the result to the supplied listener.
    final Runnable callback = new Runnable() {
        @Override
        public void run() {
            listener.onTraceEnabledChange(isTraceTagEnabled());
        }
    };
    try {
        // SystemProperties.addChangeCallback is static, hence the null receiver.
        mAddChangeCallbackMethod.invoke(null, callback);
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public boolean isTraceTagEnabled() {
    // Queries the hidden Trace.isTagEnabled(long) for the webview trace tag.
    try {
        Object enabled = mIsTagEnabledMethod.invoke(null, TRACE_TAG_WEBVIEW);
        return ((Boolean) enabled).booleanValue();
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public boolean canInvokeDrawGlFunctor(View containerView) {
    // A GL functor can only be dispatched while the view has a ViewRootImpl.
    final Object root;
    try {
        root = mGetViewRootImplMethod.invoke(containerView);
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
    // The root can be null during teardown when the window is leaked.
    return root != null;
}
@Override
public void invokeDrawGlFunctor(
        View containerView, long nativeDrawGLFunctor, boolean waitForCompletion) {
    // Dispatches the native functor via ViewRootImpl.invokeFunctor; silently a
    // no-op when the view is no longer attached to a window.
    try {
        final Object root = mGetViewRootImplMethod.invoke(containerView);
        if (root == null) {
            return;
        }
        mInvokeFunctorMethod.invoke(root, nativeDrawGLFunctor, waitForCompletion);
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public void callDrawGlFunction(Canvas canvas, long nativeDrawGLFunctor) {
    // Forwards the functor pointer to HardwareCanvas.callDrawGLFunction(long).
    try {
        mCallDrawGLFunctionMethod.invoke(canvas, Long.valueOf(nativeDrawGLFunctor));
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public void detachDrawGlFunctor(View containerView, long nativeDrawGLFunctor) {
    // Detaches the native functor from the view's ViewRootImpl, if one exists.
    try {
        final Object root = mGetViewRootImplMethod.invoke(containerView);
        if (root == null) {
            return;
        }
        mDetachFunctorMethod.invoke(root, nativeDrawGLFunctor);
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public int getPackageId(Resources resources, String packageName) {
    // Scans the AssetManager's id -> package-name table for the given name.
    try {
        final SparseArray ids =
                (SparseArray) mGetAssignedPackageIdentifiersMethod.invoke(
                        resources.getAssets());
        for (int index = 0, count = ids.size(); index < count; index++) {
            final String candidate = (String) ids.valueAt(index);
            if (packageName.equals(candidate)) {
                return ids.keyAt(index);
            }
        }
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
    throw new RuntimeException("Package not found: " + packageName);
}
@Override
public Application getApplication() {
    // ActivityThread.currentApplication() is static, hence the null receiver.
    try {
        final Object app = mCurrentApplicationMethod.invoke(null);
        return (Application) app;
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public String getErrorString(Context context, int errorCode) {
    // Looks up the localized message for a network error code via the hidden
    // static ErrorStrings.getString(int, Context).
    try {
        final Object message = mGetStringMethod.invoke(null, errorCode, context);
        return (String) message;
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
@Override
public void addWebViewAssetPath(Context context) {
    // Appends the loaded WebView APK's assets to this context's AssetManager.
    try {
        final Object loaded = mGetLoadedPackageInfoMethod.invoke(null);
        final String apkPath = ((PackageInfo) loaded).applicationInfo.sourceDir;
        mAddAssetPathMethod.invoke(context.getAssets(), apkPath);
    } catch (Exception e) {
        throw new RuntimeException("Invalid reflection", e);
    }
}
}
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import static com.google.protobuf.FieldInfo.forField;
import static com.google.protobuf.FieldInfo.forMapField;
import static com.google.protobuf.FieldInfo.forOneofMemberField;
import static com.google.protobuf.FieldInfo.forRepeatedMessageField;
import com.google.protobuf.testing.Proto2Testing.Proto2MessageWithMaps;
import com.google.protobuf.testing.Proto3Testing.Proto3Empty;
import com.google.protobuf.testing.Proto3Testing.Proto3Message;
import com.google.protobuf.testing.Proto3Testing.Proto3MessageWithMaps;
import java.lang.reflect.Field;
/** A factory that generates a hard-coded info for {@link Proto3Message}. */
public final class Proto3MessageInfoFactory implements MessageInfoFactory {
// Singleton: the factory is stateless, so one shared instance suffices.
private static final Proto3MessageInfoFactory INSTANCE = new Proto3MessageInfoFactory();

// Private to enforce the singleton pattern; obtain via getInstance().
private Proto3MessageInfoFactory() {}

/** Returns the shared factory instance. */
public static Proto3MessageInfoFactory getInstance() {
  return INSTANCE;
}
@Override
public boolean isSupported(Class<?> clazz) {
  // Claims support for every class; messageInfoFor() rejects unknown types.
  return true;
}
/** Dispatches to the hard-coded info builder for the given message class. */
@Override
public MessageInfo messageInfoFor(Class<?> clazz) {
  if (Proto3Message.class.isAssignableFrom(clazz)) {
    return newMessageInfoForProto3Message();
  }
  if (Proto3Empty.class.isAssignableFrom(clazz)) {
    return newMessageInfoForProto3Empty();
  }
  if (Proto3MessageWithMaps.class.isAssignableFrom(clazz)) {
    return newMessageInfoForProto3MessageWithMaps();
  }
  throw new IllegalArgumentException("Unsupported class: " + clazz.getName());
}
/**
 * Creates a new hard-coded info for {@link Proto3Message}. Each time this is called, we manually
 * go through the entire process of what a message would do if it self-registered its own info,
 * including looking up each field by name. This is done for benchmarking purposes, so that we get
 * a more accurate representation of the time it takes to perform this process.
 */
private static StructuralMessageInfo newMessageInfoForProto3Message() {
  final StructuralMessageInfo.Builder infoBuilder = StructuralMessageInfo.newBuilder(48);
  lookupFieldsByName(infoBuilder);
  return infoBuilder.build();
}
/**
 * Registers every field of {@link Proto3Message} on the given builder, looking each one up by
 * the name of its generated Java member (e.g. "fieldDouble1_") via {@link #field(String)}.
 */
private static void lookupFieldsByName(StructuralMessageInfo.Builder builder) {
  builder.withDefaultInstance(Proto3Message.getDefaultInstance());
  builder.withSyntax(ProtoSyntax.PROTO3);
  // Singular scalar fields (numbers 1-17).
  builder.withField(forField(field("fieldDouble1_"), 1, FieldType.DOUBLE, true));
  builder.withField(forField(field("fieldFloat2_"), 2, FieldType.FLOAT, true));
  builder.withField(forField(field("fieldInt643_"), 3, FieldType.INT64, true));
  builder.withField(forField(field("fieldUint644_"), 4, FieldType.UINT64, true));
  builder.withField(forField(field("fieldInt325_"), 5, FieldType.INT32, true));
  builder.withField(forField(field("fieldFixed646_"), 6, FieldType.FIXED64, true));
  builder.withField(forField(field("fieldFixed327_"), 7, FieldType.FIXED32, true));
  builder.withField(forField(field("fieldBool8_"), 8, FieldType.BOOL, true));
  builder.withField(forField(field("fieldString9_"), 9, FieldType.STRING, true));
  builder.withField(forField(field("fieldMessage10_"), 10, FieldType.MESSAGE, true));
  builder.withField(forField(field("fieldBytes11_"), 11, FieldType.BYTES, true));
  builder.withField(forField(field("fieldUint3212_"), 12, FieldType.UINT32, true));
  builder.withField(forField(field("fieldEnum13_"), 13, FieldType.ENUM, true));
  builder.withField(forField(field("fieldSfixed3214_"), 14, FieldType.SFIXED32, true));
  builder.withField(forField(field("fieldSfixed6415_"), 15, FieldType.SFIXED64, true));
  builder.withField(forField(field("fieldSint3216_"), 16, FieldType.SINT32, true));
  builder.withField(forField(field("fieldSint6417_"), 17, FieldType.SINT64, true));
  // Unpacked repeated fields (numbers 18-34).
  builder.withField(forField(field("fieldDoubleList18_"), 18, FieldType.DOUBLE_LIST, true));
  builder.withField(forField(field("fieldFloatList19_"), 19, FieldType.FLOAT_LIST, true));
  builder.withField(forField(field("fieldInt64List20_"), 20, FieldType.INT64_LIST, true));
  builder.withField(forField(field("fieldUint64List21_"), 21, FieldType.UINT64_LIST, true));
  builder.withField(forField(field("fieldInt32List22_"), 22, FieldType.INT32_LIST, true));
  builder.withField(forField(field("fieldFixed64List23_"), 23, FieldType.FIXED64_LIST, true));
  builder.withField(forField(field("fieldFixed32List24_"), 24, FieldType.FIXED32_LIST, true));
  builder.withField(forField(field("fieldBoolList25_"), 25, FieldType.BOOL_LIST, true));
  builder.withField(forField(field("fieldStringList26_"), 26, FieldType.STRING_LIST, true));
  builder.withField(
      forRepeatedMessageField(
          field("fieldMessageList27_"), 27, FieldType.MESSAGE_LIST, Proto3Message.class));
  builder.withField(forField(field("fieldBytesList28_"), 28, FieldType.BYTES_LIST, true));
  builder.withField(forField(field("fieldUint32List29_"), 29, FieldType.UINT32_LIST, true));
  builder.withField(forField(field("fieldEnumList30_"), 30, FieldType.ENUM_LIST, true));
  builder.withField(forField(field("fieldSfixed32List31_"), 31, FieldType.SFIXED32_LIST, true));
  builder.withField(forField(field("fieldSfixed64List32_"), 32, FieldType.SFIXED64_LIST, true));
  builder.withField(forField(field("fieldSint32List33_"), 33, FieldType.SINT32_LIST, true));
  builder.withField(forField(field("fieldSint64List34_"), 34, FieldType.SINT64_LIST, true));
  // Packed repeated fields (numbers 35-48).
  builder.withField(
      forField(field("fieldDoubleListPacked35_"), 35, FieldType.DOUBLE_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldFloatListPacked36_"), 36, FieldType.FLOAT_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldInt64ListPacked37_"), 37, FieldType.INT64_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldUint64ListPacked38_"), 38, FieldType.UINT64_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldInt32ListPacked39_"), 39, FieldType.INT32_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldFixed64ListPacked40_"), 40, FieldType.FIXED64_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldFixed32ListPacked41_"), 41, FieldType.FIXED32_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldBoolListPacked42_"), 42, FieldType.BOOL_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldUint32ListPacked43_"), 43, FieldType.UINT32_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldEnumListPacked44_"), 44, FieldType.ENUM_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldSfixed32ListPacked45_"), 45, FieldType.SFIXED32_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldSfixed64ListPacked46_"), 46, FieldType.SFIXED64_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldSint32ListPacked47_"), 47, FieldType.SINT32_LIST_PACKED, true));
  builder.withField(
      forField(field("fieldSint64ListPacked48_"), 48, FieldType.SINT64_LIST_PACKED, true));
  // Members of the testOneof oneof (field numbers 53-68); all share one case
  // field and one value field on the message.
  OneofInfo oneof = new OneofInfo(0, field("testOneofCase_"), field("testOneof_"));
  builder.withField(forOneofMemberField(53, FieldType.DOUBLE, oneof, Double.class, true, null));
  builder.withField(forOneofMemberField(54, FieldType.FLOAT, oneof, Float.class, true, null));
  builder.withField(forOneofMemberField(55, FieldType.INT64, oneof, Long.class, true, null));
  builder.withField(forOneofMemberField(56, FieldType.UINT64, oneof, Long.class, true, null));
  builder.withField(forOneofMemberField(57, FieldType.INT32, oneof, Integer.class, true, null));
  builder.withField(forOneofMemberField(58, FieldType.FIXED64, oneof, Long.class, true, null));
  builder.withField(forOneofMemberField(59, FieldType.FIXED32, oneof, Integer.class, true, null));
  builder.withField(forOneofMemberField(60, FieldType.BOOL, oneof, Boolean.class, true, null));
  builder.withField(forOneofMemberField(61, FieldType.STRING, oneof, String.class, true, null));
  builder.withField(
      forOneofMemberField(62, FieldType.MESSAGE, oneof, Proto3Message.class, true, null));
  builder.withField(
      forOneofMemberField(63, FieldType.BYTES, oneof, ByteString.class, true, null));
  builder.withField(forOneofMemberField(64, FieldType.UINT32, oneof, Integer.class, true, null));
  builder.withField(
      forOneofMemberField(65, FieldType.SFIXED32, oneof, Integer.class, true, null));
  builder.withField(forOneofMemberField(66, FieldType.SFIXED64, oneof, Long.class, true, null));
  builder.withField(forOneofMemberField(67, FieldType.SINT32, oneof, Integer.class, true, null));
  builder.withField(forOneofMemberField(68, FieldType.SINT64, oneof, Long.class, true, null));
}
/** Creates a new hard-coded info for {@link Proto3Empty}, which declares no fields. */
private StructuralMessageInfo newMessageInfoForProto3Empty() {
  final StructuralMessageInfo.Builder emptyBuilder = StructuralMessageInfo.newBuilder(1);
  emptyBuilder.withSyntax(ProtoSyntax.PROTO3);
  return emptyBuilder.build();
}
/**
 * Creates a new hard-coded info for {@link Proto3MessageWithMaps}, registering each of its 204
 * map fields by name.
 *
 * <p>Fix: fields 203 and 204 previously reflected on {@code Proto2MessageWithMaps} — the wrong
 * generated class for this proto3 factory. All lookups now go through a single local so the
 * target class cannot drift within this list again.
 */
private StructuralMessageInfo newMessageInfoForProto3MessageWithMaps() {
  final Class<Proto3MessageWithMaps> target = Proto3MessageWithMaps.class;
  StructuralMessageInfo.Builder builder = StructuralMessageInfo.newBuilder();
  builder.withSyntax(ProtoSyntax.PROTO3);
  builder.withField(mapFieldInfo(target, "field_map_bool_bool_1", 1));
  builder.withField(mapFieldInfo(target, "field_map_bool_bytes_2", 2));
  builder.withField(mapFieldInfo(target, "field_map_bool_double_3", 3));
  builder.withField(mapFieldInfo(target, "field_map_bool_enum_4", 4));
  builder.withField(mapFieldInfo(target, "field_map_bool_fixed32_5", 5));
  builder.withField(mapFieldInfo(target, "field_map_bool_fixed64_6", 6));
  builder.withField(mapFieldInfo(target, "field_map_bool_float_7", 7));
  builder.withField(mapFieldInfo(target, "field_map_bool_int32_8", 8));
  builder.withField(mapFieldInfo(target, "field_map_bool_int64_9", 9));
  builder.withField(mapFieldInfo(target, "field_map_bool_message_10", 10));
  builder.withField(mapFieldInfo(target, "field_map_bool_sfixed32_11", 11));
  builder.withField(mapFieldInfo(target, "field_map_bool_sfixed64_12", 12));
  builder.withField(mapFieldInfo(target, "field_map_bool_sint32_13", 13));
  builder.withField(mapFieldInfo(target, "field_map_bool_sint64_14", 14));
  builder.withField(mapFieldInfo(target, "field_map_bool_string_15", 15));
  builder.withField(mapFieldInfo(target, "field_map_bool_uint32_16", 16));
  builder.withField(mapFieldInfo(target, "field_map_bool_uint64_17", 17));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_bool_18", 18));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_bytes_19", 19));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_double_20", 20));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_enum_21", 21));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_fixed32_22", 22));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_fixed64_23", 23));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_float_24", 24));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_int32_25", 25));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_int64_26", 26));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_message_27", 27));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_sfixed32_28", 28));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_sfixed64_29", 29));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_sint32_30", 30));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_sint64_31", 31));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_string_32", 32));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_uint32_33", 33));
  builder.withField(mapFieldInfo(target, "field_map_fixed32_uint64_34", 34));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_bool_35", 35));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_bytes_36", 36));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_double_37", 37));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_enum_38", 38));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_fixed32_39", 39));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_fixed64_40", 40));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_float_41", 41));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_int32_42", 42));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_int64_43", 43));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_message_44", 44));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_sfixed32_45", 45));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_sfixed64_46", 46));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_sint32_47", 47));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_sint64_48", 48));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_string_49", 49));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_uint32_50", 50));
  builder.withField(mapFieldInfo(target, "field_map_fixed64_uint64_51", 51));
  builder.withField(mapFieldInfo(target, "field_map_int32_bool_52", 52));
  builder.withField(mapFieldInfo(target, "field_map_int32_bytes_53", 53));
  builder.withField(mapFieldInfo(target, "field_map_int32_double_54", 54));
  builder.withField(mapFieldInfo(target, "field_map_int32_enum_55", 55));
  builder.withField(mapFieldInfo(target, "field_map_int32_fixed32_56", 56));
  builder.withField(mapFieldInfo(target, "field_map_int32_fixed64_57", 57));
  builder.withField(mapFieldInfo(target, "field_map_int32_float_58", 58));
  builder.withField(mapFieldInfo(target, "field_map_int32_int32_59", 59));
  builder.withField(mapFieldInfo(target, "field_map_int32_int64_60", 60));
  builder.withField(mapFieldInfo(target, "field_map_int32_message_61", 61));
  builder.withField(mapFieldInfo(target, "field_map_int32_sfixed32_62", 62));
  builder.withField(mapFieldInfo(target, "field_map_int32_sfixed64_63", 63));
  builder.withField(mapFieldInfo(target, "field_map_int32_sint32_64", 64));
  builder.withField(mapFieldInfo(target, "field_map_int32_sint64_65", 65));
  builder.withField(mapFieldInfo(target, "field_map_int32_string_66", 66));
  builder.withField(mapFieldInfo(target, "field_map_int32_uint32_67", 67));
  builder.withField(mapFieldInfo(target, "field_map_int32_uint64_68", 68));
  builder.withField(mapFieldInfo(target, "field_map_int64_bool_69", 69));
  builder.withField(mapFieldInfo(target, "field_map_int64_bytes_70", 70));
  builder.withField(mapFieldInfo(target, "field_map_int64_double_71", 71));
  builder.withField(mapFieldInfo(target, "field_map_int64_enum_72", 72));
  builder.withField(mapFieldInfo(target, "field_map_int64_fixed32_73", 73));
  builder.withField(mapFieldInfo(target, "field_map_int64_fixed64_74", 74));
  builder.withField(mapFieldInfo(target, "field_map_int64_float_75", 75));
  builder.withField(mapFieldInfo(target, "field_map_int64_int32_76", 76));
  builder.withField(mapFieldInfo(target, "field_map_int64_int64_77", 77));
  builder.withField(mapFieldInfo(target, "field_map_int64_message_78", 78));
  builder.withField(mapFieldInfo(target, "field_map_int64_sfixed32_79", 79));
  builder.withField(mapFieldInfo(target, "field_map_int64_sfixed64_80", 80));
  builder.withField(mapFieldInfo(target, "field_map_int64_sint32_81", 81));
  builder.withField(mapFieldInfo(target, "field_map_int64_sint64_82", 82));
  builder.withField(mapFieldInfo(target, "field_map_int64_string_83", 83));
  builder.withField(mapFieldInfo(target, "field_map_int64_uint32_84", 84));
  builder.withField(mapFieldInfo(target, "field_map_int64_uint64_85", 85));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_bool_86", 86));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_bytes_87", 87));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_double_88", 88));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_enum_89", 89));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_fixed32_90", 90));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_fixed64_91", 91));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_float_92", 92));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_int32_93", 93));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_int64_94", 94));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_message_95", 95));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_sfixed32_96", 96));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_sfixed64_97", 97));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_sint32_98", 98));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_sint64_99", 99));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_string_100", 100));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_uint32_101", 101));
  builder.withField(mapFieldInfo(target, "field_map_sfixed32_uint64_102", 102));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_bool_103", 103));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_bytes_104", 104));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_double_105", 105));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_enum_106", 106));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_fixed32_107", 107));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_fixed64_108", 108));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_float_109", 109));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_int32_110", 110));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_int64_111", 111));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_message_112", 112));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_sfixed32_113", 113));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_sfixed64_114", 114));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_sint32_115", 115));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_sint64_116", 116));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_string_117", 117));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_uint32_118", 118));
  builder.withField(mapFieldInfo(target, "field_map_sfixed64_uint64_119", 119));
  builder.withField(mapFieldInfo(target, "field_map_sint32_bool_120", 120));
  builder.withField(mapFieldInfo(target, "field_map_sint32_bytes_121", 121));
  builder.withField(mapFieldInfo(target, "field_map_sint32_double_122", 122));
  builder.withField(mapFieldInfo(target, "field_map_sint32_enum_123", 123));
  builder.withField(mapFieldInfo(target, "field_map_sint32_fixed32_124", 124));
  builder.withField(mapFieldInfo(target, "field_map_sint32_fixed64_125", 125));
  builder.withField(mapFieldInfo(target, "field_map_sint32_float_126", 126));
  builder.withField(mapFieldInfo(target, "field_map_sint32_int32_127", 127));
  builder.withField(mapFieldInfo(target, "field_map_sint32_int64_128", 128));
  builder.withField(mapFieldInfo(target, "field_map_sint32_message_129", 129));
  builder.withField(mapFieldInfo(target, "field_map_sint32_sfixed32_130", 130));
  builder.withField(mapFieldInfo(target, "field_map_sint32_sfixed64_131", 131));
  builder.withField(mapFieldInfo(target, "field_map_sint32_sint32_132", 132));
  builder.withField(mapFieldInfo(target, "field_map_sint32_sint64_133", 133));
  builder.withField(mapFieldInfo(target, "field_map_sint32_string_134", 134));
  builder.withField(mapFieldInfo(target, "field_map_sint32_uint32_135", 135));
  builder.withField(mapFieldInfo(target, "field_map_sint32_uint64_136", 136));
  builder.withField(mapFieldInfo(target, "field_map_sint64_bool_137", 137));
  builder.withField(mapFieldInfo(target, "field_map_sint64_bytes_138", 138));
  builder.withField(mapFieldInfo(target, "field_map_sint64_double_139", 139));
  builder.withField(mapFieldInfo(target, "field_map_sint64_enum_140", 140));
  builder.withField(mapFieldInfo(target, "field_map_sint64_fixed32_141", 141));
  builder.withField(mapFieldInfo(target, "field_map_sint64_fixed64_142", 142));
  builder.withField(mapFieldInfo(target, "field_map_sint64_float_143", 143));
  builder.withField(mapFieldInfo(target, "field_map_sint64_int32_144", 144));
  builder.withField(mapFieldInfo(target, "field_map_sint64_int64_145", 145));
  builder.withField(mapFieldInfo(target, "field_map_sint64_message_146", 146));
  builder.withField(mapFieldInfo(target, "field_map_sint64_sfixed32_147", 147));
  builder.withField(mapFieldInfo(target, "field_map_sint64_sfixed64_148", 148));
  builder.withField(mapFieldInfo(target, "field_map_sint64_sint32_149", 149));
  builder.withField(mapFieldInfo(target, "field_map_sint64_sint64_150", 150));
  builder.withField(mapFieldInfo(target, "field_map_sint64_string_151", 151));
  builder.withField(mapFieldInfo(target, "field_map_sint64_uint32_152", 152));
  builder.withField(mapFieldInfo(target, "field_map_sint64_uint64_153", 153));
  builder.withField(mapFieldInfo(target, "field_map_string_bool_154", 154));
  builder.withField(mapFieldInfo(target, "field_map_string_bytes_155", 155));
  builder.withField(mapFieldInfo(target, "field_map_string_double_156", 156));
  builder.withField(mapFieldInfo(target, "field_map_string_enum_157", 157));
  builder.withField(mapFieldInfo(target, "field_map_string_fixed32_158", 158));
  builder.withField(mapFieldInfo(target, "field_map_string_fixed64_159", 159));
  builder.withField(mapFieldInfo(target, "field_map_string_float_160", 160));
  builder.withField(mapFieldInfo(target, "field_map_string_int32_161", 161));
  builder.withField(mapFieldInfo(target, "field_map_string_int64_162", 162));
  builder.withField(mapFieldInfo(target, "field_map_string_message_163", 163));
  builder.withField(mapFieldInfo(target, "field_map_string_sfixed32_164", 164));
  builder.withField(mapFieldInfo(target, "field_map_string_sfixed64_165", 165));
  builder.withField(mapFieldInfo(target, "field_map_string_sint32_166", 166));
  builder.withField(mapFieldInfo(target, "field_map_string_sint64_167", 167));
  builder.withField(mapFieldInfo(target, "field_map_string_string_168", 168));
  builder.withField(mapFieldInfo(target, "field_map_string_uint32_169", 169));
  builder.withField(mapFieldInfo(target, "field_map_string_uint64_170", 170));
  builder.withField(mapFieldInfo(target, "field_map_uint32_bool_171", 171));
  builder.withField(mapFieldInfo(target, "field_map_uint32_bytes_172", 172));
  builder.withField(mapFieldInfo(target, "field_map_uint32_double_173", 173));
  builder.withField(mapFieldInfo(target, "field_map_uint32_enum_174", 174));
  builder.withField(mapFieldInfo(target, "field_map_uint32_fixed32_175", 175));
  builder.withField(mapFieldInfo(target, "field_map_uint32_fixed64_176", 176));
  builder.withField(mapFieldInfo(target, "field_map_uint32_float_177", 177));
  builder.withField(mapFieldInfo(target, "field_map_uint32_int32_178", 178));
  builder.withField(mapFieldInfo(target, "field_map_uint32_int64_179", 179));
  builder.withField(mapFieldInfo(target, "field_map_uint32_message_180", 180));
  builder.withField(mapFieldInfo(target, "field_map_uint32_sfixed32_181", 181));
  builder.withField(mapFieldInfo(target, "field_map_uint32_sfixed64_182", 182));
  builder.withField(mapFieldInfo(target, "field_map_uint32_sint32_183", 183));
  builder.withField(mapFieldInfo(target, "field_map_uint32_sint64_184", 184));
  builder.withField(mapFieldInfo(target, "field_map_uint32_string_185", 185));
  builder.withField(mapFieldInfo(target, "field_map_uint32_uint32_186", 186));
  builder.withField(mapFieldInfo(target, "field_map_uint32_uint64_187", 187));
  builder.withField(mapFieldInfo(target, "field_map_uint64_bool_188", 188));
  builder.withField(mapFieldInfo(target, "field_map_uint64_bytes_189", 189));
  builder.withField(mapFieldInfo(target, "field_map_uint64_double_190", 190));
  builder.withField(mapFieldInfo(target, "field_map_uint64_enum_191", 191));
  builder.withField(mapFieldInfo(target, "field_map_uint64_fixed32_192", 192));
  builder.withField(mapFieldInfo(target, "field_map_uint64_fixed64_193", 193));
  builder.withField(mapFieldInfo(target, "field_map_uint64_float_194", 194));
  builder.withField(mapFieldInfo(target, "field_map_uint64_int32_195", 195));
  builder.withField(mapFieldInfo(target, "field_map_uint64_int64_196", 196));
  builder.withField(mapFieldInfo(target, "field_map_uint64_message_197", 197));
  builder.withField(mapFieldInfo(target, "field_map_uint64_sfixed32_198", 198));
  builder.withField(mapFieldInfo(target, "field_map_uint64_sfixed64_199", 199));
  builder.withField(mapFieldInfo(target, "field_map_uint64_sint32_200", 200));
  builder.withField(mapFieldInfo(target, "field_map_uint64_sint64_201", 201));
  builder.withField(mapFieldInfo(target, "field_map_uint64_string_202", 202));
  builder.withField(mapFieldInfo(target, "field_map_uint64_uint32_203", 203));
  builder.withField(mapFieldInfo(target, "field_map_uint64_uint64_204", 204));
  return builder.build();
}
  /**
   * Looks up the declared field {@code name} on {@code Proto3Message}.
   * Convenience overload delegating to {@link #field(Class, String)}.
   */
  private static Field field(String name) {
    return field(Proto3Message.class, name);
  }
private static Field field(Class<?> clazz, String name) {
try {
return clazz.getDeclaredField(name);
} catch (NoSuchFieldException | SecurityException e) {
throw new RuntimeException(e);
}
}
private static FieldInfo mapFieldInfo(Class<?> clazz, String fieldName, int fieldNumber) {
try {
return forMapField(
field(clazz, SchemaUtil.toCamelCase(fieldName, false) + "_"),
fieldNumber,
SchemaUtil.getMapDefaultEntry(clazz, fieldName),
null);
} catch (Throwable t) {
throw new RuntimeException(t);
}
}
}
| |
/**
* Copyright (C) 2011, Karsten Priegnitz
*
* Permission to use, copy, modify, and distribute this piece of software
* for any purpose with or without fee is hereby granted, provided that
* the above copyright notice and this permission notice appear in the
* source code of all copies.
*
* It would be appreciated if you mention the author in your change log,
* contributors list or the like.
*
* @author: Karsten Priegnitz
* @see: http://code.google.com/p/android-change-log/
*/
package sheetrock.panda.changelog;
// Next line is included by Andrey Moiseev, for proper build
import ru.o2genum.howtosay.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.pm.PackageManager.NameNotFoundException;
import android.preference.PreferenceManager;
import android.util.Log;
import android.webkit.WebView;
public class ChangeLog {

    /** Log tag used by all debug/error output of this class. */
    private static final String TAG = "ChangeLog";

    // this is the key for storing the version name in SharedPreferences
    private static final String VERSION_KEY = "PREFS_VERSION_KEY";

    /**
     * Sentinel "version" marker inside res/raw/changelog.txt. When only the
     * what's-new portion is rendered, suppressed output resumes after a
     * {@code $} line carrying this value.
     */
    private static final String EOCL = "END_OF_CHANGE_LOG";

    private final Context context;
    private String lastVersion, thisVersion;

    /** modes for HTML-Lists (bullet, numbered) */
    private enum Listmode {
        NONE,
        ORDERED,
        UNORDERED,
    }

    /** Current list state while translating changelog markup into HTML. */
    private Listmode listMode = Listmode.NONE;

    /**
     * Accumulates the generated HTML; recreated on every
     * {@link #getLog(boolean)} call. A StringBuilder suffices — all access is
     * single-threaded, so the synchronized StringBuffer was unnecessary.
     */
    private StringBuilder sb = null;

    /**
     * Constructor
     *
     * Retrieves the version names and stores the new version name in
     * SharedPreferences
     *
     * @param context used to resolve resources, package info and default prefs
     */
    public ChangeLog(Context context) {
        this(context, PreferenceManager.getDefaultSharedPreferences(context));
    }

    /**
     * Constructor
     *
     * Retrieves the version names and stores the new version name in
     * SharedPreferences
     *
     * @param context used to resolve resources and package info
     * @param sp the shared preferences to store the last version name into
     */
    public ChangeLog(Context context, SharedPreferences sp) {
        this.context = context;
        // Version seen on the previous launch; empty string on a fresh install.
        this.lastVersion = sp.getString(VERSION_KEY, "");
        Log.d(TAG, "lastVersion: " + lastVersion);
        try {
            this.thisVersion = context.getPackageManager().getPackageInfo(
                    context.getPackageName(), 0).versionName;
        } catch (NameNotFoundException e) {
            // Should not happen for our own package; fall back to a placeholder.
            this.thisVersion = "?";
            Log.e(TAG, "could not get version name from manifest!");
            e.printStackTrace();
        }
        Log.d(TAG, "appVersion: " + this.thisVersion);
        // Save the new version number to preferences. commit() is kept
        // (synchronous) so the write is guaranteed before the ctor returns;
        // apply() would be the asynchronous alternative.
        SharedPreferences.Editor editor = sp.edit();
        editor.putString(VERSION_KEY, this.thisVersion);
        editor.commit();
    }

    /**
     * @return The version name of the last installation of this app (as
     *         described in the former manifest). This will be the same as
     *         returned by <code>getThisVersion()</code> the second time
     *         this version of the app is launched (more precisely: the
     *         second time ChangeLog is instantiated).
     * @see AndroidManifest.xml#android:versionName
     */
    public String getLastVersion() {
        return this.lastVersion;
    }

    /**
     * @return The version name of this app as described in the manifest.
     * @see AndroidManifest.xml#android:versionName
     */
    public String getThisVersion() {
        return this.thisVersion;
    }

    /**
     * @return <code>true</code> if this version of your app is started the
     *         first time
     */
    public boolean firstRun() {
        return ! this.lastVersion.equals(this.thisVersion);
    }

    /**
     * @return <code>true</code> if your app is started the first time ever.
     *         Also <code>true</code> if your app was deinstalled and
     *         installed again.
     */
    public boolean firstRunEver() {
        return "".equals(this.lastVersion);
    }

    /**
     * @return an AlertDialog displaying the changes since the previous
     *         installed version of your app (what's new).
     */
    public AlertDialog getLogDialog() {
        return this.getDialog(false);
    }

    /**
     * @return an AlertDialog with a full change log displayed
     */
    public AlertDialog getFullLogDialog() {
        return this.getDialog(true);
    }

    /**
     * Builds the dialog hosting the change log in a WebView.
     *
     * @param full whether to show the complete log or only what's new
     */
    private AlertDialog getDialog(boolean full) {
        WebView wv = new WebView(this.context);
        wv.setBackgroundColor(0); // transparent
        // wv.getSettings().setDefaultTextEncodingName("utf-8");
        wv.loadDataWithBaseURL(null, this.getLog(full), "text/html", "UTF-8", null);
        AlertDialog.Builder builder = new AlertDialog.Builder(this.context);
        builder.setTitle(context.getResources().getString(full
                ? R.string.changelog_full_title
                : R.string.changelog_title))
                .setView(wv)
                .setCancelable(false)
                .setPositiveButton(
                        context.getResources().getString(
                                R.string.changelog_ok_button),
                        new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int id) {
                                dialog.cancel();
                            }
                        });
        return builder.create();
    }

    /**
     * @return HTML displaying the changes since the previous
     *         installed version of your app (what's new)
     */
    public String getLog() {
        return this.getLog(false);
    }

    /**
     * @return HTML which displays full change log
     */
    public String getFullLog() {
        return this.getLog(true);
    }

    /**
     * Reads res/raw/changelog.txt and renders it to HTML. Line markers:
     * {@code $} version section, {@code %} title, {@code _} subtitle,
     * {@code !} free text, {@code #} ordered item, {@code *} unordered item;
     * anything else is emitted verbatim.
     *
     * @param full if false, output stops at the previously-seen version and
     *             resumes after the {@link #EOCL} marker
     */
    private String getLog(boolean full) {
        // read changelog.txt file
        sb = new StringBuilder();
        try {
            InputStream ins = context.getResources().openRawResource(R.raw.changelog);
            BufferedReader br = new BufferedReader(new InputStreamReader(ins));
            try {
                String line = null;
                boolean advanceToEOVS = false; // if true: ignore further version sections
                while ((line = br.readLine()) != null) {
                    line = line.trim();
                    char marker = line.length() > 0 ? line.charAt(0) : 0;
                    if (marker == '$') {
                        // begin of a version section
                        this.closeList();
                        String version = line.substring(1).trim();
                        // stop output?
                        if (! full) {
                            if (this.lastVersion.equals(version)) {
                                advanceToEOVS = true;
                            } else if (version.equals(EOCL)) {
                                advanceToEOVS = false;
                            }
                        }
                    } else if (! advanceToEOVS) {
                        switch (marker) {
                        case '%':
                            // line contains version title
                            this.closeList();
                            sb.append("<div class='title'>").append(line.substring(1).trim()).append("</div>\n");
                            break;
                        case '_':
                            // line contains version subtitle
                            this.closeList();
                            sb.append("<div class='subtitle'>").append(line.substring(1).trim()).append("</div>\n");
                            break;
                        case '!':
                            // line contains free text
                            this.closeList();
                            sb.append("<div class='freetext'>").append(line.substring(1).trim()).append("</div>\n");
                            break;
                        case '#':
                            // line contains numbered list item
                            this.openList(Listmode.ORDERED);
                            sb.append("<li>").append(line.substring(1).trim()).append("</li>\n");
                            break;
                        case '*':
                            // line contains bullet list item
                            this.openList(Listmode.UNORDERED);
                            sb.append("<li>").append(line.substring(1).trim()).append("</li>\n");
                            break;
                        default:
                            // no special character: just use line as is
                            this.closeList();
                            sb.append(line).append("\n");
                        }
                    }
                }
                this.closeList();
            } finally {
                // BUG FIX: the reader (and the raw resource stream it wraps) was
                // previously closed only on the success path and leaked whenever
                // readLine() threw; always release it.
                br.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return sb.toString();
    }

    /** Opens an HTML list of the given mode, closing any other open list first. */
    private void openList(Listmode listMode) {
        if (this.listMode != listMode) {
            closeList();
            if (listMode == Listmode.ORDERED) {
                sb.append("<div class='list'><ol>\n");
            } else if (listMode == Listmode.UNORDERED) {
                sb.append("<div class='list'><ul>\n");
            }
            this.listMode = listMode;
        }
    }

    /** Closes the currently open HTML list, if any, and resets the list state. */
    private void closeList() {
        if (this.listMode == Listmode.ORDERED) {
            sb.append("</ol></div>\n");
        } else if (this.listMode == Listmode.UNORDERED) {
            sb.append("</ul></div>\n");
        }
        this.listMode = Listmode.NONE;
    }

    /**
     * manually set the last version name - for testing purposes only
     * @param lastVersion
     */
    void setLastVersion(String lastVersion) {
        this.lastVersion = lastVersion;
    }
}
| |
/*
* Copyright (C) 2014 Abhishek
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package userInterface.pharmaceuticalCompany;
import baseClasses.enterprise.CityEnterprise;
import baseClasses.enterprise.Enterprise;
import baseClasses.enterprise.PharmaceuticalCompanyEnterprise;
import baseClasses.network.Network;
import baseClasses.organization.Organization;
import baseClasses.userAccount.UserAccount;
import java.awt.CardLayout;
import javax.swing.JPanel;
/**
*
* @author Abhishek
*/
public class PharmaceuticalAdminWorkArea extends javax.swing.JPanel {

    private Network internationalNetwork;
    private PharmaceuticalCompanyEnterprise pharmaceuticalCompanyEnterprise;
    // NOTE(review): never populated — no constructor parameter supplies it (see
    // constructor comment). Kept so any external readers of the field still compile.
    private Organization inOrganization;
    //private CountryEnterprise countryEnterprise;
    private UserAccount userAccount;
    private CityEnterprise inCityEnterprise;

    /**
     * Creates new form PharmaceuticalAdminWorkArea.
     *
     * @param internationalNetwork the network this enterprise belongs to
     * @param pharmaceuticalCompanyEnterprise the enterprise being administered
     * @param inCityEnterprise the city enterprise hosting this company
     * @param userAccount the logged-in admin account (used for mail/work queue)
     */
    public PharmaceuticalAdminWorkArea(Network internationalNetwork, PharmaceuticalCompanyEnterprise pharmaceuticalCompanyEnterprise,CityEnterprise inCityEnterprise, UserAccount userAccount) {
        initComponents();
        this.internationalNetwork = internationalNetwork;
        this.pharmaceuticalCompanyEnterprise = pharmaceuticalCompanyEnterprise;
        // BUG FIX: the original "this.inOrganization = inOrganization;" was a no-op
        // self-assignment (there is no such constructor parameter), silently leaving
        // the field null. Made explicit until a parameter is actually added.
        this.inOrganization = null;
        //this.countryEnterprise = countryEnterprise;
        this.userAccount = userAccount;
        this.inCityEnterprise = inCityEnterprise;
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jSplitPane1 = new javax.swing.JSplitPane();
        jPanel1 = new javax.swing.JPanel();
        btnAddNewManufacturingOrganization = new javax.swing.JButton();
        btnManageManufacturingOrganizations = new javax.swing.JButton();
        btnAddNewDistributorOrganization = new javax.swing.JButton();
        btnManageDistributorOrganizations = new javax.swing.JButton();
        btnAddNewDrug = new javax.swing.JButton();
        btnDrugApprovalRequestDetails = new javax.swing.JButton();
        btnDrugDetails1 = new javax.swing.JButton();
        btnComposeNewMail = new javax.swing.JButton();
        btnCheckMails = new javax.swing.JButton();
        jPanel2 = new javax.swing.JPanel();
        jLabel1 = new javax.swing.JLabel();
        setLayout(new java.awt.BorderLayout());
        jSplitPane1.setDividerLocation(150);
        jSplitPane1.setOrientation(javax.swing.JSplitPane.VERTICAL_SPLIT);
        jPanel1.setBackground(new java.awt.Color(255, 255, 255));
        btnAddNewManufacturingOrganization.setBackground(new java.awt.Color(255, 255, 255));
        btnAddNewManufacturingOrganization.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnAddNewManufacturingOrganization.setForeground(new java.awt.Color(0, 153, 0));
        btnAddNewManufacturingOrganization.setText("Add New Manufacturing Organization");
        btnAddNewManufacturingOrganization.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnAddNewManufacturingOrganizationActionPerformed(evt);
            }
        });
        btnManageManufacturingOrganizations.setBackground(new java.awt.Color(255, 255, 255));
        btnManageManufacturingOrganizations.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnManageManufacturingOrganizations.setForeground(new java.awt.Color(0, 153, 0));
        btnManageManufacturingOrganizations.setText("Manage Manufacturing Organizations");
        btnManageManufacturingOrganizations.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnManageManufacturingOrganizationsActionPerformed(evt);
            }
        });
        btnAddNewDistributorOrganization.setBackground(new java.awt.Color(255, 255, 255));
        btnAddNewDistributorOrganization.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnAddNewDistributorOrganization.setForeground(new java.awt.Color(0, 153, 0));
        btnAddNewDistributorOrganization.setText("Add New Distributor Organization");
        btnAddNewDistributorOrganization.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnAddNewDistributorOrganizationActionPerformed(evt);
            }
        });
        btnManageDistributorOrganizations.setBackground(new java.awt.Color(255, 255, 255));
        btnManageDistributorOrganizations.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnManageDistributorOrganizations.setForeground(new java.awt.Color(0, 153, 0));
        btnManageDistributorOrganizations.setText("Manage Distributor Organizations");
        btnManageDistributorOrganizations.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnManageDistributorOrganizationsActionPerformed(evt);
            }
        });
        btnAddNewDrug.setBackground(new java.awt.Color(255, 255, 255));
        btnAddNewDrug.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnAddNewDrug.setForeground(new java.awt.Color(0, 153, 0));
        btnAddNewDrug.setText("Add New Drug");
        btnAddNewDrug.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnAddNewDrugActionPerformed(evt);
            }
        });
        btnDrugApprovalRequestDetails.setBackground(new java.awt.Color(255, 255, 255));
        btnDrugApprovalRequestDetails.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnDrugApprovalRequestDetails.setForeground(new java.awt.Color(0, 153, 0));
        btnDrugApprovalRequestDetails.setText("Drug Approval Request Details");
        btnDrugApprovalRequestDetails.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnDrugApprovalRequestDetailsActionPerformed(evt);
            }
        });
        btnDrugDetails1.setBackground(new java.awt.Color(255, 255, 255));
        btnDrugDetails1.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnDrugDetails1.setForeground(new java.awt.Color(0, 153, 0));
        btnDrugDetails1.setText("Drug Details");
        btnDrugDetails1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnDrugDetails1ActionPerformed(evt);
            }
        });
        btnComposeNewMail.setBackground(new java.awt.Color(255, 255, 255));
        btnComposeNewMail.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnComposeNewMail.setForeground(new java.awt.Color(0, 153, 0));
        btnComposeNewMail.setText("Compose Mail");
        btnComposeNewMail.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnComposeNewMailActionPerformed(evt);
            }
        });
        btnCheckMails.setBackground(new java.awt.Color(255, 255, 255));
        btnCheckMails.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        btnCheckMails.setForeground(new java.awt.Color(0, 153, 0));
        btnCheckMails.setText("Inbox");
        btnCheckMails.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnCheckMailsActionPerformed(evt);
            }
        });
        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(btnAddNewManufacturingOrganization, javax.swing.GroupLayout.PREFERRED_SIZE, 379, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(btnManageManufacturingOrganizations, javax.swing.GroupLayout.PREFERRED_SIZE, 381, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(18, 18, 18)
                        .addComponent(btnAddNewDrug, javax.swing.GroupLayout.PREFERRED_SIZE, 174, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(btnAddNewDistributorOrganization, javax.swing.GroupLayout.PREFERRED_SIZE, 379, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(btnManageDistributorOrganizations, javax.swing.GroupLayout.PREFERRED_SIZE, 381, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(18, 18, 18)
                        .addComponent(btnDrugDetails1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))
                .addGap(28, 28, 28)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(btnCheckMails, javax.swing.GroupLayout.PREFERRED_SIZE, 124, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(btnComposeNewMail, javax.swing.GroupLayout.DEFAULT_SIZE, 296, Short.MAX_VALUE))
                    .addComponent(btnDrugApprovalRequestDetails, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addGap(247, 247, 247))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(35, 35, 35)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(btnAddNewManufacturingOrganization)
                    .addComponent(btnAddNewDrug)
                    .addComponent(btnManageManufacturingOrganizations)
                    .addComponent(btnDrugApprovalRequestDetails))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 29, Short.MAX_VALUE)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(btnAddNewDistributorOrganization)
                    .addComponent(btnManageDistributorOrganizations)
                    .addComponent(btnDrugDetails1)
                    .addComponent(btnCheckMails)
                    .addComponent(btnComposeNewMail))
                .addGap(23, 23, 23))
        );
        jSplitPane1.setLeftComponent(jPanel1);
        jPanel2.setBackground(new java.awt.Color(255, 255, 255));
        jLabel1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/baseClasses/healtech logo.PNG"))); // NOI18N
        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                .addGap(0, 1587, Short.MAX_VALUE)
                .addComponent(jLabel1))
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                .addGap(0, 525, Short.MAX_VALUE)
                .addComponent(jLabel1))
        );
        jSplitPane1.setRightComponent(jPanel2);
        add(jSplitPane1, java.awt.BorderLayout.CENTER);
    }// </editor-fold>//GEN-END:initComponents

    private void btnAddNewManufacturingOrganizationActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnAddNewManufacturingOrganizationActionPerformed
        // Show the "add manufacturer" form in the bottom half of the split pane.
        jSplitPane1.setBottomComponent(new userInterface.pharmaceuticalCompany.AddNewManufacturer(internationalNetwork, pharmaceuticalCompanyEnterprise, inCityEnterprise));
    }//GEN-LAST:event_btnAddNewManufacturingOrganizationActionPerformed

    private void btnManageManufacturingOrganizationsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnManageManufacturingOrganizationsActionPerformed
        // Host the manage-manufacturers panel in its own CardLayout container so it
        // can navigate between its sub-screens independently of this work area.
        JPanel manageManufacturerProfileCard = new JPanel();
        manageManufacturerProfileCard.setLayout(new CardLayout());
        manageManufacturerProfileCard.add(new ManageManufacturerProfiles(internationalNetwork,manageManufacturerProfileCard,pharmaceuticalCompanyEnterprise,inCityEnterprise));
        CardLayout layout = (CardLayout) manageManufacturerProfileCard.getLayout();
        layout.next(manageManufacturerProfileCard);
        jSplitPane1.setBottomComponent(manageManufacturerProfileCard);
    }//GEN-LAST:event_btnManageManufacturingOrganizationsActionPerformed

    private void btnAddNewDistributorOrganizationActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnAddNewDistributorOrganizationActionPerformed
        // Show the "add distributor" form in the bottom half of the split pane.
        jSplitPane1.setBottomComponent(new userInterface.pharmaceuticalCompany.AddNewDistributor(internationalNetwork,pharmaceuticalCompanyEnterprise,inCityEnterprise));
    }//GEN-LAST:event_btnAddNewDistributorOrganizationActionPerformed

    private void btnManageDistributorOrganizationsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnManageDistributorOrganizationsActionPerformed
        //jSplitPane1.setBottomComponent(new userInterface.pharmaceuticalCompany.EditDistributorDetails(internationalNetwork));
        // Same CardLayout-hosting pattern as the manufacturer management handler.
        JPanel manageDistibutorrProfileCard = new JPanel();
        manageDistibutorrProfileCard.setLayout(new CardLayout());
        manageDistibutorrProfileCard.add(new ManageDistributorProfiles(internationalNetwork,manageDistibutorrProfileCard,pharmaceuticalCompanyEnterprise,inCityEnterprise));
        CardLayout layout = (CardLayout) manageDistibutorrProfileCard.getLayout();
        layout.next(manageDistibutorrProfileCard);
        jSplitPane1.setBottomComponent(manageDistibutorrProfileCard);
    }//GEN-LAST:event_btnManageDistributorOrganizationsActionPerformed

    private void btnAddNewDrugActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnAddNewDrugActionPerformed
        jSplitPane1.setBottomComponent(new AddNewDrug(internationalNetwork, pharmaceuticalCompanyEnterprise, inCityEnterprise, userAccount));
    }//GEN-LAST:event_btnAddNewDrugActionPerformed

    private void btnDrugApprovalRequestDetailsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnDrugApprovalRequestDetailsActionPerformed
        jSplitPane1.setBottomComponent(new DrugRequestDetailsPanel(pharmaceuticalCompanyEnterprise, userAccount));
    }//GEN-LAST:event_btnDrugApprovalRequestDetailsActionPerformed

    private void btnDrugDetails1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnDrugDetails1ActionPerformed
        jSplitPane1.setBottomComponent(new DrugDetails(pharmaceuticalCompanyEnterprise, userAccount));
    }//GEN-LAST:event_btnDrugDetails1ActionPerformed

    private void btnComposeNewMailActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnComposeNewMailActionPerformed
        // NOTE(review): mail panels replace the RIGHT component (the logo panel),
        // unlike the other handlers which replace the bottom component — presumably
        // intentional, but worth confirming against the other admin work areas.
        jSplitPane1.setRightComponent(new ComposeMail(internationalNetwork, userAccount));
    }//GEN-LAST:event_btnComposeNewMailActionPerformed

    private void btnCheckMailsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCheckMailsActionPerformed
        jSplitPane1.setRightComponent(new Inbox(userAccount.getWorkQueue()));
    }//GEN-LAST:event_btnCheckMailsActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btnAddNewDistributorOrganization;
    private javax.swing.JButton btnAddNewDrug;
    private javax.swing.JButton btnAddNewManufacturingOrganization;
    private javax.swing.JButton btnCheckMails;
    private javax.swing.JButton btnComposeNewMail;
    private javax.swing.JButton btnDrugApprovalRequestDetails;
    private javax.swing.JButton btnDrugDetails1;
    private javax.swing.JButton btnManageDistributorOrganizations;
    private javax.swing.JButton btnManageManufacturingOrganizations;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JSplitPane jSplitPane1;
    // End of variables declaration//GEN-END:variables
}
| |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.api.model.dmn;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
import com.google.common.collect.ImmutableMap;
import org.junit.Test;
import org.kie.dmn.api.core.DMNContext;
import org.kie.dmn.api.core.DMNMetadata;
import org.kie.server.api.model.dmn.DMNResultKS.MapBackedDMNContext;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class DMNResultKSContextTest{
@Test
public void testEmptyContext() {
MapBackedDMNContext ctx1 = MapBackedDMNContext.of(new HashMap<>(Collections.emptyMap()));
testCloneAndAlter(ctx1, Collections.emptyMap(), Collections.emptyMap());
MapBackedDMNContext ctx2 = MapBackedDMNContext.of(new HashMap<>(Collections.emptyMap()));
testPushAndPopScope(ctx2, Collections.emptyMap(), Collections.emptyMap());
}
@Test
public void testContextWithEntries() {
MapBackedDMNContext ctx1 = MapBackedDMNContext.of(new HashMap<>(DEFAULT_ENTRIES));
testCloneAndAlter(ctx1, DEFAULT_ENTRIES, Collections.emptyMap());
MapBackedDMNContext ctx2 = MapBackedDMNContext.of(new HashMap<>(DEFAULT_ENTRIES));
testPushAndPopScope(ctx2, DEFAULT_ENTRIES, Collections.emptyMap());
}
@Test
public void testContextWithEntriesAndMetadata() {
MapBackedDMNContext ctx1 = MapBackedDMNContext.of(new HashMap<>(DEFAULT_ENTRIES), new HashMap<>(DEFAULT_METADATA));
testCloneAndAlter(ctx1, DEFAULT_ENTRIES, DEFAULT_METADATA);
MapBackedDMNContext ctx2 = MapBackedDMNContext.of(new HashMap<>(DEFAULT_ENTRIES), new HashMap<>(DEFAULT_METADATA));
testPushAndPopScope(ctx2, DEFAULT_ENTRIES, DEFAULT_METADATA);
}
protected static final Map<String, Object> DEFAULT_ENTRIES = ImmutableMap.of(
"s_entr1", "value1",
"i_entr2", 2,
"f_entr3", 3.0,
"s_entr4", "4"
);
protected static final String DEFAULT_SCOPE_NAME = "theName";
protected static final String DEFAULT_SCOPE_NAMESPACE = "theNamespace";
protected static final Map<String, Object> DEFAULT_SCOPE_ENTRIES = ImmutableMap.of(
"s_scEn1", "scopeValue1",
"i_scEn2", 2,
"f_scEn3", 3.0,
"s_scEn4", "4"
);
protected static final Map<String, Object> DEFAULT_METADATA = ImmutableMap.of(
"s_meta1", "value1",
"i_meta2", 2,
"f_meta3", 3.0,
"s_meta4", "4"
);
public void testEquals(DMNContext ctx, Map<String, Object> expectedEntries, Map<String, Object> expectedMetadata) {
testEntries(ctx, expectedEntries);
testEntries(ctx.getMetadata(), expectedMetadata);
}
public void testCloneAndAlter(DMNContext ctx, Map<String, Object> expectedEntries, Map<String, Object> expectedMetadata) {
// test that original context matches expected entries
testEquals(ctx, expectedEntries, expectedMetadata);
// test that cloned context matches original context
DMNContext cloned = ctx.clone();
testEquals(cloned, expectedEntries, expectedMetadata);
// alter original context
Map<String, Object> alteredEntries = alter(ctx::set, expectedEntries, ImmutableMap.of(
"f_entr3", 6.0,
"s_entr5", "five"
));
Map<String, Object> alteredMetadata = alter(ctx.getMetadata()::set, expectedMetadata, ImmutableMap.of(
"i_meta2", 20,
"s_meta5", "FIVE"
));
// test that original context matches altered entries
testEquals(ctx, alteredEntries, alteredMetadata);
// test that cloned context still matches original context
testEquals(cloned, expectedEntries, expectedMetadata);
// alter original context
Map<String, Object> alteredClonedEntries = alter(cloned::set, expectedEntries, ImmutableMap.of(
"f_entr3", 9.0,
"s_entr6", "six"
));
Map<String, Object> alteredClonedMetadata = alter(cloned.getMetadata()::set, expectedMetadata, ImmutableMap.of(
"i_meta2", 200,
"s_meta6", "SIX"
));
// test that original context still matches altered entries
testEquals(ctx, alteredEntries, alteredMetadata);
// test that cloned context matches altered cloned entries
testEquals(cloned, alteredClonedEntries, alteredClonedMetadata);
}
public void testPushAndPopScope(DMNContext ctx, Map<String, Object> expectedEntries, Map<String, Object> expectedMetadata) {
// test that original context matches expected entries
testEquals(ctx, expectedEntries, expectedMetadata);
// test that no namespace is set
assertNamespaceIsAbsent(ctx);
// alter context by pushing a new scope with default entries
ctx.pushScope(DEFAULT_SCOPE_NAME, DEFAULT_SCOPE_NAMESPACE);
for (Map.Entry<String, Object> entry : DEFAULT_SCOPE_ENTRIES.entrySet()) {
ctx.set(entry.getKey(), entry.getValue());
}
// test that namespace is the expected one with the expected entries
assertNamespaceEquals(DEFAULT_SCOPE_NAMESPACE, ctx);
testEquals(ctx, DEFAULT_SCOPE_ENTRIES, expectedMetadata);
// alter context by popping the scope
ctx.popScope();
// test that no namespace is set again
assertNamespaceIsAbsent(ctx);
// generate altered entry map
Map<String, Object> alteredEntries = new HashMap<>(expectedEntries);
alteredEntries.put(DEFAULT_SCOPE_NAME, DEFAULT_SCOPE_ENTRIES);
// test that entries matches altered map
testEquals(ctx, alteredEntries, expectedMetadata);
}
public void testEntries(DMNContext context, Map<String, Object> expectedEntries) {
testEntries(containerFor(context), expectedEntries);
}
public void testEntries(DMNMetadata metadata, Map<String, Object> expectedEntries) {
testEntries(containerFor(metadata), expectedEntries);
}
private void testEntries(EntryContainerFacade container, Map<String, Object> expectedEntries) {
Map<String, Object> currentEntries = container.getAll();
assertNotNull(currentEntries);
assertEquals(expectedEntries.size(), currentEntries.size());
for (Map.Entry<String, Object> entry : expectedEntries.entrySet()) {
assertTrue(currentEntries.containsKey(entry.getKey()));
assertEquals(entry.getValue(), currentEntries.get(entry.getKey()));
assertTrue(container.isDefined(entry.getKey()));
assertEquals(entry.getValue(), container.get(entry.getKey()));
}
}
public static <K,V> Map<K,V> alter(BiConsumer<K,V> setter, Map<K,V> original, Map<K,V> additions) {
Map<K,V> altered = new HashMap<>(original);
for (Map.Entry<K,V> entry : additions.entrySet()) {
altered.put(entry.getKey(), entry.getValue());
setter.accept(entry.getKey(), entry.getValue());
}
return altered;
}
public static void assertNamespaceIsAbsent(DMNContext ctx) {
Optional<String> optNamespace = ctx.scopeNamespace();
assertNotNull(optNamespace);
assertFalse(optNamespace.isPresent());
}
public static void assertNamespaceEquals(String expectedName, DMNContext ctx) {
Optional<String> optNamespace = ctx.scopeNamespace();
assertNotNull(optNamespace);
assertTrue(optNamespace.isPresent());
assertEquals(expectedName, optNamespace.get());
}
public static void assertNamespaceEquals(DMNContext expectedCtx, DMNContext testCtx) {
Optional<String> optExpectedNamespace = expectedCtx.scopeNamespace();
assertNotNull(optExpectedNamespace);
Optional<String> optTestNamespace = testCtx.scopeNamespace();
assertNotNull(optTestNamespace);
if (optExpectedNamespace.isPresent()) {
assertTrue(optTestNamespace.isPresent());
assertEquals(optExpectedNamespace.get(), optTestNamespace.get());
} else {
assertFalse(optTestNamespace.isPresent());
}
}
/**
 * Adapts a {@code DMNContext} to the {@code EntryContainerFacade} test
 * interface so context and metadata containers can share the same assertions.
 * All four operations delegate straight to the wrapped context.
 */
private static EntryContainerFacade containerFor(DMNContext context) {
    return new EntryContainerFacade() {
        @Override
        public Object set(String name, Object value) {
            return context.set(name, value);
        }
        @Override
        public Object get(String name) {
            return context.get(name);
        }
        @Override
        public Map<String, Object> getAll() {
            return context.getAll();
        }
        @Override
        public boolean isDefined(String name) {
            return context.isDefined(name);
        }
    };
}
/**
 * Adapts a {@code DMNMetadata} to the {@code EntryContainerFacade} test
 * interface. Unlike the context adapter, the bulk view comes from
 * {@code asMap()} and "defined" is approximated by a non-null lookup,
 * since the metadata API has no isDefined operation.
 */
private static EntryContainerFacade containerFor(DMNMetadata metadata) {
    return new EntryContainerFacade() {
        @Override
        public Object set(String name, Object value) {
            return metadata.set(name, value);
        }
        @Override
        public Object get(String name) {
            return metadata.get(name);
        }
        @Override
        public Map<String, Object> getAll() {
            return metadata.asMap();
        }
        @Override
        public boolean isDefined(String name) {
            // No isDefined() on DMNMetadata: treat a non-null value as "defined".
            return metadata.get(name) != null;
        }
    };
}
/**
 * Minimal mutable key/value container abstraction letting the same entry
 * assertions run against both {@code DMNContext} and {@code DMNMetadata}.
 */
private interface EntryContainerFacade {
    /** Stores an entry; returns the previous value per the adapted container's contract. */
    Object set(String name, Object value);
    /** Returns the value for {@code name}, or {@code null} if not set. */
    Object get(String name);
    /** Returns a map view of all entries. */
    Map<String, Object> getAll();
    /** Returns true when an entry for {@code name} exists. */
    boolean isDefined(String name);
}
}
| |
/*
* Copyright (c) 2001-2007 Sun Microsystems, Inc. All rights reserved.
*
* The Sun Project JXTA(TM) Software License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The end-user documentation included with the redistribution, if any, must
* include the following acknowledgment: "This product includes software
* developed by Sun Microsystems, Inc. for JXTA(TM) technology."
* Alternately, this acknowledgment may appear in the software itself, if
* and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Sun", "Sun Microsystems, Inc.", "JXTA" and "Project JXTA" must
* not be used to endorse or promote products derived from this software
* without prior written permission. For written permission, please contact
* Project JXTA at http://www.jxta.org.
*
* 5. Products derived from this software may not be called "JXTA", nor may
* "JXTA" appear in their name, without prior written permission of Sun.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SUN
* MICROSYSTEMS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* JXTA is a registered trademark of Sun Microsystems, Inc. in the United
* States and other countries.
*
* Please see the license information page at :
* <http://www.jxta.org/project/www/license.html> for instructions on use of
* the license in source files.
*
* ====================================================================
*
* This software consists of voluntary contributions made by many individuals
* on behalf of Project JXTA. For more information on Project JXTA, please see
* http://www.jxta.org.
*
* This license is based on the BSD license adopted by the Apache Foundation.
*/
package net.jxta.impl.endpoint.tls;
import net.jxta.endpoint.EndpointAddress;
import net.jxta.endpoint.Message;
import net.jxta.endpoint.Messenger;
import net.jxta.endpoint.WireFormatMessage;
import net.jxta.endpoint.WireFormatMessageFactory;
import net.jxta.impl.membership.pse.PSECredential;
import net.jxta.impl.util.TimeUtils;
import net.jxta.logging.Logging;
import net.jxta.util.IgnoreFlushFilterOutputStream;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.Principal;
import java.security.Provider;
import java.security.Security;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* This class implements the TLS connection between two peers.
*
*
* <p/>Properties:
*
* <p/>net.jxta.impl.endpoint.tls.TMFAlgorithm - if defined provides the name of
* the trust manager factory algorithm to use.
*/
class TlsConn {

    /**
     * Logger
     */
    private static final transient Logger LOG = Logger.getLogger(TlsConn.class.getName());

    /** Buffer size for the plaintext output stream. */
    static final int BOSIZE = 16000;

    /**
     * TLS transport this connection is working for.
     */
    final TlsTransport transport;

    /**
     * The address of the peer to which we will be forwarding ciphertext
     * messages.
     */
    final EndpointAddress destAddr;

    /**
     * Are we client or server?
     */
    private final boolean client;

    /**
     * State of the connection
     */
    private volatile HandshakeState currentState;

    /**
     * Are we currently closing? To prevent recursion in {@link #close(HandshakeState)}
     */
    private boolean closing = false;

    /**
     * Time that something "good" last happened on the connection
     */
    long lastAccessed;

    // NOTE(review): String literals are interned, so any other code that
    // synchronizes on an equal literal shares these monitors. Kept as-is
    // because other classes in this package reference these fields directly,
    // but dedicated Object locks would be safer -- confirm before changing.
    final String lastAccessedLock = "lastAccessedLock";
    final String closeLock = "closeLock";

    /**
     * Number of retransmissions we have received.
     */
    int retrans;

    /**
     * Our synthetic socket which sends and receives the ciphertext.
     */
    final TlsSocket tlsSocket;

    /** SSL context used to build the socket factory for this connection. */
    private final SSLContext context;

    /**
     * For interfacing with TLS
     */
    private SSLSocket ssls;

    /**
     * We write our plaintext to this stream
     */
    private OutputStream plaintext_out = null;

    /**
     * Reads plaintext from the SSL socket's input stream and dispatches the
     * recovered messages (see {@link PlaintextMessageReader}).
     */
    private PlaintextMessageReader readerThread = null;

    /**
     * A string which we can lock on while acquiring new messengers. We don't
     * want to lock the whole connection object.
     */
    private final String acquireMessengerLock = "Messenger Acquire Lock";

    /**
     * Cached messenger for sending to {@link #destAddr}
     */
    private Messenger outBoundMessenger = null;

    /**
     * Tracks the state of our TLS connection with a remote peer.
     */
    enum HandshakeState {

        /**
         * Handshake is ready to begin. We will be the client side.
         */
        CLIENTSTART,

        /**
         * Handshake is ready to begin. We will be the server side.
         */
        SERVERSTART,

        /**
         * Handshake is in progress.
         */
        HANDSHAKESTARTED,

        /**
         * Handshake failed to complete.
         */
        HANDSHAKEFAILED,

        /**
         * Handshake completed successfully.
         */
        HANDSHAKEFINISHED,

        /**
         * Connection is closing.
         */
        CONNECTIONCLOSING,

        /**
         * Connection has died.
         */
        CONNECTIONDEAD
    }

    /**
     * Create a new connection.
     *
     * @param tp         the transport this connection works for
     * @param destAddr   address of the remote peer
     * @param client     true to act as the TLS client side, false as the server side
     * @param privateKey private key backing this peer's PSE credential
     * @throws Exception if trust/key manager setup or SSL socket creation fails
     */
    TlsConn(TlsTransport tp, EndpointAddress destAddr, boolean client, java.security.PrivateKey privateKey) throws Exception {
        this.transport = tp;
        this.destAddr = destAddr;
        this.client = client;
        this.currentState = client ? HandshakeState.CLIENTSTART : HandshakeState.SERVERSTART;
        this.lastAccessed = TimeUtils.timeNow();

        Logging.logCheckedInfo(LOG, (client ? "Initiating" : "Accepting"), " new connection for : ", destAddr.getProtocolAddress());

        boolean choseTMF = false;
        javax.net.ssl.TrustManagerFactory tmf = null;

        // The trust manager factory algorithm may be overridden via a system property.
        String overrideTMF = System.getProperty("net.jxta.impl.endpoint.tls.TMFAlgorithm");

        if (null != overrideTMF) {
            tmf = javax.net.ssl.TrustManagerFactory.getInstance(overrideTMF);
            choseTMF = true;
        }

        Collection<Provider> providers = Arrays.asList(Security.getProviders());
        Set<String> providerNames = new HashSet<String>();

        for (Provider provider : providers) {
            providerNames.add((provider).getName());
        }

        // Prefer the well-known JSSE providers when available.
        if ((!choseTMF) && providerNames.contains("SunJSSE")) {
            tmf = javax.net.ssl.TrustManagerFactory.getInstance("SunX509", "SunJSSE");
            choseTMF = true;
        }

        if ((!choseTMF) && providerNames.contains("IBMJSSE")) {
            tmf = javax.net.ssl.TrustManagerFactory.getInstance("IbmX509", "IBMJSSE");
            choseTMF = true;
        }

        // XXX 20040830 bondolo Other solutions go here!

        if (!choseTMF) {
            tmf = javax.net.ssl.TrustManagerFactory.getInstance(javax.net.ssl.TrustManagerFactory.getDefaultAlgorithm());
            LOG.warning("Using default Trust Manager Factory algorithm. This may not work as expected.");
        }

        KeyStore trusted = transport.membership.getPSEConfig().getKeyStore();

        tmf.init(trusted);
        javax.net.ssl.TrustManager[] tms = tmf.getTrustManagers();
        javax.net.ssl.KeyManager[] kms = new javax.net.ssl.KeyManager[]{new PSECredentialKeyManager(transport.credential, trusted, privateKey)};

        context = SSLContext.getInstance("TLS");
        context.init(kms, tms, null);

        javax.net.ssl.SSLSocketFactory factory = context.getSocketFactory();

        // endpoint interface
        TlsSocket newConnect = new TlsSocket(new JTlsInputStream(this, tp.MIN_IDLE_RECONNECT), new JTlsOutputStream(transport, this));

        // open SSL socket and do the handshake
        ssls = (SSLSocket) factory.createSocket(newConnect, destAddr.getProtocolAddress(), JTlsDefs.FAKEPORT, true);
        ssls.setEnabledProtocols(new String[]{"TLSv1"});
        ssls.setUseClientMode(client);
        if (!client) {
            // Server side requires mutual authentication: the client must present a cert.
            ssls.setNeedClientAuth(true);
        }

        // We have to delay initialization of this until we have set the
        // handshake mode.
        tlsSocket = newConnect;
    }

    /**
     * @inheritDoc <p/>An implementation which is useful for debugging.
     */
    @Override
    public String toString() {
        return super.toString() + "/" + getHandshakeState() + ":" + (client ? "Client" : "Server") + " for " + destAddr;
    }

    /**
     * Returns the current state of the connection
     *
     * @return the current state of the connection.
     */
    HandshakeState getHandshakeState() {
        return currentState;
    }

    /**
     * Changes the state of the connection. Calls
     * {@link java.lang.Object#notifyAll()} to wake any threads waiting on
     * connection state changes.
     *
     * @param newstate the new connection state.
     * @return the previous state of the connection.
     */
    synchronized HandshakeState setHandshakeState(HandshakeState newstate) {
        HandshakeState oldstate = currentState;

        currentState = newstate;
        notifyAll();

        return oldstate;
    }

    /**
     * Open the connection with the remote peer.
     * @throws java.io.IOException if handshake fails
     */
    void finishHandshake() throws IOException {
        long startTime = TimeUtils.timeNow();

        Logging.logCheckedInfo(LOG, (client ? "Client:" : "Server:"), " Handshake START");

        setHandshakeState(HandshakeState.HANDSHAKESTARTED);

        // this starts a handshake
        SSLSession newSession = ssls.getSession();

        // The null cipher suite indicates that the handshake did not complete.
        if ("SSL_NULL_WITH_NULL_NULL".equals(newSession.getCipherSuite())) {
            setHandshakeState(HandshakeState.HANDSHAKEFAILED);
            throw new IOException("Handshake failed");
        }

        setHandshakeState(HandshakeState.HANDSHAKEFINISHED);

        long hsTime = TimeUtils.toRelativeTimeMillis(TimeUtils.timeNow(), startTime) / TimeUtils.ASECOND;

        Logging.logCheckedInfo(LOG, (client ? "Client:" : "Server:"), "Handshake DONE in ", hsTime, " secs");

        // set up plain text i/o
        // writes to be encrypted
        plaintext_out = new BufferedOutputStream(ssls.getOutputStream(), BOSIZE);

        // Start reader thread
        readerThread = new PlaintextMessageReader(ssls.getInputStream());
    }

    /**
     * Close this connection.
     *
     * @param finalstate state that the connection will be in after close.
     * @throws java.io.IOException if an error occurs
     */
    void close(HandshakeState finalstate) throws IOException {
        // Poison the idle timer so the connection is never considered "recently used".
        synchronized (lastAccessedLock) {
            lastAccessed = Long.MIN_VALUE;
        }

        synchronized (closeLock) {
            closing = true;

            Logging.logCheckedInfo(LOG, "Shutting down ", this);

            setHandshakeState(HandshakeState.CONNECTIONCLOSING);

            try {
                if (null != tlsSocket) {
                    try {
                        tlsSocket.close();
                    } catch (IOException ignored) {
                        // best-effort close; failure here must not abort the shutdown
                        ;
                    }
                }

                if (null != ssls) {
                    try {
                        ssls.close();
                    } catch (IOException ignored) {
                        // best-effort close; failure here must not abort the shutdown
                        ;
                    }
                    ssls = null;
                }

                if (null != outBoundMessenger) {
                    outBoundMessenger.close();
                    outBoundMessenger = null;
                }
            } catch (Throwable failed) {
                Logging.logCheckedInfo(LOG, "Throwable during close ", this, "\n", failed);

                IOException failure = new IOException("Throwable during close()");
                failure.initCause(failed);
                throw failure;
            } finally {
                closeLock.notifyAll();
                closing = false;
                setHandshakeState(finalstate);
            }
        }
    }

    /**
     * Used by the TlsManager and the TlsConn in order to send a message,
     * either a TLS connection establishment, or TLS fragments to the remote TLS.
     *
     * @param msg message to send to the remote TLS peer.
     * @return if true then message was sent, otherwise false.
     * @throws IOException if there was a problem sending the message.
     */
    boolean sendToRemoteTls(Message msg) throws IOException {
        synchronized (acquireMessengerLock) {
            // (Re)acquire a messenger if we have none or the cached one went stale.
            if ((null == outBoundMessenger) || outBoundMessenger.isClosed()) {

                Logging.logCheckedFine(LOG, "Getting messenger for ", destAddr);

                EndpointAddress realAddr = new EndpointAddress(destAddr, JTlsDefs.ServiceName, null);

                // Get a messenger.
                outBoundMessenger = transport.endpoint.getMessenger(realAddr);

                if (outBoundMessenger == null) {
                    Logging.logCheckedWarning(LOG, "Could not get messenger for ", realAddr);
                    return false;
                }
            }
        }

        Logging.logCheckedFine(LOG, "Sending ", msg, " to ", destAddr);

        // Good we have a messenger. Send the message.
        return outBoundMessenger.sendMessage(msg);
    }

    /**
     * sendMessage is called by the TlsMessenger each time a service or
     * an application sends a new message over a TLS connection.
     * IOException is thrown when something goes wrong.
     *
     * <p/>The message is encrypted by TLS ultimately calling
     * JTlsOutputStream.write(byte[], int, int); with the resulting TLS
     * Record(s).
     *
     * @param msg The plaintext message to be sent via this connection.
     * @throws IOException for errors in sending the message.
     */
    void sendMessage(Message msg) throws IOException {
        try {
            WireFormatMessage serialed = WireFormatMessageFactory.toWireExternalWithTls(msg, JTlsDefs.MTYPE, null, transport.getPeerGroup());

            // Suppress intermediate flushes so TLS records are not fragmented needlessly;
            // a single flush below pushes the whole serialized message.
            serialed.sendToStream(new IgnoreFlushFilterOutputStream(plaintext_out));
            plaintext_out.flush();
        } catch (IOException failed) {
            Logging.logCheckedInfo(LOG, "Closing ", this, " due to exception\n", failed);
            close(HandshakeState.CONNECTIONDEAD);
            throw failed;
        }
    }

    /**
     * This is our message reader thread. This reads from the plaintext input
     * stream and dispatches messages received to the endpoint.
     */
    private class PlaintextMessageReader implements Runnable {

        InputStream ptin = null;

        Thread workerThread = null;

        public PlaintextMessageReader(InputStream ptin) {
            this.ptin = ptin;

            // start our thread
            workerThread = new Thread(TlsConn.this.transport.myThreadGroup, this, "JXTA TLS Plaintext Reader for " + TlsConn.this.destAddr);
            workerThread.setDaemon(true);
            workerThread.start();

            Logging.logCheckedInfo(LOG, "Started ReadPlaintextMessage thread for ", TlsConn.this.destAddr);
        }

        /**
         * {@inheritDoc}
         */
        public void run() {
            try {
                while (true) {
                    try {
                        Message msg = WireFormatMessageFactory.fromWireExternalWithTls(ptin, JTlsDefs.MTYPE, null, transport.getPeerGroup());

                        if (null == msg) {
                            // end of stream; stop reading
                            break;
                        }

                        // dispatch it to TlsTransport for demuxing
                        Logging.logCheckedFine(LOG, "Dispatching ", msg, " to TlsTransport");

                        TlsConn.this.transport.processReceivedMessage(msg);

                        synchronized (TlsConn.this.lastAccessedLock) {
                            TlsConn.this.lastAccessed = TimeUtils.timeNow(); // update idle timer
                        }
                    } catch (IOException iox) {
                        Logging.logCheckedWarning(LOG, "I/O error while reading decrypted Message\n", iox);
                        break;
                    }
                }
            } catch (Throwable all) {
                Logging.logCheckedSevere(LOG, "Uncaught Throwable in thread :", Thread.currentThread().getName(), "\n", all);
            } finally {
                workerThread = null;
            }

            Logging.logCheckedInfo(LOG, "Finishing ReadPlaintextMessage thread");
        }
    }

    /**
     * A private key manager which selects based on the key and cert chain found
     * in a PSE Credential.
     *
     * <p/>TODO Promote this class to a full featured interface for all of the
     * active PSECredentials. Currently the alias "theone" is used to refer to
     * the single credential backing this connection.
     */
    private static class PSECredentialKeyManager implements javax.net.ssl.X509KeyManager {

        java.security.PrivateKey privateKey;

        PSECredential cred;

        KeyStore trusted;

        public PSECredentialKeyManager(PSECredential useCred, KeyStore trusted, java.security.PrivateKey privateKey) {
            this.cred = useCred;
            this.trusted = trusted;
            this.privateKey = privateKey;
        }

        /**
         * {@inheritDoc}
         */
        public String chooseClientAlias(String[] keyType, java.security.Principal[] issuers, java.net.Socket socket) {
            for (String aKeyType : Arrays.asList(keyType)) {
                String result = checkTheOne(aKeyType, Arrays.asList(issuers));
                if (null != result) {
                    return result;
                }
            }
            return null;
        }

        /**
         * Checks to see if a peer that trusts the given issuers would trust the
         * special alias THE_ONE, returning it if so, and null otherwise.
         *
         * @param keyType the type of key a Certificate must use to be considered
         * @param allIssuers the issuers trusted by the other peer
         * @return "theone" if one of the Certificates in this peer's PSECredential's
         * Certificate chain matches the given keyType and one of the issuers,
         * or <code>null</code>
         */
        private String checkTheOne(String keyType, Collection<java.security.Principal> allIssuers) {
            List<X509Certificate> certificates = Arrays.asList(cred.getCertificateChain());

            for (X509Certificate certificate : certificates) {
                if (!certificate.getPublicKey().getAlgorithm().equals(keyType)) {
                    continue;
                }

                Logging.logCheckedFine(LOG, "CHECKING: ", certificate.getIssuerX500Principal(), " in ", allIssuers);

                if (allIssuers.contains(certificate.getIssuerX500Principal())) {
                    return "theone";
                }
            }
            return null;
        }

        /**
         * {@inheritDoc}
         */
        public String chooseServerAlias(String keyType, java.security.Principal[] issuers, java.net.Socket socket) {
            String[] available = getServerAliases(keyType, issuers);

            if (null != available) {
                return available[0];
            } else {
                return null;
            }
        }

        /**
         * {@inheritDoc}
         */
        public X509Certificate[] getCertificateChain(String alias) {
            if (alias.equals("theone")) {
                return cred.getCertificateChain();
            } else {
                try {
                    return (X509Certificate[]) trusted.getCertificateChain(alias);
                } catch (KeyStoreException ignored) {
                    // alias lookup failure is treated the same as "no chain"
                    return null;
                }
            }
        }

        /**
         * {@inheritDoc}
         */
        public String[] getClientAliases(String keyType, java.security.Principal[] issuers) {
            List<String> clientAliases = new ArrayList<String>();

            try {
                Enumeration<String> eachAlias = trusted.aliases();
                Collection<Principal> allIssuers = null;

                if (null != issuers) {
                    allIssuers = Arrays.asList(issuers);
                }

                while (eachAlias.hasMoreElements()) {
                    String anAlias = eachAlias.nextElement();

                    if (trusted.isCertificateEntry(anAlias)) {
                        try {
                            X509Certificate aCert = (X509Certificate) trusted.getCertificate(anAlias);

                            if (null == aCert) {
                                // strange... it should have been there...
                                continue;
                            }

                            if (!aCert.getPublicKey().getAlgorithm().equals(keyType)) {
                                continue;
                            }

                            if (null != allIssuers) {
                                if (allIssuers.contains(aCert.getIssuerX500Principal())) {
                                    clientAliases.add(anAlias);
                                }
                            } else {
                                // no issuer restriction: every matching-type cert qualifies
                                clientAliases.add(anAlias);
                            }
                        } catch (KeyStoreException ignored) {
                            // skip unreadable entries
                            ;
                        }
                    }
                }
            } catch (KeyStoreException ignored) {
                // keystore enumeration failed: fall through with whatever we collected
                ;
            }

            return clientAliases.toArray(new String[clientAliases.size()]);
        }

        /**
         * {@inheritDoc}
         */
        public java.security.PrivateKey getPrivateKey(String alias) {
            if (alias.equals("theone")) {
                return privateKey;
            } else {
                return null;
            }
        }

        /**
         * {@inheritDoc}
         */
        public String[] getServerAliases(String keyType, java.security.Principal[] issuers) {
            if (keyType.equals(cred.getCertificate().getPublicKey().getAlgorithm())) {
                if (null == issuers) {
                    return new String[]{"theone"};
                } else {
                    Collection<Principal> allIssuers = Arrays.asList(issuers);

                    if (Logging.SHOW_FINE && LOG.isLoggable(Level.FINE)) {
                        Logging.logCheckedFine(LOG, "Looking for : ", cred.getCertificate().getIssuerX500Principal());
                        Logging.logCheckedFine(LOG, "Issuers : ", allIssuers);

                        java.security.Principal prin = cred.getCertificate().getIssuerX500Principal();

                        Logging.logCheckedFine(LOG, " Principal Type :", prin.getClass().getName());

                        for (Principal issuer : allIssuers) {
                            Logging.logCheckedFine(LOG, "Issuer Type : ", issuer.getClass().getName());
                            Logging.logCheckedFine(LOG, "Issuer value : ", issuer);
                            Logging.logCheckedFine(LOG, "tmp.equals(prin) : ", issuer.equals(prin));
                        }
                    }

                    X509Certificate[] chain = cred.getCertificateChain();

                    for (X509Certificate aCert : Arrays.asList(chain)) {
                        if (allIssuers.contains(aCert.getIssuerX500Principal())) {
                            return new String[]{"theone"};
                        }
                    }
                }
            }

            return null;
        }
    }
}
| |
/*
* Copyright (c) 2006-2017 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.dmdirc.commandline;
import com.dmdirc.config.GlobalConfig;
import com.dmdirc.interfaces.ConnectionManager;
import com.dmdirc.config.provider.AggregateConfigProvider;
import com.dmdirc.util.InvalidURIException;
import com.dmdirc.util.system.SystemInfo;
import com.dmdirc.util.URIParser;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.inject.Singleton;
/**
* Parses command line arguments for the client.
*/
@Singleton
public class CommandLineParser {

    /**
     * The arguments that the client supports, in groups of four, in the following order: short
     * option, long option, description, whether or not the option takes an argument.
     */
    private static final Object[][] ARGUMENTS = {
        {'c', "connect", "Connect to the specified server", Boolean.TRUE},
        {'d', "directory", "Use the specified configuration directory", Boolean.TRUE},
        {'e', "existing", "Try to use an existing instance of DMDirc (use with -c)", Boolean.FALSE},
        {'h', "help", "Show command line options and exit", Boolean.FALSE},
        {'l', "launcher", "Specifies the version of DMDirc's launcher", Boolean.TRUE},
        {'p', "portable", "Enable portable mode", Boolean.FALSE},
        {'r', "disable-reporting", "Disable automatic error reporting", Boolean.FALSE},
        {'v', "version", "Display client version and exit", Boolean.FALSE},
        {'k', "check", "Check if an existing instance of DMDirc exists.", Boolean.FALSE}
    };

    /** A list of addresses to autoconnect to. */
    private final List<URI> addresses = new ArrayList<>();
    /** Provider to use to get server managers. */
    @Nullable private final Provider<ConnectionManager> serverManagerProvider;
    /** Provider to use to get the global config. */
    @Nullable private final Provider<AggregateConfigProvider> globalConfigProvider;
    /** The parser to use for URIs. */
    @Nullable private final URIParser uriParser;
    /** Used to retrieve information about the running system. */
    private final SystemInfo systemInfo;
    /** Whether to disable error reporting or not. */
    private boolean disablereporting;
    /** The version string passed for the launcher. */
    private Optional<String> launcherVersion;
    /** The configuration directory. */
    private String configDirectory;
    /** The RMI server we're using (set by --existing when an instance is found). */
    private RemoteInterface server;

    /**
     * Creates a new instance of CommandLineParser.
     *
     * @param serverManagerProvider Provider to use to get server managers.
     * @param globalConfigProvider Provider to use to get the global config.
     * @param uriParser The parser to use for URIs.
     */
    @Inject
    public CommandLineParser(
            @Nullable final Provider<ConnectionManager> serverManagerProvider,
            @Nullable @GlobalConfig final Provider<AggregateConfigProvider> globalConfigProvider,
            @Nullable final URIParser uriParser,
            final SystemInfo systemInfo) {
        this.serverManagerProvider = serverManagerProvider;
        this.globalConfigProvider = globalConfigProvider;
        this.uriParser = uriParser;
        this.systemInfo = systemInfo;
        launcherVersion = Optional.empty();
    }

    /**
     * Parses the given arguments.
     *
     * @param arguments The arguments to be parsed
     */
    public void parse(final String... arguments) {
        // Two-state scanner: when inArg is true the current token is the
        // parameter for the previously seen option (previousArg).
        boolean inArg = false;
        char previousArg = '.'; // '.' is a sentinel meaning "no pending option"
        for (String arg : arguments) {
            if (inArg) {
                processArgument(previousArg, arg);
                inArg = false;
            } else {
                if (arg.startsWith("--")) {
                    previousArg = processLongArg(arg.substring(2));
                    inArg = checkArgument(previousArg);
                } else if (arg.charAt(0) == '-') {
                    previousArg = processShortArg(arg.substring(1));
                    inArg = checkArgument(previousArg);
                } else {
                    doUnknownArg("Unknown argument: " + arg);
                }
            }
        }
        if (inArg) {
            doUnknownArg("Missing parameter for argument: " + previousArg);
        }
        // --existing found a running instance: hand off the addresses and quit.
        if (server != null) {
            try {
                server.connect(addresses);
                System.exit(0);
            } catch (RemoteException ex) {
                System.err.println("Unable to execute remote connection: " + ex.getMessage());
                ex.printStackTrace();
            }
        }
        // Otherwise publish our own RMI endpoint so later invocations can find us.
        if (serverManagerProvider != null) {
            new RemoteServer(serverManagerProvider).bind();
        }
    }

    /**
     * Checks whether the specified arg type takes an argument. If it does, this method returns
     * true. If it doesn't, the method processes the argument and returns false.
     *
     * @param argument The short code of the argument
     *
     * @return True if the arg requires an argument, false otherwise
     */
    private boolean checkArgument(final char argument) {
        boolean needsArg = false;
        for (Object[] target : ARGUMENTS) {
            if (target[0].equals(argument)) {
                needsArg = (Boolean) target[3];
                break;
            }
        }
        if (needsArg) {
            return true;
        } else {
            // No parameter expected; dispatch immediately with a null param.
            processArgument(argument, null);
            return false;
        }
    }

    /**
     * Processes the specified string as a single long argument.
     *
     * @param arg The string entered
     *
     * @return The short form of the corresponding argument
     */
    private char processLongArg(final String arg) {
        for (Object[] target : ARGUMENTS) {
            if (arg.equalsIgnoreCase((String) target[1])) {
                return (Character) target[0];
            }
        }
        doUnknownArg("Unknown argument: " + arg);
        exit();
        // Unreachable in practice (exit() terminates the JVM); keeps the compiler happy.
        return '.';
    }

    /**
     * Processes the specified string as a single short argument.
     *
     * @param arg The string entered
     *
     * @return The short form of the corresponding argument
     */
    private char processShortArg(final String arg) {
        for (Object[] target : ARGUMENTS) {
            if (arg.equals(String.valueOf(target[0]))) {
                return (Character) target[0];
            }
        }
        doUnknownArg("Unknown argument: " + arg);
        exit();
        // Unreachable in practice (exit() terminates the JVM); keeps the compiler happy.
        return '.';
    }

    /**
     * Processes the specified command-line argument.
     *
     * @param arg The short form of the argument used
     * @param param The (optional) string parameter for the option
     */
    private void processArgument(final char arg, final String param) {
        switch (arg) {
            case 'c':
                doConnect(param);
                break;
            case 'd':
                doDirectory(Paths.get(param));
                break;
            case 'e':
                doExisting();
                break;
            case 'k':
                doExistingCheck();
                break;
            case 'h':
                doHelp();
                break;
            case 'l':
                launcherVersion = Optional.ofNullable(param);
                break;
            case 'p':
                // Portable mode: keep config next to the working directory.
                doDirectory(Paths.get(systemInfo.getProperty("user.dir")));
                break;
            case 'r':
                disablereporting = true;
                break;
            case 'v':
                doVersion();
                break;
            default:
                // This really shouldn't ever happen, but we'll handle it nicely
                // anyway.
                doUnknownArg("Unknown argument: " + arg);
                break;
        }
    }

    /**
     * Informs the user that they entered an unknown argument, prints the client help, and exits.
     *
     * @param message The message about the unknown argument to be displayed
     */
    private void doUnknownArg(final String message) {
        System.out.println(message);
        System.out.println();
        doHelp();
    }

    /**
     * Exits DMDirc.
     */
    private void exit() {
        System.exit(0);
    }

    /**
     * Handles the --connect argument.
     *
     * @param address The address the user told us to connect to
     */
    private void doConnect(final String address) {
        if (uriParser != null) {
            try {
                addresses.add(uriParser.parseFromText(address));
            } catch (InvalidURIException ex) {
                doUnknownArg("Invalid address specified: " + ex.getMessage());
            }
        } else {
            // No parser injected: connecting is impossible, so bail out.
            System.out.println("Unable to connect to address.");
            exit();
        }
    }

    /**
     * Handles the --existing argument.
     */
    private void doExisting() {
        server = RemoteServer.getServer();
        if (server == null) {
            System.err.println("Unable to connect to existing instance");
        }
    }

    /**
     * Handles the --check argument. Exits with status 1 when no existing
     * instance is found, 0 when one is.
     */
    private void doExistingCheck() {
        if (RemoteServer.getServer() == null) {
            System.out.println("Existing instance not found.");
            System.exit(1);
        } else {
            System.out.println("Existing instance found.");
            System.exit(0);
        }
    }

    /**
     * Sets the config directory to the one specified.
     *
     * @param dir The new config directory
     */
    private void doDirectory(final Path dir) {
        if (!Files.exists(dir)) {
            try {
                Files.createDirectories(dir);
            } catch (IOException ex) {
                System.err.println("Unable to create directory " + dir);
                System.exit(1);
            }
        }
        configDirectory = dir.toAbsolutePath().toString() + File.separator;
        // Re-check through the stored string form to catch unresolvable paths.
        if (!Files.exists(Paths.get(configDirectory))) {
            System.err.println("Unable to resolve directory " + dir);
            System.exit(1);
        }
    }

    /**
     * Prints out the client version and exits.
     */
    private void doVersion() {
        System.out.println("DMDirc - a cross-platform, open-source IRC client.");
        System.out.println();
        if (globalConfigProvider == null) {
            System.out.println("Version: Unknown");
            exit();
        }
        final AggregateConfigProvider globalConfig = globalConfigProvider.get();
        System.out.println(" Version: " + globalConfig.getOption("version", "version"));
        System.out.println(" Update channel: " + globalConfig.getOption("updater", "channel"));
        exit();
    }

    /**
     * Prints out client help and exits.
     */
    private void doHelp() {
        System.out.println("Usage: java -jar DMDirc.jar [options]");
        System.out.println();
        System.out.println("Valid options:");
        System.out.println();
        // First pass: find the widest "long-option + argument" column for alignment.
        int maxLength = 0;
        for (Object[] arg : ARGUMENTS) {
            final String needsArg = ((Boolean) arg[3]) ? " <argument>" : "";
            if ((arg[1] + needsArg).length() > maxLength) {
                maxLength = (arg[1] + needsArg).length();
            }
        }
        for (Object[] arg : ARGUMENTS) {
            final String needsArg = ((Boolean) arg[3]) ? " <argument>" : "";
            final StringBuilder desc = new StringBuilder(maxLength + 1);
            desc.append(arg[1]);
            while (desc.length() < maxLength + 1) {
                desc.append(' ');
            }
            System.out.print("   -" + arg[0] + needsArg);
            System.out.println(" --" + desc + needsArg + ' ' + arg[2]);
            // NOTE(review): this blank line prints after every option; if a single
            // separator after the list was intended, this belongs outside the loop.
            System.out.println();
        }
        exit();
    }

    /**
     * Returns the user-supplied configuration directory.
     *
     * @return The user-supplied config directory, or {@code null} if none was supplied.
     */
    public String getConfigDirectory() {
        return configDirectory;
    }

    /**
     * Indicates whether the user has requested error reporting be disabled.
     *
     * @return True if the user has disabled reporting, false otherwise.
     */
    public boolean getDisableReporting() {
        return disablereporting;
    }

    /**
     * Returns the provided launcher version, if any.
     *
     * @return The version supplied by the launcher, or an empty optional if no launcher
     * is identified.
     */
    public Optional<String> getLauncherVersion() {
        return launcherVersion;
    }

    /**
     * Processes arguments once the client has been loaded properly. This allows us to auto-connect
     * to servers, etc.
     *
     * @param connectionManager The server manager to use to connect servers.
     */
    public void processArguments(final ConnectionManager connectionManager) {
        addresses.forEach(connectionManager::connectToAddress);
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.benchmark.query;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import io.druid.benchmark.datagen.BenchmarkDataGenerator;
import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.java.util.common.concurrent.Execs;
import io.druid.data.input.InputRow;
import io.druid.data.input.Row;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.logger.Logger;
import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import io.druid.query.Druids;
import io.druid.query.FinalizeResultsQueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryPlus;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryToolChest;
import io.druid.query.Result;
import io.druid.query.TableDataSource;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.select.EventHolder;
import io.druid.query.select.PagingSpec;
import io.druid.query.select.SelectQuery;
import io.druid.query.select.SelectQueryConfig;
import io.druid.query.select.SelectQueryEngine;
import io.druid.query.select.SelectQueryQueryToolChest;
import io.druid.query.select.SelectQueryRunnerFactory;
import io.druid.query.select.SelectResultValue;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.segment.IncrementalIndexSegment;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMergerV9;
import io.druid.segment.IndexSpec;
import io.druid.segment.QueryableIndex;
import io.druid.segment.QueryableIndexSegment;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.serde.ComplexMetrics;
import org.apache.commons.io.FileUtils;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
@Fork(value = 1)
@Warmup(iterations = 10)
@Measurement(iterations = 25)
public class SelectBenchmark
{
  // Number of segments to generate and query.
  @Param({"1"})
  private int numSegments;

  // Rows generated per segment.
  @Param({"25000"})
  private int rowsPerSegment;

  // "<schema>.<query>" key into SCHEMA_QUERY_MAP, e.g. "basic.A".
  @Param({"basic.A"})
  private String schemaAndQuery;

  // Page size used by the select query's PagingSpec.
  @Param({"1000"})
  private int pagingThreshold;

  private static final Logger log = new Logger(SelectBenchmark.class);
  private static final int RNG_SEED = 9999;
  private static final IndexMergerV9 INDEX_MERGER_V9;
  private static final IndexIO INDEX_IO;
  public static final ObjectMapper JSON_MAPPER;

  private List<IncrementalIndex> incIndexes;
  private List<QueryableIndex> qIndexes;

  private QueryRunnerFactory factory;
  private BenchmarkSchemaInfo schemaInfo;
  private Druids.SelectQueryBuilder queryBuilder;
  private SelectQuery query;

  private File tmpDir;
  private ExecutorService executorService;

  static {
    JSON_MAPPER = new DefaultObjectMapper();
    INDEX_IO = new IndexIO(
        JSON_MAPPER,
        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
        new ColumnConfig()
        {
          @Override
          public int columnCacheSizeBytes()
          {
            return 0;
          }
        }
    );
    INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
  }

  private static final Map<String, Map<String, Druids.SelectQueryBuilder>> SCHEMA_QUERY_MAP = new LinkedHashMap<>();

  /**
   * Registers the query builders for each supported schema/query combination
   * in {@link #SCHEMA_QUERY_MAP}.
   */
  private void setupQueries()
  {
    // queries for the basic schema
    Map<String, Druids.SelectQueryBuilder> basicQueries = new LinkedHashMap<>();
    BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");

    { // basic.A: select everything (no dimensions/metrics filter) over the schema's interval
      QuerySegmentSpec intervalSpec =
          new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

      Druids.SelectQueryBuilder queryBuilderA =
          Druids.newSelectQueryBuilder()
                .dataSource(new TableDataSource("blah"))
                .dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.<String>asList()))
                .metrics(Arrays.<String>asList())
                .intervals(intervalSpec)
                .granularity(Granularities.ALL)
                .descending(false);

      basicQueries.put("A", queryBuilderA);
    }

    SCHEMA_QUERY_MAP.put("basic", basicQueries);
  }

  /**
   * Generates the benchmark data, persists it as queryable indexes and builds
   * the query runner factory used by the benchmark methods.
   *
   * @throws IOException if persisting or loading a segment fails.
   */
  @Setup
  public void setup() throws IOException
  {
    log.info("SETUP CALLED AT " + System.currentTimeMillis());

    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
    }

    executorService = Execs.multiThreaded(numSegments, "SelectThreadPool");

    setupQueries();

    String[] schemaQuery = schemaAndQuery.split("\\.");
    String schemaName = schemaQuery[0];
    String queryName = schemaQuery[1];

    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
    queryBuilder = SCHEMA_QUERY_MAP.get(schemaName).get(queryName);
    queryBuilder.pagingSpec(PagingSpec.newSpec(pagingThreshold));
    query = queryBuilder.build();

    incIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
      // Per-segment seed keeps data deterministic but distinct across segments.
      BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
          schemaInfo.getColumnSchemas(),
          RNG_SEED + i,
          schemaInfo.getDataInterval(),
          rowsPerSegment
      );

      IncrementalIndex incIndex = makeIncIndex();

      for (int j = 0; j < rowsPerSegment; j++) {
        InputRow row = gen.nextRow();
        if (j % 10000 == 0) {
          log.info(j + " rows generated.");
        }
        incIndex.add(row);
      }
      incIndexes.add(incIndex);
    }

    tmpDir = Files.createTempDir();
    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

    qIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
      File indexFile = INDEX_MERGER_V9.persist(
          incIndexes.get(i),
          tmpDir,
          new IndexSpec(),
          null
      );

      QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
      qIndexes.add(qIndex);
    }

    final Supplier<SelectQueryConfig> selectConfigSupplier = Suppliers.ofInstance(new SelectQueryConfig(true));

    factory = new SelectQueryRunnerFactory(
        new SelectQueryQueryToolChest(
            JSON_MAPPER,
            QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator(),
            selectConfigSupplier
        ),
        new SelectQueryEngine(),
        QueryBenchmarkUtil.NOOP_QUERYWATCHER
    );
  }

  @TearDown
  public void tearDown() throws IOException
  {
    FileUtils.deleteDirectory(tmpDir);
  }

  private IncrementalIndex makeIncIndex()
  {
    return new IncrementalIndex.Builder()
        .setSimpleTestingIndexSchema(schemaInfo.getAggsArray())
        .setReportParseExceptions(false)
        .setMaxRowCount(rowsPerSegment)
        .buildOnheap();
  }

  /**
   * Runs the query through the factory's toolchest (merge + finalize) and
   * materializes the result sequence into a list.
   */
  private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
  {
    QueryToolChest toolChest = factory.getToolchest();
    QueryRunner<T> theRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
        toolChest
    );

    Sequence<T> queryResult = theRunner.run(QueryPlus.wrap(query), Maps.<String, Object>newHashMap());
    return Sequences.toList(queryResult, Lists.<T>newArrayList());
  }

  // don't run this benchmark with a query that doesn't use QueryGranularities.ALL,
  // this pagination function probably doesn't work correctly in that case.
  private SelectQuery incrementQueryPagination(SelectQuery query, SelectResultValue prevResult)
  {
    Map<String, Integer> pagingIdentifiers = prevResult.getPagingIdentifiers();
    Map<String, Integer> newPagingIdentifers = new HashMap<>();

    // Iterate entries directly instead of keySet() + get() to avoid a second
    // lookup (and extra autoboxing) per segment.
    for (Map.Entry<String, Integer> entry : pagingIdentifiers.entrySet()) {
      newPagingIdentifers.put(entry.getKey(), entry.getValue() + 1);
    }

    return query.withPagingSpec(new PagingSpec(newPagingIdentifers, pagingThreshold));
  }

  /** Pages through the whole select query against one incremental index. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void queryIncrementalIndex(Blackhole blackhole) throws Exception
  {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentId = "incIndex";
    QueryRunner<Row> runner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        segmentId,
        new IncrementalIndexSegment(incIndexes.get(0), segmentId)
    );

    boolean done = false;
    while (!done) {
      List<Result<SelectResultValue>> results = SelectBenchmark.runQuery(factory, runner, queryCopy);
      SelectResultValue result = results.get(0).getValue();
      if (result.getEvents().isEmpty()) {
        // An empty page signals that pagination is exhausted.
        done = true;
      } else {
        for (EventHolder eh : result.getEvents()) {
          blackhole.consume(eh);
        }
        queryCopy = incrementQueryPagination(queryCopy, result);
      }
    }
  }

  /** Pages through the whole select query against one persisted queryable index. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void queryQueryableIndex(Blackhole blackhole) throws Exception
  {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentId = "qIndex";
    QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        segmentId,
        new QueryableIndexSegment(segmentId, qIndexes.get(0))
    );

    boolean done = false;
    while (!done) {
      List<Result<SelectResultValue>> results = SelectBenchmark.runQuery(factory, runner, queryCopy);
      SelectResultValue result = results.get(0).getValue();
      if (result.getEvents().isEmpty()) {
        done = true;
      } else {
        for (EventHolder eh : result.getEvents()) {
          blackhole.consume(eh);
        }
        queryCopy = incrementQueryPagination(queryCopy, result);
      }
    }
  }

  /** Pages through the select query merged across all queryable-index segments. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception
  {
    SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

    String segmentName;
    List<QueryRunner<Result<SelectResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
      segmentName = "qIndex" + i;
      QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
          factory,
          segmentName,
          new QueryableIndexSegment(segmentName, qIndexes.get(i))
      );
      singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    QueryRunner theRunner = toolChest.postMergeQueryDecoration(
        new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)),
            toolChest
        )
    );

    boolean done = false;
    while (!done) {
      Sequence<Result<SelectResultValue>> queryResult = theRunner.run(QueryPlus.wrap(queryCopy), Maps.newHashMap());
      List<Result<SelectResultValue>> results = Sequences.toList(queryResult, Lists.<Result<SelectResultValue>>newArrayList());

      SelectResultValue result = results.get(0).getValue();

      if (result.getEvents().isEmpty()) {
        done = true;
      } else {
        for (EventHolder eh : result.getEvents()) {
          blackhole.consume(eh);
        }
        queryCopy = incrementQueryPagination(queryCopy, result);
      }
    }
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog.model;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the ScanProvisionedProducts operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicecatalog-2015-12-10/ScanProvisionedProducts"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ScanProvisionedProductsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The language code: <code>en</code> - English (default), <code>jp</code> - Japanese, <code>zh</code> - Chinese.
     */
    private String acceptLanguage;
    /**
     * The access level to use to obtain results. The default is <code>User</code>.
     */
    private AccessLevelFilter accessLevelFilter;
    /**
     * The maximum number of items to return with this call.
     */
    private Integer pageSize;
    /**
     * The page token for the next set of results. To retrieve the first set of results, use null.
     */
    private String pageToken;

    /**
     * Sets the language code.
     *
     * @param acceptLanguage
     *        The language code: <code>en</code> - English (default), <code>jp</code> - Japanese, <code>zh</code> -
     *        Chinese.
     */
    public void setAcceptLanguage(String acceptLanguage) {
        this.acceptLanguage = acceptLanguage;
    }

    /**
     * Returns the language code.
     *
     * @return The language code: <code>en</code> - English (default), <code>jp</code> - Japanese, <code>zh</code> -
     *         Chinese.
     */
    public String getAcceptLanguage() {
        return this.acceptLanguage;
    }

    /**
     * Fluent variant of {@link #setAcceptLanguage(String)}.
     *
     * @param acceptLanguage
     *        The language code.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ScanProvisionedProductsRequest withAcceptLanguage(String acceptLanguage) {
        setAcceptLanguage(acceptLanguage);
        return this;
    }

    /**
     * Sets the access level to use to obtain results. The default is <code>User</code>.
     *
     * @param accessLevelFilter
     *        The access level to use to obtain results.
     */
    public void setAccessLevelFilter(AccessLevelFilter accessLevelFilter) {
        this.accessLevelFilter = accessLevelFilter;
    }

    /**
     * Returns the access level to use to obtain results. The default is <code>User</code>.
     *
     * @return The access level to use to obtain results.
     */
    public AccessLevelFilter getAccessLevelFilter() {
        return this.accessLevelFilter;
    }

    /**
     * Fluent variant of {@link #setAccessLevelFilter(AccessLevelFilter)}.
     *
     * @param accessLevelFilter
     *        The access level to use to obtain results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ScanProvisionedProductsRequest withAccessLevelFilter(AccessLevelFilter accessLevelFilter) {
        setAccessLevelFilter(accessLevelFilter);
        return this;
    }

    /**
     * Sets the maximum number of items to return with this call.
     *
     * @param pageSize
     *        The maximum number of items to return with this call.
     */
    public void setPageSize(Integer pageSize) {
        this.pageSize = pageSize;
    }

    /**
     * Returns the maximum number of items to return with this call.
     *
     * @return The maximum number of items to return with this call.
     */
    public Integer getPageSize() {
        return this.pageSize;
    }

    /**
     * Fluent variant of {@link #setPageSize(Integer)}.
     *
     * @param pageSize
     *        The maximum number of items to return with this call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ScanProvisionedProductsRequest withPageSize(Integer pageSize) {
        setPageSize(pageSize);
        return this;
    }

    /**
     * Sets the page token for the next set of results. To retrieve the first set of results, use null.
     *
     * @param pageToken
     *        The page token for the next set of results.
     */
    public void setPageToken(String pageToken) {
        this.pageToken = pageToken;
    }

    /**
     * Returns the page token for the next set of results. To retrieve the first set of results, use null.
     *
     * @return The page token for the next set of results.
     */
    public String getPageToken() {
        return this.pageToken;
    }

    /**
     * Fluent variant of {@link #setPageToken(String)}.
     *
     * @param pageToken
     *        The page token for the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ScanProvisionedProductsRequest withPageToken(String pageToken) {
        setPageToken(pageToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAcceptLanguage() != null) {
            sb.append("AcceptLanguage: ").append(getAcceptLanguage()).append(",");
        }
        if (getAccessLevelFilter() != null) {
            sb.append("AccessLevelFilter: ").append(getAccessLevelFilter()).append(",");
        }
        if (getPageSize() != null) {
            sb.append("PageSize: ").append(getPageSize()).append(",");
        }
        if (getPageToken() != null) {
            sb.append("PageToken: ").append(getPageToken());
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so this also covers obj == null.
        if (!(obj instanceof ScanProvisionedProductsRequest)) {
            return false;
        }
        ScanProvisionedProductsRequest other = (ScanProvisionedProductsRequest) obj;
        // Objects.equals replicates the generated null-xor + equals checks.
        return Objects.equals(getAcceptLanguage(), other.getAcceptLanguage())
                && Objects.equals(getAccessLevelFilter(), other.getAccessLevelFilter())
                && Objects.equals(getPageSize(), other.getPageSize())
                && Objects.equals(getPageToken(), other.getPageToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (initial value 1,
        // null -> 0) as the previous hand-rolled chain, so values are unchanged.
        return Objects.hash(getAcceptLanguage(), getAccessLevelFilter(), getPageSize(), getPageToken());
    }

    @Override
    public ScanProvisionedProductsRequest clone() {
        return (ScanProvisionedProductsRequest) super.clone();
    }
}
| |
/*
* The akquinet maven-latex-plugin project
*
* Copyright (c) 2011 by akquinet tech@spree GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.simuline.m2latex.core;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// FIXME: jdee bug: delete static imports: does not find superfluous
/**
* Sole interface to <code>org.apache.commons.io.</code>.
* A collection of utility methods for file manipulation.
*/
class TexFileUtils {
//private final static String PREFIX_HIDDEN = ".";
private final static String PATTERN_INS_LATEX_MAIN = "T\\$T";
private final LogWrapper log;
    /**
     * Creates a new utility instance which emits all warnings and
     * debug output through <code>log</code>.
     *
     * @param log
     *    the log wrapper used by all logging methods of this class.
     */
    TexFileUtils(LogWrapper log) {
        this.log = log;
    }
/**
* Returns the listing of the directory <code>dir</code>
* or <code>null</code> if it is not readable
* and emit an according warning if so.
* <p>
* Logging:
* WFU01: Cannot read directory
*
* @param dir
* an existing directory.
* @return
* the list of entries of <code>dir</code>
* or <code>null</code> if it is not readable.
*/
// used only in
// constructor of DirNode
// copyOutputToTargetFolder, deleteX
File[] listFilesOrWarn(File dir) {
assert dir != null && dir.isDirectory() : "Expected folder found "+dir;
File[] files = dir.listFiles();
warnIfNull(files, dir);
return files;
}
/**
* Returns the listing of the directory <code>dir</code>
* filtered by <code>filter</code>
* or <code>null</code> if <code>dir</code> is not readable
* and emit an according warning if so.
* <p>
* Logging:
* WFU01: Cannot read directory
*
* @param dir
* an existing directory.
* @param filter
* a file filter
* @return
* the list of entries of <code>dir</code>
* accepted by <code>filter</code>
* or <code>null</code> if <code>dir</code> is not readable.
*/
// used by LatexProcessor.runMakeIndexByNeed only
File[] listFilesOrWarn(File dir, FileFilter filter) {
assert dir != null && dir.isDirectory() : "Expected folder found "+dir;
File[] files = dir.listFiles(filter);
warnIfNull(files, dir);
return files;
}
private void warnIfNull(File[] files, File dir) {
if (files == null) {
this.log.warn("WFU01: Cannot read directory '" + dir +
"'; build may be incomplete. ");
}
}
/**
* Returns the directory containing <code>sourceFile</code>
* with the prefix <code>sourceBaseDir</code>
* replaced by <code>targetBaseDir</code>.
* E.g. <code>sourceFile=/tmp/adir/afile</code>,
* <code>sourceBaseDir=/tmp</code>, <code>targetBaseDir=/home</code>
* returns <code>/home/adir/</code>.
*
* @param srcFile
* the source file the parent directory of which
* shall be converted to the target.
* @param srcBaseDir
* the base directory of the source.
* Immediately or not,
* <code>sourceFile</code> shall be in <code>sourceBaseDir</code>.
* @param targetBaseDir
* the base directory of the target.
* @return
* the directory below <code>targetBaseDir</code>
* which corresponds to the parent directory of <code>sourceFile</code>
* which is below <code>sourceBaseDir</code>.
* @throws BuildFailureException
* TFU01: if the target directory that would be returned
* exists already as a regular file.
*/
// used by LatexProcessor.create() only
File getTargetDirectory(File srcFile,
File srcBaseDir,
File targetBaseDir) throws BuildFailureException {
Path srcParentPath = srcFile.getParentFile().toPath();
Path srcBasePath = srcBaseDir.toPath();
assert srcParentPath.startsWith(srcBasePath);
srcParentPath = srcBasePath.relativize(srcParentPath);
// this may exist but if so it is a directory because holds srcFile
File targetDir = new File(targetBaseDir, srcParentPath.toString());
targetDir.mkdirs();
if (!targetDir.isDirectory()) {
throw new BuildFailureException
("TFU01: Cannot create destination directory '" + targetDir +
"'. ");
}
assert targetDir.isDirectory();
return targetDir;
}
/**
* Returns the pdf file which serves as a blue print for <code>pdfFileAct</code>.
* The relative path of <code>pdfFileAct</code> to its ancestor <code>artifactBaseDir</code>
* is the same as the blueprint to its ancestor <code>diffRootDir</code>.
*
* @param pdfFileAct
* the artifact to be checked agains a blueprint in <code>diffRootDir</code>.
* It must be contained in <code>artifactBaseDir</code>, immediately or not.
* @param artifactBaseDir
* the base directory for all artifacts, in particular of <code>pdfFileAct</code>.
* @param diffBaseDir
* the root directory of all blue prints corresponding with <code>diffRootDir</code>.
*/
static File getPdfFileDiff(File pdfFileAct, File artifactBaseDir, File diffBaseDir) {
Path pdfFileActPath = pdfFileAct .toPath();
Path artifactBasePath = artifactBaseDir.toPath();
assert pdfFileActPath.startsWith(artifactBasePath);
pdfFileActPath = artifactBasePath.relativize(pdfFileActPath);
// this may exist but if so it is a directory because holds srcFile
return new File(diffBaseDir, pdfFileActPath.toString());
}
/**
* Returns a file filter matching neither directories
* nor <code>texFile</code>
* but else all files with names matching <code>pattern</code>,
* where the special sequence {@link #PATTERN_INS_LATEX_MAIN}
* is replaced by the prefix of <code>texFile</code>.
*
* @param texFile
* a latex main file for which a file filter has to be created.
* @param pattern
* a pattern
* for which the special sequence {@link #PATTERN_INS_LATEX_MAIN}
* is replaced by the prefix of <code>texFile</code>
* before a file filter is created from it.
* @return
* a non-null file filter matching neither directories
* nor <code>texFile</code>
* but else all files with names matching <code>pattern</code>,
* where the special sequence {@link #PATTERN_INS_LATEX_MAIN}
* is replaced by the prefix of <code>texFile</code>.
*/
// used only: in methods
// - LatexProcessor.create on tex-file to determine output files.
// - LatexPreProcessor.clearTargetTex to clear also intermediate files.
static FileFilter getFileFilter(File texFile, String pattern) {
final String patternAccept = pattern
.replaceAll(PATTERN_INS_LATEX_MAIN,
getFileNameWithoutSuffix(texFile));
return new FileFilter() {
public boolean accept(File file) {
// the second is superfluous for copying
// and only needed for deletion.
if (file.isDirectory() || file.equals(texFile)) {
return false;
}
return file.getName().matches(patternAccept);
}
};
}
/**
* Returns a file filter matching no directories
* but else all files with names matching <code>xxx<pattern>.idx</code>,
* where <code>idxFile</code> has the form <code>xxx.idx</code>.
*
* @param idxFile
* an idx file for which a file filter has to be created.
* @param pattern
* a pattern which is inserted in the name of <code>idxFile</code>
* right before the suffix.
* @return
* a non-null file filter matching no directories
* but else all files matching <code>xxx<pattern>.idx</code>.
*/
// used by LatexProcessor.runMakeIndexByNeed only
FileFilter getFileFilterReplace(File idxFile, String pattern) {
final String patternAccept = getFileNameWithoutSuffix(idxFile)
+ pattern + getSuffix(idxFile);
return new FileFilter() {
public boolean accept(File file) {
if (file.isDirectory()) {
return false;
}
return file.getName().matches(patternAccept);
}
};
}
/**
* Copies output of the current goal to target folder.
* The source is the parent folder of <code>texFile</code>,
* all its files passing <code>fileFilter</code>
* are considered as output files and
* are copied to <code>targetDir</code>.
* This is invoked by {@link LatexProcessor#create()} only.
* <p>
* Logging:
* <ul>
* <li> WFU01: Cannot read directory...
* <li> WFU03: Cannot close
* </ul>
*
* @param texFile
* the latex main file which was processed.
* Its parent directory
* is the working directory of the compilation process
* in which the output files are created.
* Thus it must be readable (in fact it must also be writable;
* otherwise the output files could not have been created).
* @param fileFilter
* the filter accepting the files (and best only the files)
* which are the result of the processing.
* @param targetDir
* the target directory the output files have to be copied to.
* If this exists already, it must be a directory
* and it must be writable.
* If it does not exist, it must be creatable.
* @return
* The set of all target files.
* @throws BuildFailureException
* <ul>
* <li>TFU04, TFU05 if
* the destination file exists
* and is either a directory (TFU04) or is not writable (TFU05).
* <li>TFU06 if
* an IO-error orrurs when copying: opening streams, reading or writing.
* </ul>
*/
// used in LatexProcessor.create() only
Set<File> copyOutputToTargetFolder(File texFile,
FileFilter fileFilter,
File targetDir) throws BuildFailureException {
Set<File> targetFiles = new HashSet<File>();
assert texFile.exists() && !texFile.isDirectory()
: "Expected existing (regular) tex file " + texFile;
assert !targetDir.exists() || targetDir.isDirectory()
: "Expected existing target folder " + targetDir;
File texFileDir = texFile.getParentFile();
// may log warning WFU01
File[] outputFiles = listFilesOrWarn(texFileDir);
if (outputFiles == null) {
// Here, logging WFU01 already done
return targetFiles;
}
assert outputFiles != null;
File srcFile, destFile;
for (int idx = 0; idx < outputFiles.length; idx++) {
srcFile = outputFiles[idx];
assert srcFile.exists() : "Missing " + srcFile;
if (!fileFilter.accept(srcFile)) {
continue;
}
assert srcFile.exists() && !srcFile.isDirectory()
: "Expected existing (regular) tex file " + texFile;
// since !targetDir.exists() || targetDir.isDirectory()
assert !srcFile.equals(targetDir);
assert !srcFile.equals(texFile);
destFile = new File(targetDir, srcFile.getName());
if (destFile.isDirectory()) {
throw new BuildFailureException
("TFU04: Cannot overwrite directory '" + destFile + "'. ");
}
this.log.debug("Copying '" + srcFile.getName() +
"' to '" + targetDir + "'. ");
try {
// may throw IOException: opening streams, read/write
// may log warning WFU03: Cannot close
doCopyFile(srcFile, destFile);
targetFiles.add(destFile);
} catch (IOException e) {
throw new BuildFailureException("TFU06: Cannot copy '" +
srcFile.getName() +"' to '" + targetDir + "'. ",
e);
}
} // for
return targetFiles;
}
// FIXME: copied from FileUtils
/**
* Internal copy file method.
* <p>
* Logging:
* WFU03: Cannot close
*
* @param srcFile
* the source file.
* @param destFile
* the destination file.
* @throws IOException
* if an error occurs: opening input/output streams,
* reading from file/writing to file.
*/
private void doCopyFile(File srcFile, File destFile) throws IOException {
// may throw FileNotFoundException <= IOException
// if cannot be opened for reading: e.g. not exists, is a directory,...
FileInputStream input = new FileInputStream(srcFile);
try {
// may throw FileNotFoundException <= IOException
FileOutputStream output = new FileOutputStream(destFile);
// if cannot be opened for writing:
// e.g. not exists, is a directory,...
try {
// may throw IOException if an I/O-error occurs
// when reading or writing
copyStream(input, output);
} finally {
// may log warning WFU03
closeQuietly(output);
}
} finally {
// may log warning WFU03
closeQuietly(input);
}
assert !destFile.isDirectory() && destFile.canWrite()
: "Expected existing (regular) writable file "+destFile;
destFile.setLastModified(srcFile.lastModified());
}
/**
* The default buffer size ({@value}) to use for
* {@link #copyStream(InputStream, OutputStream)}
*/
private static final int DEFAULT_BUFFER_SIZE = 1024 * 4;
/**
* Copy bytes from a large (over 2GB) <code>InputStream</code> to an
* <code>OutputStream</code>.
* <p>
* This method uses the provided buffer, so there is no need to use a
* <code>BufferedInputStream</code>.
*
* @param input
* the <code>InputStream</code> to read from
* @param output
* the <code>OutputStream</code> to write to
* @throws IOException
* if an I/O error occurs while reading or writing
*/
private static void copyStream(InputStream input,
OutputStream output) throws IOException {
byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
int n;
// may throw IOException
while (-1 != (n = input.read(buffer))) {
// may throw IOException
output.write(buffer, 0, n);
}
}
// FIXME: almost copy from IOUtils
/**
* Unconditionally close a <code>Closeable</code>.
* <p>
* Equivalent to {@link Closeable#close()},
* except any exceptions will be ignored. FIXME
* This is typically used in finally blocks.
* <p>
* Example code:
* <pre>
* Closeable closeable = null;
* try {
* closeable = new FileReader("foo.txt");
* // process closeable
* closeable.close();
* } catch (Exception e) {
* // error handling
* } finally {
* IOUtils.closeQuietly(closeable);
* }
* </pre>
* <p>
* Logging:
* WFU03: Cannot close
*
* @param closeable
* the object to close, may be null or already closed
*/
private void closeQuietly(Closeable closeable) {
try {
closeable.close();
} catch (IOException ioe) {
this.log.warn("WFU03: Cannot close '" + closeable + "'. ", ioe);
}
}
    // TBD: move elsewhere because this is specific for inkscape
    // TBD: better even to eliminate.
    /**
     * The new preamble of the tex file originally created by inkscape
     * with ending <code>eps_tex</code>.
     * Written verbatim atop the filtered file
     * by {@link #filterInkscapeIncludeFile(File)}.
     * FIXME: version to be included.
     */
    private final static String INKSCAPE_PREAMBLE =
        "%% LatexMavenPlugin (version unknown) modified " +
        "two of the following lines\n";
    /**
     * This is just a workaround because of inkscape's current flaw.
     * It reads file <code>srcFile</code>
     * which is expected to have name with ending <code>eps_tex</code>
     * and writes a file with same name
     * replacing ending by <code>tex</code> with following modifications:
     * <ul>
     * <li>Adds line {@link #INKSCAPE_PREAMBLE} atop </li>
     * <li>Replaces line '%%Accompanies ...' by
     * '%% Accompanies image files 'xxx.pdf/eps/ps'</li>
     * <li>Replaces line
     * '... \includegraphics[width=\\unitlength]{xxx.eps}...'
     * by
     * '... \includegraphics[width=\\unitlength]{xxx}...'</li>
     * </ul>
     * <p>
     * Logging:
     * EFU07, EFU08, EFU09: cannot filter
     *
     * @param srcFile
     *    A file created by inkscape with ending <code>eps_tex</code>
     *    containing the lines
     *    <code>
     *    %% Accompanies image file 'xxx.eps' (pdf, eps, ps)</code> and
     *    <code>\put(0,0){\includegraphics[width=\\unitlength]{xxx.eps}}</code>
     *    with variable <code>xxx</code> and leading blanks.
     */
    public void filterInkscapeIncludeFile(File srcFile) {
        assert LatexPreProcessor.SUFFIX_EPSTEX.equals(getSuffix(srcFile))
            : "Expected suffix '" + LatexPreProcessor.SUFFIX_EPSTEX +
            "' found '" + getSuffix(srcFile) + "'";
        // destFile: same name, suffix replaced by ptx;
        // bareFile: same name without any suffix (used for replacements below)
        File destFile = replaceSuffix(srcFile, LatexPreProcessor.SUFFIX_PTX);
        File bareFile = replaceSuffix(srcFile, LatexPreProcessor.SUFFIX_VOID);
        //FileReader reader = null;
        // both remain null until successfully opened;
        // evaluated in the catch clause to tell EFU07/EFU08/EFU09 apart
        BufferedReader bufferedReader = null;
        FileWriter writer = null;
        try {
            // may throw FileNotFoundException < IOException
            // NOTE(review): uses the platform default charset -- confirm intended
            FileReader reader = new FileReader(srcFile);
            // BufferedReader for performance and to be able to read a line
            bufferedReader = new BufferedReader(reader);
            // may throw IOException
            // if cannot be opened for writing:
            // e.g. not exists, is a directory,...
            writer = new FileWriter(destFile);
            //BufferedWriter bufferedWriter = new BufferedWriter(writer);
            String line;
            // write preamble
            writer.write(INKSCAPE_PREAMBLE);
            // first two lines: write as read
            // NOTE(review): readLine returns null for a too-short file;
            // then 'line + "\n"' writes the literal string "null" -- confirm
            // that inkscape's output always has the expected length
            line = bufferedReader.readLine();
            writer.write(line + "\n");
            line = bufferedReader.readLine();
            writer.write(line + "\n");
            // third line must be changed:
            // '...xxx.eps' (pdf, eps, ps)' becomes '...xxx.pdf/eps/ps''
            line = bufferedReader.readLine();
            line = line.replace(bareFile.getName()
                + LatexPreProcessor.SUFFIX_EPS
                + "' (pdf, eps, ps)",
                bareFile.getName() + ".pdf/eps/ps'\n");
            writer.write(line);
            // readLine may throw IOException
            // TBD: eliminate magic numbers
            // lines 4..55 are copied unchanged;
            // assumes inkscape's output layout is fixed -- TODO confirm
            for (int idx = 4; idx < 56; idx++) {
                line = bufferedReader.readLine();
                writer.write(line + "\n");
            }
            // line 56: drop the suffix from the \includegraphics argument
            line = bufferedReader.readLine();
            line = line.replace(bareFile.getName()
                + LatexPreProcessor.SUFFIX_EPS
                + "}}%",
                bareFile.getName() + "}}%\n");
            writer.write(line);
            // copy the remainder of the file unchanged;
            // NOTE(review): do-while writes "null\n" if the remainder is empty
            line = bufferedReader.readLine();
            do {
                writer.write(line + "\n");
                // readLine may thr. IOException
                line = bufferedReader.readLine();
            } while(line != null);
        } catch (IOException e) {
            if (bufferedReader == null) {
                // Here, FileNotFoundException on srcFile
                this.log.error("EFU07: File '" + srcFile +
                    "' to be filtered cannot be read. ");
                return;
            }
            if (writer == null) {
                // Here, opening destFile for writing failed
                this.log.error("EFU08: Destination file '" + destFile +
                    "' for filtering cannot be written. ");
                return;
            }
            // Here, reading or writing failed mid-filtering
            this.log.error("EFU09: Cannot filter file '" + srcFile +
                "' into '" + destFile + "'. ");
        } finally {
            // Here, an IOException may have occurred
            // may log warning WFU03
            // TBD: what if null?
            closeQuietly(bufferedReader);
            closeQuietly(writer);
        }
    }
/**
* Return the name of the given file without the suffix.
* If the suffix is empty, this is just the name of that file.
*
* @see #getSuffix(File)
*/
static String getFileNameWithoutSuffix(File file) {
String nameFile = file.getName();
int idxDot = nameFile.lastIndexOf(".");
return idxDot == -1
? nameFile
: nameFile.substring(0, idxDot);
}
/**
* Return the suffix of the name of the given file
* including the <code>.</code>,
* except there is no <code>.</code>.
* Then the suffix is empty.
*
* @see #getFileNameWithoutSuffix(File)
*/
// used only by
// LatexPreProcessor.processGraphicsSelectMain(Collection)
// LatexPreProcessor.clearCreated(DirNode)
// FIXME: problem if filename starts with . and has no further .
// then we have a hidden file and the suffix is all but the .
// This is not appropriate.
// One may ensure that this does not happen via an assertion
// and by modifying getFilesRec in a way that hidden files are skipped
static String getSuffix(File file) {
String nameFile = file.getName();
int idxDot = nameFile.lastIndexOf(".");
return idxDot == -1
? ""
: nameFile.substring(idxDot, nameFile.length());
}
    // logFile may be .log or .blg or something
    /**
     * Returns whether the given file <code>file</code> (which shall exist)
     * contains the given pattern <code>pattern</code>
     * or <code>null</code> in case of problems reading <code>file</code>.
     * This is typically applied to log files,
     * but also to latex-files to find the latex main files.
     * <p>
     * Logging:
     * WFU03 cannot close <br>
     * Note that in case <code>null</code> is returned,
     * no error/warning is logged.
     * This must be done by the invoking method.
     *
     * @param file
     *    an existing proper file, not a folder.
     * @param regex
     *    the pattern (regular expression) to look for in <code>file</code>.
     * @return
     *    whether the given file <code>file</code> (which shall exist)
     *    contains the given pattern <code>pattern</code>.
     *    If the file does not exist or an IOException occurs
     *    while reading, <code>null</code> is returned.
     */
    // used only in
    // LatexPreProcessor.isLatexMainFile(File)
    // LatexProcessor.needRun(...)
    // AbstractLatexProcessor.hasErrsWarns(File, String)
    Boolean matchInFile(File file, String regex) {
        Pattern pattern = Pattern.compile(regex, Pattern.MULTILINE);
        // a pattern anchored at the very beginning of the file (\A)
        // must be matched against all text read so far,
        // not against a single line
        boolean fromStart = regex.startsWith("\\A");
        String lines = "";
        try {
            // may throw FileNotFoundException < IOException
            FileReader fileReader = new FileReader(file);
            // BufferedReader for performance and to be able to read a line
            BufferedReader bufferedReader = new BufferedReader(fileReader);
            try {
                // FIXME: seemingly,
                // find may not terminate in case ^(\s*)* but with ^s*
                // but this seems a bug in java's regex engine
                // readLine may throw IOException
                for (String line = bufferedReader.readLine();
                     line != null;
                     // readLine may thr. IOException
                     line = bufferedReader.readLine()) {
                    // FIXME: linewise matching is not appropriate
                    // for further patterns line patternReRunLatex
                    // For anchored patterns, accumulate all lines read
                    // so far (note the embedded 'lines +=' side effect);
                    // otherwise, only the current line is matched.
                    // NOTE(review): the accumulator gets a leading '\n'
                    // before the first line -- confirm \A-patterns
                    // tolerate this; also repeated concatenation is
                    // O(n^2) for large files
                    lines = fromStart ? lines += "\n"+line : line;
                    if (pattern.matcher(lines).find()) {
                        return true;
                    }
                }
                return false;
            } catch (IOException ioe) {
                // Error/Warning must be issued by invoking method
                return null;
            } finally {
                // Here, an IOException may have occurred
                // may log warning WFU03
                closeQuietly(bufferedReader);
            }
        } catch (FileNotFoundException ffe) {
            // Error/Warning must be issued by invoking method
            return null;
        }
    }
/**
* Returns the set of strings representing the <code>idxGroup</code>
* of the pattern <code>regex</code> matching a line
* in file <code>file</code> or returns <code>null</code>
* in case of problems reading <code>file</code>.
* <p>
* This is used only to collect the identifiers
* of explicitly given indices in an idx-file.
*
* <p>
* Logging:
* WFU03 cannot close <br>
*
* @param file
* an existing proper file, not a folder.
* In practice this is an idx file.
* @param regex
* the pattern (regular expression) to look for in <code>file</code>.
* @param idxGroup
* the number of a group of the pattern <code>regex</code>.
* @return
* the set of strings representing the <code>idxGroup</code>
* of the pattern <code>regex</code> matching a line
* in file <code>file</code> or returns <code>null</code>
* in case of problems reading <code>file</code>.
*/
// used in LatexProcessor.runMakeIndexByNeed only
// **** a lot of copying from method matchInFile
Collection<String> collectMatches(File file, String regex, int idxGroup) {
Collection<String> res = new TreeSet<String>();
Pattern pattern = Pattern.compile(regex);
try {
// may throw FileNotFoundException < IOExcption
FileReader fileReader = new FileReader(file);
// BufferedReader for performance
BufferedReader bufferedReader = new BufferedReader(fileReader);
try {
// readLine may throw IOException
Matcher matcher;
for (String line = bufferedReader.readLine();
line != null;
// readLine may thr. IOException
line = bufferedReader.readLine()) {
matcher = pattern.matcher(line);
if (matcher.find()) {
// Here, a match has been found
res.add(matcher.group(idxGroup));
}
} // for
return res;
} catch (IOException ioe) {
// Error/Warning must be issued by invoking method
return null;
} finally {
// Here, an IOException may have occurred
// may log warning WFU03
closeQuietly(bufferedReader);
}
} catch (FileNotFoundException ffe) {
// Error/Warning must be issued by invoking method
return null;
}
}
// used in LatexPreProcessor and in LatexProcessor and in LatexDec
// at numerous places
// TBD: clarify what is wrong with mock that we cannot make this static
static File replaceSuffix(File file, String suffix) {
return new File(file.getParentFile(),
getFileNameWithoutSuffix(file) + suffix);
}
/**
* Deletes all files in the same folder as <code>pFile</code> directly,
* i.e. not in subfolders, which are accepted by <code>filter</code>.
* <p>
* Logging:
* <ul>
* <li> WFU01: Cannot read directory...
* <li> EFU05: Failed to delete file
* </ul>
*
* @param pFile
* a file in a folder to be deleted from.
* This is either a metapost file or a latex main file.
* @param filter
* a filter which decides which files
* from the parent directory of <code>pFile</code> to delete.
*/
// used in LatexPreProcessor.clearTargetMp
// used in LatexPreProcessor.clearTargetTex only
void deleteX(File pFile, FileFilter filter) {
// FIXME: not true for clear target.
// Required: cleanup in order reverse to creation.
assert pFile.exists() && !pFile.isDirectory()
: "Expected existing (regular) file "+pFile;
File dir = pFile.getParentFile();
// may log warning WFU01
File[] found = listFilesOrWarn(dir);
if (found == null) {
// Here, logging WFU01 already done
return;
}
for (File delFile : found) {
// FIXME: not true for clear target.
// Required: cleanup in order reverse to creation.
assert delFile.exists();
if (filter.accept(delFile)) {
assert delFile.exists() && !delFile.isDirectory()
: "Expected existing (regular) file "+delFile;
// may log EFU05: failed to delete
deleteOrError(delFile);
}
}
}
/**
* Deletes <code>delFile</code> or logs a warning.
* <p>
* Logging:
* EFU05: failed to delete
*
* @param delFile
* the existing file to be deleted.
* This must not be a directory.
*/
void deleteOrError(File delFile) {
assert delFile.exists() && !delFile.isDirectory()
: "Expected existing (regular) file "+delFile;
if (!delFile.delete()) {
this.log.error("EFU05: Cannot delete file '" +
delFile + "'. ");
}
}
/**
* Moves file <code>fromFile</code> to <code>toFile</code>
* or logs a warning.
* <p>
* Logging:
* EFU06: failed to move.
*
* @param fromFile
* the existing file to be moved.
* This must not be a directory.
* @param toFile
* the file to be moved to
* This must not be a directory.
*/
void moveOrError(File fromFile, File toFile) {
assert fromFile.exists() && !fromFile.isDirectory()
: "Expected existing (regular) source file "+fromFile;
assert ! toFile.isDirectory()
: "Expected (regular) target file "+toFile;
boolean success = fromFile.renameTo(toFile);
if (!success) {
this.log.error("EFU06: Cannot move file '" +
fromFile + "' to '" + toFile + "'. ");
}
}
    /**
     * Deletes all files in <code>texDir</code> including subdirectories
     * which are not in <code>orgNode</code>.
     * The background is, that <code>orgNode</code> represents the files
     * originally in <code>texDir</code>.
     * <p>
     * Logging:
     * <ul>
     * <li> WFU01: Cannot read directory
     * <li> EFU05: Cannot delete...
     * </ul>
     *
     * @param orgNode
     *    the node representing the files originally in
     *    <code>texDir</code>, recorded before processing.
     * @param texDir
     *    the latex source directory to be cleaned up.
     */
    // used in LatexProcessor.create() only
    // FIXME: warn if deletion failed.
    void cleanUp(DirNode orgNode, File texDir) {
        // constructor DirNode may log warning WFU01 Cannot read directory
        // cleanUpRec may log warning EFU05 Cannot delete...
        // the new DirNode captures the CURRENT state of texDir
        cleanUpRec(texDir, orgNode, new DirNode(texDir, this));
    }
/**
* Deletes all files in <code>currNode</code>
* which are not in <code>orgNode</code> recursively
* including subdirectories.
* The background is, that <code>orgNode</code> represents the files
* originally in the directory and <code>currNode</code>
* the current ones at the end of the creating goal.
* <p>
* Logging:
* EFU05: Cannot delete...
*
* @param orgNode
* the node representing the original files.
* This is the latex source directory or a subdirectory.
* @param currNode
* the node representing the current files.
* This is the latex source directory or a subdirectory.
*/
// used in cleanUp only
private void cleanUpRec(File dir, DirNode orgNode, DirNode currNode) {
assert orgNode.getSubdirs().keySet()
.equals(currNode.getSubdirs().keySet());
File file;
for (String key : orgNode.getSubdirs().keySet()) {
file = new File(dir, key);
cleanUpRec(file,
orgNode .getSubdirs().get(key),
currNode.getSubdirs().get(key));
}
Collection<String> currFileNames = currNode.getRegularFileNames();
currFileNames.removeAll(orgNode.getRegularFileNames());
for (String fileName : currFileNames) {
file = new File(dir, fileName);
// may log error EFU05: Cannot delete file
deleteOrError(file);
}
}
// TBD: clarify whether this hack is really needed.
/**
* Temporarily generated file to be passed to {@link Converter#Makeindex}
* to allow to determine the version of the tool.
*/
static File EMPTY_IDX;
static File getEmptyIdx() {
if (EMPTY_IDX == null) {
try {
EMPTY_IDX = File.createTempFile("forMakeindex",
LatexProcessor.SUFFIX_IDX);
EMPTY_IDX.deleteOnExit();
replaceSuffix(EMPTY_IDX, LatexProcessor.SUFFIX_IND).deleteOnExit();
replaceSuffix(EMPTY_IDX, LatexProcessor.SUFFIX_ILG).deleteOnExit();
} catch(Exception e) {
// TBD: eliminate: shall be a warning or even less than that.
// But to that end, this must not be static.
throw new IllegalStateException("Could not create temp file.");
}
}
return EMPTY_IDX;
}
public static void main(String[] args) {
String regex = args[0];
String text = args[1];
text = "xx\nyzzz";
System.out.println("regex: "+regex);
System.out.println("text: "+text);
System.out.println("len: "+text.length());
Pattern pattern = Pattern.compile(regex, Pattern.MULTILINE);
java.util.regex.Matcher matcher = pattern.matcher(text);
matcher.useAnchoringBounds(true);
System.out.println("find: "+matcher.find());
System.out.println("hitEnd: "+matcher.hitEnd());
System.out.println("hitEnd: "+matcher.end());
}
}
| |
/*
* Copyright 2013 Ben Manes. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.benmanes.multiway;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;
import com.github.benmanes.multiway.ResourceKey.Status;
import com.github.benmanes.multiway.TransferPool.LoadingTransferPool;
import com.google.common.base.Stopwatch;
import com.google.common.cache.Weigher;
import com.google.common.testing.FakeTicker;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static com.jayway.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
/**
* @author ben.manes@gmail.com (Ben Manes)
*/
public final class MultiwayPoolTest {
  /** Pool key shared by most test scenarios. */
  private static final Integer KEY_1 = 1;

  // fake time source for deterministic expiration tests
  private FakeTicker ticker;
  // records lifecycle callbacks (created/borrows/releases/removals)
  private TestResourceLifecycle lifecycle;
  // the pool under test; recreated before each test method
  private LoadingTransferPool<Integer, UUID> multiway;
  /** Creates a fresh ticker, lifecycle recorder and pool before each test. */
  @BeforeMethod
  public void beforeMethod() {
    ticker = new FakeTicker();
    lifecycle = new TestResourceLifecycle();
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder());
  }

  /**
   * Builds the pool under test from the given builder,
   * installing the recording lifecycle unless one is already set.
   */
  @SuppressWarnings("unchecked")
  LoadingTransferPool<Integer, UUID> makeMultiwayPool(MultiwayPoolBuilder<?, ?> builder) {
    MultiwayPoolBuilder<Integer, UUID> pools = (MultiwayPoolBuilder<Integer, UUID>) builder;
    if (pools.lifecycle == null) {
      pools.lifecycle(lifecycle);
    }
    return (LoadingTransferPool<Integer, UUID>) pools.build(new TestResourceLoader());
  }
  /**
   * Hammers borrow/release from many threads and checks that the
   * lifecycle counters balance out afterwards.
   */
  @Test
  public void borrow_concurrent() throws Exception {
    final int numThreads = 10;
    final int iterations = 100;
    ConcurrentTestHarness.timeTasks(numThreads, new Runnable() {
      @Override public void run() {
        for (int i = 0; i < iterations; i++) {
          UUID resource = multiway.borrow(KEY_1);
          // encourage interleaving between threads
          yield();
          multiway.release(resource);
          yield();
        }
      }
    });
    multiway.cleanUp();
    int cycles = numThreads * iterations;
    int size = (int) multiway.size();
    // every resource created is back in the pool;
    // every borrow was matched by a release
    assertThat(lifecycle.created(), is(size));
    assertThat(lifecycle.borrows(), is(cycles));
    assertThat(lifecycle.releases(), is(cycles));
    //assertThat(multiway.transferQueues.get(KEY_1).size(), is(size));
  }
  /**
   * A released resource is pooled, so a subsequent borrow for the
   * same key hands out the identical instance.
   */
  @Test
  public void borrow_sameInstance() {
    UUID expected = getAndRelease(KEY_1);
    UUID actual = getAndRelease(KEY_1);
    assertThat(expected, is(actual));
    assertThat(lifecycle.borrows(), is(2));
    assertThat(lifecycle.releases(), is(2));
    assertThat(lifecycle.removals(), is(0));
  }

  /** Releasing the same resource twice is a usage error and is rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void borrow_multipleReleases() {
    UUID resource = multiway.borrow(KEY_1);
    multiway.release(resource);
    multiway.release(resource);
  }
  /**
   * A releasing thread should hand its resource directly to a waiting
   * borrower (transfer semantics) instead of blocking for the full
   * release timeout.
   * NOTE(review): disabled; also uses the Stopwatch constructor, which
   * is deprecated in newer Guava (use Stopwatch.createStarted()).
   */
  @Test(enabled = false)
  public void borrow_fromTransfer() throws Exception {
    Stopwatch stopwatch = new Stopwatch().start();
    final AtomicBoolean start = new AtomicBoolean();
    final AtomicBoolean done = new AtomicBoolean();
    new Thread() {
      @Override public void run() {
        start.set(true);
        // waits only briefly for a pooled resource (forces a load)
        UUID resource = multiway.borrow(KEY_1, 1, TimeUnit.NANOSECONDS);
        // blocks up to a minute trying to transfer to a waiting borrower
        multiway.release(resource, 1, TimeUnit.MINUTES);
        done.set(true);
      }
    }.start();
    await().untilTrue(start);
    assertThat(done.get(), is(false));
    // this borrow should receive the resource via direct hand-off
    UUID resource = multiway.borrow(KEY_1);
    await().untilTrue(done);
    multiway.release(resource);
    // the hand-off must complete quickly, not after the minute timeout
    assertThat(stopwatch.elapsed(TimeUnit.MINUTES), is(0L));
  }
@Test
public void borrow_callable() {
MultiwayPool<Integer, UUID> multiway = MultiwayPoolBuilder.newBuilder().build();
final UUID expected = UUID.randomUUID();
UUID resource = multiway.borrow(KEY_1, new Callable<UUID>() {
@Override public UUID call() throws Exception {
return expected;
}
});
assertThat(resource, is(expected));
multiway.release(resource);
}
  /**
   * With maximumSize(0) every released resource is evicted at once,
   * so two borrows for the same key yield distinct resources.
   */
  @Test
  public void evict_immediately() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder().maximumSize(0));
    UUID first = getAndRelease(KEY_1);
    UUID second = getAndRelease(KEY_1);
    assertThat(first, is(not(second)));
    assertThat(lifecycle.removals(), is(2));
    assertThat(multiway.size(), is(0L));
    //assertThat(multiway.transferQueues.getIfPresent(KEY_1), is(empty()));
  }

  /** Invalidating the pool kills an idle resource: IDLE -> DEAD. */
  @Test
  public void evict_whenIdle() {
    getAndRelease(KEY_1);
    ResourceKey<?> resourceKey = getResourceKey();
    assertThat(resourceKey.getStatus(), is(Status.IDLE));
    multiway.invalidateAll();
    assertThat(multiway.size(), is(0L));
    assertThat(resourceKey.getStatus(), is(Status.DEAD));
  }
  /**
   * Invalidating while a resource is borrowed retires it
   * (IN_FLIGHT -> RETIRED); the discard completes on release (-> DEAD).
   */
  @Test
  public void evict_whenInFlight() {
    UUID resource = multiway.borrow(KEY_1);
    ResourceKey<?> resourceKey = getResourceKey();
    assertThat(resourceKey.getStatus(), is(Status.IN_FLIGHT));
    multiway.invalidateAll();
    assertThat(multiway.size(), is(0L));
    assertThat(resourceKey.getStatus(), is(Status.RETIRED));
    multiway.release(resource);
    assertThat(resourceKey.getStatus(), is(Status.DEAD));
  }

  /**
   * A resource already retired (simulating idle cache expiration)
   * is discarded by invalidateAll: RETIRED -> DEAD.
   */
  @Test
  public void evict_whenRetired() {
    getAndRelease(KEY_1);
    ResourceKey<?> resourceKey = getResourceKey();
    // Simulate transition due to idle cache expiration
    resourceKey.goFromIdleToRetired();
    multiway.invalidateAll();
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.borrows(), is(1));
    assertThat(lifecycle.releases(), is(1));
    assertThat(lifecycle.removals(), is(1));
    assertThat(resourceKey.getStatus(), is(Status.DEAD));
  }
@Test
public void evict_multipleQueues() {
for (int i = 0; i < 10; i++) {
getAndRelease(i);
}
assertThat(multiway.size(), is(10L));
multiway.invalidateAll();
assertThat(multiway.size(), is(0L));
assertThat(lifecycle.borrows(), is(10));
assertThat(lifecycle.releases(), is(10));
assertThat(lifecycle.removals(), is(10));
}
  /**
   * A size bound of 10 evicts the 90 surplus resources once all
   * 100 borrowed instances are released.
   */
  @Test
  public void evict_maximumSize() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder().maximumSize(10));
    List<UUID> resources = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
      resources.add(multiway.borrow(KEY_1));
    }
    for (UUID resource : resources) {
      multiway.release(resource);
    }
    assertThat(multiway.size(), is(10L));
    assertThat(lifecycle.borrows(), is(100));
    assertThat(lifecycle.releases(), is(100));
    assertThat(lifecycle.removals(), is(90));
  }

  /**
   * With a weight bound of 10 and a constant weight of 5 per resource,
   * only two resources fit in the pool; the other 98 are evicted.
   */
  @Test
  public void evict_maximumWeight() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder().maximumWeight(10)
        .weigher(new Weigher<Integer, UUID>() {
          @Override public int weigh(Integer key, UUID resource) {
            return 5;
          }
        }));
    List<UUID> resources = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
      resources.add(multiway.borrow(KEY_1));
    }
    for (UUID resource : resources) {
      multiway.release(resource);
    }
    assertThat(multiway.size(), is(2L));
    assertThat(lifecycle.borrows(), is(100));
    assertThat(lifecycle.releases(), is(100));
    assertThat(lifecycle.removals(), is(98));
  }
  /**
   * Advancing the fake ticker past the access-expiration window
   * makes cleanUp discard every pooled resource.
   */
  @Test
  public void evict_expireAfterAccess() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
        .ticker(ticker).expireAfterAccess(1, TimeUnit.MINUTES));
    List<UUID> resources = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
      resources.add(multiway.borrow(KEY_1));
    }
    for (UUID resource : resources) {
      multiway.release(resource);
    }
    // jump well past the 1 minute expiration window
    ticker.advance(10, TimeUnit.MINUTES);
    multiway.cleanUp();
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.borrows(), is(100));
    assertThat(lifecycle.releases(), is(100));
    assertThat(lifecycle.removals(), is(100));
  }

  /** Same as above, but with write-based expiration. */
  @Test
  public void evict_expireAfterWrite() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
        .ticker(ticker).expireAfterWrite(1, TimeUnit.MINUTES));
    List<UUID> resources = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
      resources.add(multiway.borrow(KEY_1));
    }
    for (UUID resource : resources) {
      multiway.release(resource);
    }
    ticker.advance(10, TimeUnit.MINUTES);
    multiway.cleanUp();
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.borrows(), is(100));
    assertThat(lifecycle.releases(), is(100));
    assertThat(lifecycle.removals(), is(100));
  }
  /**
   * Releasing an in-flight resource returns it to the pool:
   * its handle transitions IN_FLIGHT -> IDLE and no removal occurs.
   */
  @Test
  public void release_toPool() {
    UUID resource = multiway.borrow(KEY_1);
    assertThat(multiway.size(), is(1L));
    assertThat(lifecycle.borrows(), is(1));
    assertThat(lifecycle.releases(), is(0));
    assertThat(lifecycle.removals(), is(0));
    // look up the internal handle tracking the borrowed resource
    TransferPool<Integer, UUID>.ResourceHandle handle = multiway.inFlight.get().get(resource);
    assertThat(handle.resourceKey.getStatus(), is(Status.IN_FLIGHT));
    multiway.release(resource);
    assertThat(multiway.size(), is(1L));
    assertThat(lifecycle.releases(), is(1));
    assertThat(lifecycle.removals(), is(0));
    // first argument is the failure message (assertThat overload)
    assertThat(handle.toString(), handle.resourceKey.getStatus(), is(Status.IDLE));
  }

  /**
   * Releasing a resource that was retired by invalidateAll while
   * in flight discards it instead of pooling it: RETIRED -> DEAD.
   */
  @Test
  public void release_andDiscard() {
    UUID resource = multiway.borrow(KEY_1);
    TransferPool<Integer, UUID>.ResourceHandle handle = multiway.inFlight.get().get(resource);
    multiway.invalidateAll();
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.borrows(), is(1));
    assertThat(lifecycle.releases(), is(0));
    assertThat(lifecycle.removals(), is(0));
    assertThat(handle.resourceKey.getStatus(), is(Status.RETIRED));
    multiway.release(resource);
    assertThat(lifecycle.releases(), is(1));
    assertThat(lifecycle.removals(), is(1));
    assertThat(handle.resourceKey.getStatus(), is(Status.DEAD));
  }
  /**
   * releaseAndInvalidate on an in-flight resource removes it
   * directly: IN_FLIGHT -> DEAD.
   */
  @Test
  public void invalidate_whenInFlight() {
    UUID resource = multiway.borrow(KEY_1);
    TransferPool<Integer, UUID>.ResourceHandle handle = multiway.inFlight.get().get(resource);
    assertThat(multiway.size(), is(1L));
    assertThat(lifecycle.borrows(), is(1));
    assertThat(lifecycle.releases(), is(0));
    assertThat(lifecycle.removals(), is(0));
    assertThat(handle.resourceKey.getStatus(), is(Status.IN_FLIGHT));
    multiway.releaseAndInvalidate(resource);
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.releases(), is(1));
    assertThat(lifecycle.removals(), is(1));
    assertThat(handle.resourceKey.getStatus(), is(Status.DEAD));
  }

  /**
   * releaseAndInvalidate on a resource already retired by
   * invalidateAll completes the discard: RETIRED -> DEAD.
   */
  @Test
  public void invalidate_whenRetired() {
    UUID resource = multiway.borrow(KEY_1);
    TransferPool<Integer, UUID>.ResourceHandle handle = multiway.inFlight.get().get(resource);
    multiway.invalidateAll();
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.borrows(), is(1));
    assertThat(lifecycle.releases(), is(0));
    assertThat(lifecycle.removals(), is(0));
    assertThat(handle.resourceKey.getStatus(), is(Status.RETIRED));
    multiway.releaseAndInvalidate(resource);
    assertThat(multiway.size(), is(0L));
    assertThat(lifecycle.releases(), is(1));
    assertThat(lifecycle.removals(), is(1));
    assertThat(handle.resourceKey.getStatus(), is(Status.DEAD));
  }
//
// @Test
// public void discardPool() {
// UUID resource = multiway.borrow(KEY_1);
// GcFinalization.awaitFullGc();
// assertThat(multiway.transferStacks.size(), is(1L));
//
// multiway.release(resource);
// multiway.invalidateAll();
//
// GcFinalization.awaitFullGc();
// multiway.transferStacks.cleanUp();
// assertThat(multiway.transferStacks.size(), is(0L));
// }
@Test
public void invalidate() {
for (int i = 0; i < 10; i++) {
getAndRelease(i);
}
multiway.invalidate(5);
assertThat(multiway.size(), is(9L));
}
@Test
public void invalidateAll() {
for (int i = 0; i < 10; i++) {
getAndRelease(i);
}
multiway.invalidateAll();
assertThat(multiway.size(), is(0L));
}
  /**
   * After invalidateAll, no pooled handle can be obtained
   * for the key any more.
   */
  @Test
  public void tryToGetPooledResourceHandle_notFound() {
    getAndRelease(KEY_1);
    ResourceKey<Integer> resourceKey = getResourceKey();
    multiway.invalidateAll();
    assertThat(multiway.tryToGetPooledResourceHandle(resourceKey), is(nullValue()));
  }

  /**
   * A handle that is no longer IDLE (here: RETIRED) must not be
   * handed out from the pool.
   */
  @Test
  public void tryToGetPooledResourceHandle_notIdle() {
    getAndRelease(KEY_1);
    ResourceKey<Integer> resourceKey = getResourceKey();
    resourceKey.goFromIdleToRetired();
    assertThat(multiway.tryToGetPooledResourceHandle(resourceKey), is(nullValue()));
  }
  /**
   * With stats recording enabled, the first borrow is a load and the
   * second borrow (after release) hits the pooled resource.
   */
  @Test
  public void stats() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder().recordStats());
    getAndRelease(KEY_1);
    getAndRelease(KEY_1);
    assertThat(multiway.stats().hitCount(), is(1L));
    assertThat(multiway.stats().loadSuccessCount(), is(1L));
  }
//
// @Test
// public void lifecycle_onCreate_fail() {
// final AtomicBoolean onRemovalCalled = new AtomicBoolean();
// multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
// .lifecycle(new ResourceLifecycle<Integer, UUID>() {
// @Override public void onCreate(Integer key, UUID resource) {
// throw new UnsupportedOperationException();
// }
// @Override public void onRemoval(Integer key, UUID resource) {
// onRemovalCalled.set(true);
// }
// }));
// try {
// getAndRelease(KEY_1);
// Assert.fail();
// } catch (Exception e) {
// assertThat(multiway.cache.size(), is(0L));
// assertThat(onRemovalCalled.get(), is(true));
// }
// }
//
// @Test
// public void lifecycle_onBorrow_fail() {
// final AtomicBoolean onRemovalCalled = new AtomicBoolean();
// multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
// .lifecycle(new ResourceLifecycle<Integer, UUID>() {
// @Override public void onBorrow(Integer key, UUID resource) {
// throw new UnsupportedOperationException();
// }
// @Override public void onRemoval(Integer key, UUID resource) {
// onRemovalCalled.set(true);
// }
// }));
// try {
// getAndRelease(KEY_1);
// Assert.fail();
// } catch (Exception e) {
// assertThat(multiway.cache.size(), is(0L));
// assertThat(onRemovalCalled.get(), is(true));
// }
// }
//
// @Test
// public void lifecycle_onRelease_fail() {
// final AtomicBoolean onRemovalCalled = new AtomicBoolean();
// multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
// .lifecycle(new ResourceLifecycle<Integer, UUID>() {
// @Override public void onRelease(Integer key, UUID resource) {
// throw new UnsupportedOperationException();
// }
// @Override public void onRemoval(Integer key, UUID resource) {
// onRemovalCalled.set(true);
// }
// }));
// try {
// getAndRelease(KEY_1);
// Assert.fail();
// } catch (Exception e) {
// assertThat(multiway.cache.size(), is(0L));
// assertThat(onRemovalCalled.get(), is(true));
// }
// }
//
// @Test
// public void lifecycle_onRemove_fail_pool() {
// multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
// .lifecycle(new ResourceLifecycle<Integer, UUID>() {
// @Override public void onRemoval(Integer key, UUID resource) {
// throw new UnsupportedOperationException();
// }
// }));
// getAndRelease(KEY_1);
// multiway.invalidateAll();
// assertThat(multiway.cache.size(), is(0L));
// }
@Test
public void lifecycle_onRemove_fail_handle() {
multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
.lifecycle(new ResourceLifecycle<Integer, UUID>() {
@Override public void onRemoval(Integer key, UUID resource) {
throw new UnsupportedOperationException();
}
}));
UUID resource = multiway.borrow(KEY_1);
try {
multiway.releaseAndInvalidate(resource);
Assert.fail();
} catch (UnsupportedOperationException e) {
assertThat(multiway.size(), is(0L));
}
}
  /**
   * Verifies that the time-to-idle lock is released even when the scheduled
   * cleanup task throws, so one failing task cannot deadlock later cleanups.
   */
  @Test
  public void unlockOnCleanup() {
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
        .expireAfterAccess(1, TimeUnit.MINUTES));
    // A task that always fails when the idle policy runs it.
    Runnable runner = new Runnable() {
      @Override public void run() {
        throw new IllegalStateException();
      }
    };
    getAndRelease(KEY_1);
    try {
      multiway.timeToIdlePolicy.get().schedule(getResourceKey(), runner);
      Assert.fail();
    } catch (IllegalStateException e) {
      // The idle lock must have been released despite the exception.
      assertThat(((ReentrantLock) multiway.timeToIdlePolicy.get().idleLock).isLocked(), is(false));
    }
  }
  /**
   * Hammers the pool from 10 threads that randomly borrow and release
   * resources, then checks the global accounting invariants afterwards.
   */
  @Test
  public void concurrent() throws Exception {
    long maxSize = 10;
    multiway = makeMultiwayPool(MultiwayPoolBuilder.newBuilder()
        .expireAfterAccess(100, TimeUnit.NANOSECONDS)
        .maximumSize(maxSize));
    ConcurrentTestHarness.timeTasks(10, new Runnable() {
      final ThreadLocalRandom random = ThreadLocalRandom.current();
      @Override
      public void run() {
        // Each thread tracks the resources it still holds.
        Deque<UUID> resources = new ArrayDeque<>();
        for (int i = 0; i < 100; i++) {
          execute(resources, i);
          yield();
        }
        // Return everything still held so borrow/release counts balance.
        for (UUID resource : resources) {
          multiway.release(resource);
        }
      }
      // Randomly either borrow a new resource or release a held one.
      void execute(Deque<UUID> resources, int key) {
        if (random.nextBoolean()) {
          UUID resource = multiway.borrow(key);
          resources.add(resource);
        } else if (!resources.isEmpty()) {
          UUID resource = resources.remove();
          multiway.release(resource);
        }
      }
    });
    multiway.cleanUp();
    //    long queued = 0;
    long size = multiway.size();
    //    for (Queue<?> queue : multiway.transferQueues.asMap().values()) {
    //      queued += queue.size();
    //    }
    //
    //    assertThat(queued, is(size));
    assertThat(size, lessThanOrEqualTo(maxSize));
    // Every borrow was matched by a release, and every created resource is
    // either still pooled or was removed.
    assertThat(lifecycle.releases(), is(lifecycle.borrows()));
    assertThat(lifecycle.created(), is((int) size + lifecycle.removals()));
  }
private UUID getAndRelease(Integer key) {
UUID resource = multiway.borrow(key);
multiway.release(resource);
return resource;
}
  /** Plucks an arbitrary resource key out of the pool's backing cache. */
  private ResourceKey<Integer> getResourceKey() {
    return multiway.cache.keySet().iterator().next();
  }
  /** Briefly yields and parks the current thread to encourage interleavings. */
  private void yield() {
    Thread.yield();
    LockSupport.parkNanos(1L);
  }
  /** Loader that materializes a fresh random UUID for any key. */
  private static final class TestResourceLoader implements ResourceLoader<Integer, UUID> {
    @Override public UUID load(Integer key) throws Exception {
      return UUID.randomUUID();
    }
  }
private static final class TestResourceLifecycle extends ResourceLifecycle<Integer, UUID> {
final AtomicInteger created = new AtomicInteger();
final AtomicInteger borrows = new AtomicInteger();
final AtomicInteger releases = new AtomicInteger();
final AtomicInteger removals = new AtomicInteger();
@Override
public void onCreate(Integer key, UUID resource) {
created.incrementAndGet();
}
@Override
public void onBorrow(Integer key, UUID resource) {
borrows.incrementAndGet();
}
@Override
public void onRelease(Integer key, UUID resource) {
releases.incrementAndGet();
}
@Override
public void onRemoval(Integer key, UUID resource) {
removals.incrementAndGet();
}
public int created() {
return created.get();
}
public int borrows() {
return borrows.get();
}
public int releases() {
return releases.get();
}
public int removals() {
return removals.get();
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.hdfs.BlockReader;
import org.apache.hadoop.hdfs.BlockReaderFactory;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.RemotePeerFactory;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.net.Peer;
import org.apache.hadoop.hdfs.net.TcpPeerServer;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DatanodeInfoWithStorage;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockCollection;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementStatus;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.NumberReplicas;
import org.apache.hadoop.hdfs.server.datanode.CachingStrategy;
import org.apache.hadoop.hdfs.util.LightWeightLinkedSet;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
/**
* This class provides rudimentary checking of DFS volumes for errors and
* sub-optimal conditions.
* <p>The tool scans all files and directories, starting from an indicated
* root path. The following abnormal conditions are detected and handled:</p>
* <ul>
* <li>files with blocks that are completely missing from all datanodes.<br/>
* In this case the tool can perform one of the following actions:
* <ul>
* <li>none ({@link #FIXING_NONE})</li>
* <li>move corrupted files to /lost+found directory on DFS
* ({@link #FIXING_MOVE}). Remaining data blocks are saved as a
* block chains, representing longest consecutive series of valid blocks.</li>
* <li>delete corrupted files ({@link #FIXING_DELETE})</li>
* </ul>
* </li>
* <li>detect files with under-replicated or over-replicated blocks</li>
* </ul>
* Additionally, the tool collects a detailed overall DFS statistics, and
* optionally can print detailed statistics on block locations and replication
* factors of each file.
*/
@InterfaceAudience.Private
public class NamenodeFsck implements DataEncryptionKeyFactory {
  // NOTE(review): deliberately logs under the NameNode logger name rather than
  // NamenodeFsck, presumably so fsck output lands in the NameNode log — confirm
  // before "fixing".
  public static final Log LOG = LogFactory.getLog(NameNode.class.getName());

  // return string marking fsck status
  // (the DFSck client greps for these exact strings; do not change the text)
  public static final String CORRUPT_STATUS = "is CORRUPT";
  public static final String HEALTHY_STATUS = "is HEALTHY";
  public static final String DECOMMISSIONING_STATUS = "is DECOMMISSIONING";
  public static final String DECOMMISSIONED_STATUS = "is DECOMMISSIONED";
  public static final String NONEXISTENT_STATUS = "does not exist";
  public static final String FAILURE_STATUS = "FAILED";
  private final NameNode namenode;
  private final NetworkTopology networktopology;
  private final int totalDatanodes;
  private final InetAddress remoteAddress;
  // lost+found directory state; initialized lazily on first salvage attempt
  private String lostFound = null;
  private boolean lfInited = false;
  private boolean lfInitedOk = false;
  // Display options, parsed from the servlet url parameters in the constructor.
  private boolean showFiles = false;
  private boolean showOpenFiles = false;
  private boolean showBlocks = false;
  private boolean showLocations = false;
  private boolean showRacks = false;
  private boolean showStoragePolcies = false;
  private boolean showprogress = false;
  private boolean showCorruptFileBlocks = false;
  private boolean showReplicaDetails = false;
  // Interval (ms) after which a datanode is considered stale.
  private long staleInterval;
  /**
   * True if we encountered an internal error during FSCK, such as not being
   * able to delete a corrupt file.
   */
  private boolean internalError = false;
  /**
   * True if the user specified the -move option.
   *
   * When this option is in effect, we will copy salvaged blocks into the lost
   * and found. */
  private boolean doMove = false;
  /**
   * True if the user specified the -delete option.
   *
   * When this option is in effect, we will delete corrupted files.
   */
  private boolean doDelete = false;
  // Root path of the check; defaults to the entire namespace.
  String path = "/";
  // Space-separated block ids when running in blockId-check mode.
  private String blockIds = null;
  // We return back N files that are corrupt; the list of files returned is
  // ordered by block id; to allow continuation support, pass in the last block
  // # from previous call
  private final String[] currentCookie = new String[] { null };
  private final Configuration conf;
  private final PrintWriter out;
  private List<String> snapshottableDirs = null;
  private final BlockPlacementPolicy bpPolicy;
  // Populated only when the storagepolicies option is set.
  private StoragePolicySummary storageTypeSummary = null;
/**
* Filesystem checker.
* @param conf configuration (namenode config)
* @param namenode namenode that this fsck is going to use
* @param pmap key=value[] map passed to the http servlet as url parameters
* @param out output stream to write the fsck output
* @param totalDatanodes number of live datanodes
* @param remoteAddress source address of the fsck request
*/
NamenodeFsck(Configuration conf, NameNode namenode,
NetworkTopology networktopology,
Map<String,String[]> pmap, PrintWriter out,
int totalDatanodes, InetAddress remoteAddress) {
this.conf = conf;
this.namenode = namenode;
this.networktopology = networktopology;
this.out = out;
this.totalDatanodes = totalDatanodes;
this.remoteAddress = remoteAddress;
this.bpPolicy = BlockPlacementPolicy.getInstance(conf, null,
networktopology,
namenode.getNamesystem().getBlockManager().getDatanodeManager()
.getHost2DatanodeMap());
this.staleInterval =
conf.getLong(DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_KEY,
DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_DEFAULT);
for (Iterator<String> it = pmap.keySet().iterator(); it.hasNext();) {
String key = it.next();
if (key.equals("path")) { this.path = pmap.get("path")[0]; }
else if (key.equals("move")) { this.doMove = true; }
else if (key.equals("delete")) { this.doDelete = true; }
else if (key.equals("files")) { this.showFiles = true; }
else if (key.equals("blocks")) { this.showBlocks = true; }
else if (key.equals("locations")) { this.showLocations = true; }
else if (key.equals("racks")) { this.showRacks = true; }
else if (key.equals("replicadetails")) {
this.showReplicaDetails = true;
}
else if (key.equals("storagepolicies")) { this.showStoragePolcies = true; }
else if (key.equals("showprogress")) { this.showprogress = true; }
else if (key.equals("openforwrite")) {this.showOpenFiles = true; }
else if (key.equals("listcorruptfileblocks")) {
this.showCorruptFileBlocks = true;
} else if (key.equals("startblockafter")) {
this.currentCookie[0] = pmap.get("startblockafter")[0];
} else if (key.equals("includeSnapshots")) {
this.snapshottableDirs = new ArrayList<String>();
} else if (key.equals("blockId")) {
this.blockIds = pmap.get("blockId")[0];
}
}
}
/**
* Check block information given a blockId number
*
*/
public void blockIdCK(String blockId) {
if(blockId == null) {
out.println("Please provide valid blockId!");
return;
}
BlockManager bm = namenode.getNamesystem().getBlockManager();
try {
//get blockInfo
Block block = new Block(Block.getBlockId(blockId));
//find which file this block belongs to
BlockInfo blockInfo = bm.getStoredBlock(block);
if(blockInfo == null) {
out.println("Block "+ blockId +" " + NONEXISTENT_STATUS);
LOG.warn("Block "+ blockId + " " + NONEXISTENT_STATUS);
return;
}
BlockCollection bc = bm.getBlockCollection(blockInfo);
INode iNode = (INode) bc;
NumberReplicas numberReplicas= bm.countNodes(blockInfo);
out.println("Block Id: " + blockId);
out.println("Block belongs to: "+iNode.getFullPathName());
out.println("No. of Expected Replica: " + blockInfo.getReplication());
out.println("No. of live Replica: " + numberReplicas.liveReplicas());
out.println("No. of excess Replica: " + numberReplicas.excessReplicas());
out.println("No. of stale Replica: " +
numberReplicas.replicasOnStaleNodes());
out.println("No. of decommissioned Replica: "
+ numberReplicas.decommissioned());
out.println("No. of decommissioning Replica: "
+ numberReplicas.decommissioning());
out.println("No. of corrupted Replica: " +
numberReplicas.corruptReplicas());
//record datanodes that have corrupted block replica
Collection<DatanodeDescriptor> corruptionRecord = null;
if (bm.getCorruptReplicas(block) != null) {
corruptionRecord = bm.getCorruptReplicas(block);
}
//report block replicas status on datanodes
for(int idx = (blockInfo.numNodes()-1); idx >= 0; idx--) {
DatanodeDescriptor dn = blockInfo.getDatanode(idx);
out.print("Block replica on datanode/rack: " + dn.getHostName() +
dn.getNetworkLocation() + " ");
if (corruptionRecord != null && corruptionRecord.contains(dn)) {
out.print(CORRUPT_STATUS+"\t ReasonCode: "+
bm.getCorruptReason(block,dn));
} else if (dn.isDecommissioned() ){
out.print(DECOMMISSIONED_STATUS);
} else if (dn.isDecommissionInProgress()) {
out.print(DECOMMISSIONING_STATUS);
} else {
out.print(HEALTHY_STATUS);
}
out.print("\n");
}
} catch (Exception e){
String errMsg = "Fsck on blockId '" + blockId;
LOG.warn(errMsg, e);
out.println(e.getMessage());
out.print("\n\n" + errMsg);
LOG.warn("Error in looking up block", e);
}
}
  /**
   * Check files on DFS, starting from the indicated path. In blockId mode
   * (when "blockId" url parameters were given) each listed block is audited
   * individually instead of walking the namespace.
   */
  public void fsck() {
    final long startTime = Time.monotonicNow();
    try {
      if(blockIds != null) {
        // Block-id mode: check each space-separated block id and return.
        String[] blocks = blockIds.split(" ");
        StringBuilder sb = new StringBuilder();
        sb.append("FSCK started by " +
            UserGroupInformation.getCurrentUser() + " from " +
            remoteAddress + " at " + new Date());
        out.println(sb.toString());
        // NOTE(review): the " for blockIds" suffix is appended after the line
        // above was already printed, so it only reaches the LOG.info below —
        // confirm this asymmetry between client output and log is intended.
        sb.append(" for blockIds: \n");
        for (String blk: blocks) {
          if(blk == null || !blk.contains(Block.BLOCK_FILE_PREFIX)) {
            out.println("Incorrect blockId format: " + blk);
            continue;
          }
          out.print("\n");
          blockIdCK(blk);
          sb.append(blk + "\n");
        }
        LOG.info(sb.toString());
        namenode.getNamesystem().logFsckEvent("/", remoteAddress);
        out.flush();
        return;
      }

      String msg = "FSCK started by " + UserGroupInformation.getCurrentUser()
          + " from " + remoteAddress + " for path " + path + " at " + new Date();
      LOG.info(msg);
      out.println(msg);
      namenode.getNamesystem().logFsckEvent(path, remoteAddress);

      // When snapshots are included, collect every snapshottable directory so
      // checkDir() also descends into the .snapshot subtrees.
      if (snapshottableDirs != null) {
        SnapshottableDirectoryStatus[] snapshotDirs = namenode.getRpcServer()
            .getSnapshottableDirListing();
        if (snapshotDirs != null) {
          for (SnapshottableDirectoryStatus dir : snapshotDirs) {
            snapshottableDirs.add(dir.getFullPath().toString());
          }
        }
      }

      final HdfsFileStatus file = namenode.getRpcServer().getFileInfo(path);
      if (file != null) {
        if (showCorruptFileBlocks) {
          listCorruptFileBlocks();
          return;
        }

        if (this.showStoragePolcies) {
          storageTypeSummary = new StoragePolicySummary(
              namenode.getNamesystem().getBlockManager().getStoragePolicies());
        }

        Result res = new Result(conf);

        check(path, file, res);

        out.println(res);
        out.println(" Number of data-nodes:\t\t" + totalDatanodes);
        out.println(" Number of racks:\t\t" + networktopology.getNumOfRacks());

        if (this.showStoragePolcies) {
          out.print(storageTypeSummary.toString());
        }

        out.println("FSCK ended at " + new Date() + " in "
            + (Time.monotonicNow() - startTime + " milliseconds"));

        // If there were internal errors during the fsck operation, we want to
        // return FAILURE_STATUS, even if those errors were not immediately
        // fatal. Otherwise many unit tests will pass even when there are bugs.
        if (internalError) {
          throw new IOException("fsck encountered internal errors!");
        }

        // DFSck client scans for the string HEALTHY/CORRUPT to check the status
        // of file system and return appropriate code. Changing the output
        // string might break testcases. Also note this must be the last line
        // of the report.
        if (res.isHealthy()) {
          out.print("\n\nThe filesystem under path '" + path + "' " + HEALTHY_STATUS);
        } else {
          out.print("\n\nThe filesystem under path '" + path + "' " + CORRUPT_STATUS);
        }
      } else {
        out.print("\n\nPath '" + path + "' " + NONEXISTENT_STATUS);
      }
    } catch (Exception e) {
      String errMsg = "Fsck on path '" + path + "' " + FAILURE_STATUS;
      LOG.warn(errMsg, e);
      out.println("FSCK ended at " + new Date() + " in "
          + (Time.monotonicNow() - startTime + " milliseconds"));
      out.println(e.getMessage());
      out.print("\n\n" + errMsg);
    } finally {
      // The writer is owned by this method from here on; always close it.
      out.close();
    }
  }
private void listCorruptFileBlocks() throws IOException {
Collection<FSNamesystem.CorruptFileBlockInfo> corruptFiles = namenode.
getNamesystem().listCorruptFileBlocks(path, currentCookie);
int numCorruptFiles = corruptFiles.size();
String filler;
if (numCorruptFiles > 0) {
filler = Integer.toString(numCorruptFiles);
} else if (currentCookie[0].equals("0")) {
filler = "no";
} else {
filler = "no more";
}
out.println("Cookie:\t" + currentCookie[0]);
for (FSNamesystem.CorruptFileBlockInfo c : corruptFiles) {
out.println(c.toString());
}
out.println("\n\nThe filesystem under path '" + path + "' has " + filler
+ " CORRUPT files");
out.println();
}
@VisibleForTesting
void check(String parent, HdfsFileStatus file, Result res) throws IOException {
String path = file.getFullName(parent);
if (file.isDir()) {
checkDir(path, res);
return;
}
if (file.isSymlink()) {
if (showFiles) {
out.println(path + " <symlink>");
}
res.totalSymlinks++;
return;
}
LocatedBlocks blocks = getBlockLocations(path, file);
if (blocks == null) { // the file is deleted
return;
}
collectFileSummary(path, file, res, blocks);
collectBlocksSummary(parent, file, res, blocks);
}
  /**
   * Recurses into a directory, paging through its listing and checking each
   * entry; also descends into the .snapshot subtree when snapshots are
   * included.
   */
  private void checkDir(String path, Result res) throws IOException {
    if (snapshottableDirs != null && snapshottableDirs.contains(path)) {
      String snapshotPath = (path.endsWith(Path.SEPARATOR) ? path : path
          + Path.SEPARATOR)
          + HdfsConstants.DOT_SNAPSHOT_DIR;
      HdfsFileStatus snapshotFileInfo = namenode.getRpcServer().getFileInfo(
          snapshotPath);
      check(snapshotPath, snapshotFileInfo, res);
    }
    byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;
    DirectoryListing thisListing;
    if (showFiles) {
      out.println(path + " <dir>");
    }
    res.totalDirs++;
    do {
      assert lastReturnedName != null;
      // Page through the directory; each call resumes after the last name
      // returned by the previous page.
      thisListing = namenode.getRpcServer().getListing(
          path, lastReturnedName, false);
      if (thisListing == null) {
        return;
      }
      HdfsFileStatus[] files = thisListing.getPartialListing();
      for (int i = 0; i < files.length; i++) {
        check(path, files[i], res);
      }
      lastReturnedName = thisListing.getLastName();
    } while (thisListing.hasMore());
  }
private LocatedBlocks getBlockLocations(String path, HdfsFileStatus file)
throws IOException {
long fileLen = file.getLen();
LocatedBlocks blocks = null;
final FSNamesystem fsn = namenode.getNamesystem();
fsn.readLock();
try {
blocks = FSDirStatAndListingOp.getBlockLocations(
fsn.getFSDirectory(), fsn.getPermissionChecker(),
path, 0, fileLen, false)
.blocks;
} catch (FileNotFoundException fnfe) {
blocks = null;
} finally {
fsn.readUnlock();
}
return blocks;
}
private void collectFileSummary(String path, HdfsFileStatus file, Result res,
LocatedBlocks blocks) throws IOException {
long fileLen = file.getLen();
boolean isOpen = blocks.isUnderConstruction();
if (isOpen && !showOpenFiles) {
// We collect these stats about open files to report with default options
res.totalOpenFilesSize += fileLen;
res.totalOpenFilesBlocks += blocks.locatedBlockCount();
res.totalOpenFiles++;
return;
}
res.totalFiles++;
res.totalSize += fileLen;
res.totalBlocks += blocks.locatedBlockCount();
if (showOpenFiles && isOpen) {
out.print(path + " " + fileLen + " bytes, " +
blocks.locatedBlockCount() + " block(s), OPENFORWRITE: ");
} else if (showFiles) {
out.print(path + " " + fileLen + " bytes, " +
blocks.locatedBlockCount() + " block(s): ");
} else if (showprogress) {
out.print('.');
}
if ((showprogress) && res.totalFiles % 100 == 0) {
out.println();
out.flush();
}
}
private void collectBlocksSummary(String parent, HdfsFileStatus file, Result res,
LocatedBlocks blocks) throws IOException {
String path = file.getFullName(parent);
boolean isOpen = blocks.isUnderConstruction();
if (isOpen && !showOpenFiles) {
return;
}
int missing = 0;
int corrupt = 0;
long missize = 0;
long corruptSize = 0;
int underReplicatedPerFile = 0;
int misReplicatedPerFile = 0;
StringBuilder report = new StringBuilder();
int blockNumber = 0;
final LocatedBlock lastBlock = blocks.getLastLocatedBlock();
for (LocatedBlock lBlk : blocks.getLocatedBlocks()) {
ExtendedBlock block = lBlk.getBlock();
if (!blocks.isLastBlockComplete() && lastBlock != null &&
lastBlock.getBlock().equals(block)) {
// this is the last block and this is not complete. ignore it since
// it is under construction
continue;
}
BlockManager bm = namenode.getNamesystem().getBlockManager();
final BlockInfo storedBlock = bm.getStoredBlock(
block.getLocalBlock());
// count decommissionedReplicas / decommissioningReplicas
NumberReplicas numberReplicas = bm.countNodes(storedBlock);
int decommissionedReplicas = numberReplicas.decommissioned();;
int decommissioningReplicas = numberReplicas.decommissioning();
res.decommissionedReplicas += decommissionedReplicas;
res.decommissioningReplicas += decommissioningReplicas;
// count total replicas
int liveReplicas = numberReplicas.liveReplicas();
int totalReplicasPerBlock = liveReplicas + decommissionedReplicas +
decommissioningReplicas;
res.totalReplicas += totalReplicasPerBlock;
// count expected replicas
short targetFileReplication = file.getReplication();
res.numExpectedReplicas += targetFileReplication;
// count under min repl'd blocks
if(totalReplicasPerBlock < res.minReplication){
res.numUnderMinReplicatedBlocks++;
}
// count excessive Replicas / over replicated blocks
if (liveReplicas > targetFileReplication) {
res.excessiveReplicas += (liveReplicas - targetFileReplication);
res.numOverReplicatedBlocks += 1;
}
// count corrupt blocks
boolean isCorrupt = lBlk.isCorrupt();
if (isCorrupt) {
res.addCorrupt(block.getNumBytes());
corrupt++;
corruptSize += block.getNumBytes();
out.print("\n" + path + ": CORRUPT blockpool " +
block.getBlockPoolId() + " block " + block.getBlockName() + "\n");
}
// count minimally replicated blocks
if (totalReplicasPerBlock >= res.minReplication)
res.numMinReplicatedBlocks++;
// count missing replicas / under replicated blocks
if (totalReplicasPerBlock < targetFileReplication && totalReplicasPerBlock > 0) {
res.missingReplicas += (targetFileReplication - totalReplicasPerBlock);
res.numUnderReplicatedBlocks += 1;
underReplicatedPerFile++;
if (!showFiles) {
out.print("\n" + path + ": ");
}
out.println(" Under replicated " + block +
". Target Replicas is " +
targetFileReplication + " but found " +
liveReplicas + " live replica(s), " +
decommissionedReplicas + " decommissioned replica(s) and " +
decommissioningReplicas + " decommissioning replica(s).");
}
// count mis replicated blocks
BlockPlacementStatus blockPlacementStatus = bpPolicy
.verifyBlockPlacement(path, lBlk, targetFileReplication);
if (!blockPlacementStatus.isPlacementPolicySatisfied()) {
res.numMisReplicatedBlocks++;
misReplicatedPerFile++;
if (!showFiles) {
if(underReplicatedPerFile == 0)
out.println();
out.print(path + ": ");
}
out.println(" Replica placement policy is violated for " +
block + ". " + blockPlacementStatus.getErrorDescription());
}
// count storage summary
if (this.showStoragePolcies && lBlk.getStorageTypes() != null) {
countStorageTypeSummary(file, lBlk);
}
// report
String blkName = block.toString();
report.append(blockNumber + ". " + blkName + " len=" + block.getNumBytes());
if (totalReplicasPerBlock == 0 && !isCorrupt) {
// If the block is corrupted, it means all its available replicas are
// corrupted. We don't mark it as missing given these available replicas
// might still be accessible as the block might be incorrectly marked as
// corrupted by client machines.
report.append(" MISSING!");
res.addMissing(block.toString(), block.getNumBytes());
missing++;
missize += block.getNumBytes();
} else {
report.append(" Live_repl=" + liveReplicas);
if (showLocations || showRacks || showReplicaDetails) {
StringBuilder sb = new StringBuilder("[");
Iterable<DatanodeStorageInfo> storages = bm.getStorages(block.getLocalBlock());
for (Iterator<DatanodeStorageInfo> iterator = storages.iterator(); iterator.hasNext();) {
DatanodeStorageInfo storage = iterator.next();
DatanodeDescriptor dnDesc = storage.getDatanodeDescriptor();
if (showRacks) {
sb.append(NodeBase.getPath(dnDesc));
} else {
sb.append(new DatanodeInfoWithStorage(dnDesc, storage.getStorageID(), storage
.getStorageType()));
}
if (showReplicaDetails) {
LightWeightLinkedSet<Block> blocksExcess =
bm.excessReplicateMap.get(dnDesc.getDatanodeUuid());
Collection<DatanodeDescriptor> corruptReplicas =
bm.getCorruptReplicas(block.getLocalBlock());
sb.append("(");
if (dnDesc.isDecommissioned()) {
sb.append("DECOMMISSIONED)");
} else if (dnDesc.isDecommissionInProgress()) {
sb.append("DECOMMISSIONING)");
} else if (corruptReplicas != null && corruptReplicas.contains(dnDesc)) {
sb.append("CORRUPT)");
} else if (blocksExcess != null && blocksExcess.contains(block.getLocalBlock())) {
sb.append("EXCESS)");
} else if (dnDesc.isStale(this.staleInterval)) {
sb.append("STALE_NODE)");
} else if (storage.areBlockContentsStale()) {
sb.append("STALE_BLOCK_CONTENT)");
} else {
sb.append("LIVE)");
}
}
if (iterator.hasNext()) {
sb.append(", ");
}
}
sb.append(']');
report.append(" " + sb.toString());
}
}
report.append('\n');
blockNumber++;
}
// count corrupt file & move or delete if necessary
if ((missing > 0) || (corrupt > 0)) {
if (!showFiles) {
if (missing > 0) {
out.print("\n" + path + ": MISSING " + missing
+ " blocks of total size " + missize + " B.");
}
if (corrupt > 0) {
out.print("\n" + path + ": CORRUPT " + corrupt
+ " blocks of total size " + corruptSize + " B.");
}
}
res.corruptFiles++;
if (isOpen) {
LOG.info("Fsck: ignoring open file " + path);
} else {
if (doMove) copyBlocksToLostFound(parent, file, blocks);
if (doDelete) deleteCorruptedFile(path);
}
}
if (showFiles) {
if (missing > 0 || corrupt > 0) {
if (missing > 0) {
out.print(" MISSING " + missing + " blocks of total size " +
missize + " B\n");
}
if (corrupt > 0) {
out.print(" CORRUPT " + corrupt + " blocks of total size " +
corruptSize + " B\n");
}
} else if (underReplicatedPerFile == 0 && misReplicatedPerFile == 0) {
out.print(" OK\n");
}
if (showBlocks) {
out.print(report.toString() + "\n");
}
}
}
private void countStorageTypeSummary(HdfsFileStatus file, LocatedBlock lBlk) {
StorageType[] storageTypes = lBlk.getStorageTypes();
storageTypeSummary.add(Arrays.copyOf(storageTypes, storageTypes.length),
namenode.getNamesystem().getBlockManager()
.getStoragePolicy(file.getStoragePolicy()));
}
  /**
   * Deletes a corrupted file (recursively) via the RPC server. A failure is
   * logged and recorded in {@link #internalError} rather than propagated, so
   * the rest of the fsck run can continue.
   */
  private void deleteCorruptedFile(String path) {
    try {
      namenode.getRpcServer().delete(path, true);
      LOG.info("Fsck: deleted corrupt file " + path);
    } catch (Exception e) {
      LOG.error("Fsck: error deleting corrupted file " + path, e);
      internalError = true;
    }
  }
boolean hdfsPathExists(String path)
throws AccessControlException, UnresolvedLinkException, IOException {
try {
HdfsFileStatus hfs = namenode.getRpcServer().getFileInfo(path);
return (hfs != null);
} catch (FileNotFoundException e) {
return false;
}
}
  /**
   * Salvages whatever block data is still reachable for a corrupt file by
   * copying it into /lost+found as one or more "chain" files. Each chain holds
   * the longest consecutive run of readable blocks; a block with no live
   * location closes the current chain, and the next readable block starts a
   * new one.
   */
  private void copyBlocksToLostFound(String parent, HdfsFileStatus file,
        LocatedBlocks blocks) throws IOException {
    final DFSClient dfs = new DFSClient(NameNode.getAddress(conf), conf);
    final String fullName = file.getFullName(parent);
    OutputStream fos = null;
    try {
      if (!lfInited) {
        lostFoundInit(dfs);
      }
      if (!lfInitedOk) {
        throw new IOException("failed to initialize lost+found");
      }
      String target = lostFound + fullName;
      if (hdfsPathExists(target)) {
        // Never overwrite a previous salvage attempt.
        LOG.warn("Fsck: can't copy the remains of " + fullName + " to " +
          "lost+found, because " + target + " already exists.");
        return;
      }
      if (!namenode.getRpcServer().mkdirs(
          target, file.getPermission(), true)) {
        throw new IOException("failed to create directory " + target);
      }
      // create chains
      int chain = 0;
      boolean copyError = false;
      for (LocatedBlock lBlk : blocks.getLocatedBlocks()) {
        LocatedBlock lblock = lBlk;
        DatanodeInfo[] locs = lblock.getLocations();
        if (locs == null || locs.length == 0) {
          // No replica anywhere: close the current chain and skip this block.
          if (fos != null) {
            fos.flush();
            fos.close();
            fos = null;
          }
          continue;
        }
        if (fos == null) {
          // Start a new chain file for the next run of readable blocks.
          fos = dfs.create(target + "/" + chain, true);
          chain++;
        }

        // copy the block. It's a pity it's not abstracted from DFSInputStream ...
        try {
          copyBlock(dfs, lblock, fos);
        } catch (Exception e) {
          // Copy failed: abandon this chain but keep salvaging later blocks.
          LOG.error("Fsck: could not copy block " + lblock.getBlock() +
              " to " + target, e);
          fos.flush();
          fos.close();
          fos = null;
          internalError = true;
          copyError = true;
        }
      }
      if (copyError) {
        LOG.warn("Fsck: there were errors copying the remains of the " +
          "corrupted file " + fullName + " to /lost+found");
      } else {
        LOG.info("Fsck: copied the remains of the corrupted file " +
          fullName + " to /lost+found");
      }
    } catch (Exception e) {
      LOG.error("copyBlocksToLostFound: error processing " + fullName, e);
      internalError = true;
    } finally {
      // Close the last open chain and the client in all cases.
      if (fos != null) fos.close();
      dfs.close();
    }
  }
/*
* XXX (ab) Bulk of this method is copied verbatim from {@link DFSClient}, which is
* bad. Both places should be refactored to provide a method to copy blocks
* around.
*/
  /**
   * Stream the contents of one block from a live datanode into {@code fos},
   * verifying checksums while reading.
   *
   * Datanodes that fail to serve the block are added to {@code deadNodes};
   * once every replica has failed, the dead set is cleared and the whole
   * location list is retried after a pause, up to the client's
   * max-block-acquire-failures limit.
   *
   * @param dfs    client supplying configuration, SASL and cache context
   * @param lblock block to copy, including replica locations and access token
   * @param fos    destination stream; written to but NOT closed here
   * @throws Exception if no datanode could serve the block, or the number of
   *         bytes read does not match the block's recorded size
   */
  private void copyBlock(final DFSClient dfs, LocatedBlock lblock,
                         OutputStream fos) throws Exception {
    int failures = 0;
    InetSocketAddress targetAddr = null;
    TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
    BlockReader blockReader = null;
    ExtendedBlock block = lblock.getBlock();
    // Keep choosing datanodes until a block reader is successfully opened.
    while (blockReader == null) {
      DatanodeInfo chosenNode;
      try {
        chosenNode = bestNode(dfs, lblock.getLocations(), deadNodes);
        targetAddr = NetUtils.createSocketAddr(chosenNode.getXferAddr());
      } catch (IOException ie) {
        // bestNode() throws when every replica is currently marked dead.
        if (failures >= HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT) {
          throw new IOException("Could not obtain block " + lblock, ie);
        }
        LOG.info("Could not obtain block from any node: " + ie);
        try {
          // Back off before retrying the full replica list.
          Thread.sleep(10000);
        } catch (InterruptedException iex) {
        }
        // Forget previous failures and retry all locations from scratch.
        deadNodes.clear();
        failures++;
        continue;
      }
      try {
        String file = BlockReaderFactory.getFileName(targetAddr,
            block.getBlockPoolId(), block.getBlockId());
        // Read the whole block (offset 0, length -1) with checksum
        // verification, identifying ourselves to the datanode as "fsck".
        blockReader = new BlockReaderFactory(dfs.getConf()).
            setFileName(file).
            setBlock(block).
            setBlockToken(lblock.getBlockToken()).
            setStartOffset(0).
            setLength(-1).
            setVerifyChecksum(true).
            setClientName("fsck").
            setDatanodeInfo(chosenNode).
            setInetSocketAddress(targetAddr).
            setCachingStrategy(CachingStrategy.newDropBehind()).
            setClientCacheContext(dfs.getClientContext()).
            setConfiguration(namenode.conf).
            setRemotePeerFactory(new RemotePeerFactory() {
              @Override
              public Peer newConnectedPeer(InetSocketAddress addr,
                  Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
                  throws IOException {
                Peer peer = null;
                Socket s = NetUtils.getDefaultSocketFactory(conf).createSocket();
                try {
                  s.connect(addr, HdfsConstants.READ_TIMEOUT);
                  s.setSoTimeout(HdfsConstants.READ_TIMEOUT);
                  peer = TcpPeerServer.peerFromSocketAndKey(
                      dfs.getSaslDataTransferClient(), s, NamenodeFsck.this,
                      blockToken, datanodeId);
                } finally {
                  // Close the raw socket only if peer creation failed;
                  // otherwise the peer owns it.
                  if (peer == null) {
                    IOUtils.closeQuietly(s);
                  }
                }
                return peer;
              }
            }).
            build();
      } catch (IOException ex) {
        // Put chosen node into dead list, continue
        LOG.info("Failed to connect to " + targetAddr + ":" + ex);
        deadNodes.add(chosenNode);
      }
    }
    byte[] buf = new byte[1024];
    int cnt = 0;
    boolean success = true;
    long bytesRead = 0;
    try {
      // Copy the block payload and count bytes so we can sanity-check the
      // transfer against the block's recorded length below.
      while ((cnt = blockReader.read(buf, 0, buf.length)) > 0) {
        fos.write(buf, 0, cnt);
        bytesRead += cnt;
      }
      if ( bytesRead != block.getNumBytes() ) {
        throw new IOException("Recorded block size is " + block.getNumBytes() +
                              ", but datanode returned " +bytesRead+" bytes");
      }
    } catch (Exception e) {
      LOG.error("Error reading block", e);
      success = false;
    } finally {
      blockReader.close();
    }
    if (!success) {
      throw new Exception("Could not copy block data for " + lblock.getBlock());
    }
  }
  /**
   * {@inheritDoc}
   *
   * Delegates to the NameNode's RPC server so fsck's block transfers use the
   * same data-transfer encryption key material as regular clients.
   */
  @Override
  public DataEncryptionKey newDataEncryptionKey() throws IOException {
    return namenode.getRpcServer().getDataEncryptionKey();
  }
/*
* XXX (ab) See comment above for copyBlock().
*
* Pick the best node from which to stream the data.
* That's the local one, if available.
*/
private DatanodeInfo bestNode(DFSClient dfs, DatanodeInfo[] nodes,
TreeSet<DatanodeInfo> deadNodes) throws IOException {
if ((nodes == null) ||
(nodes.length - deadNodes.size() < 1)) {
throw new IOException("No live nodes contain current block");
}
DatanodeInfo chosenNode;
do {
chosenNode = nodes[ThreadLocalRandom.current().nextInt(nodes.length)];
} while (deadNodes.contains(chosenNode));
return chosenNode;
}
private void lostFoundInit(DFSClient dfs) {
lfInited = true;
try {
String lfName = "/lost+found";
final HdfsFileStatus lfStatus = dfs.getFileInfo(lfName);
if (lfStatus == null) { // not exists
lfInitedOk = dfs.mkdirs(lfName, null, true);
lostFound = lfName;
} else if (!lfStatus.isDir()) { // exists but not a directory
LOG.warn("Cannot use /lost+found : a regular file with this name exists.");
lfInitedOk = false;
} else { // exists and is a directory
lostFound = lfName;
lfInitedOk = true;
}
} catch (Exception e) {
e.printStackTrace();
lfInitedOk = false;
}
if (lostFound == null) {
LOG.warn("Cannot initialize /lost+found .");
lfInitedOk = false;
internalError = true;
}
}
/**
* FsckResult of checking, plus overall DFS statistics.
*/
@VisibleForTesting
static class Result {
final List<String> missingIds = new ArrayList<String>();
long missingSize = 0L;
long corruptFiles = 0L;
long corruptBlocks = 0L;
long corruptSize = 0L;
long excessiveReplicas = 0L;
long missingReplicas = 0L;
long decommissionedReplicas = 0L;
long decommissioningReplicas = 0L;
long numUnderMinReplicatedBlocks=0L;
long numOverReplicatedBlocks = 0L;
long numUnderReplicatedBlocks = 0L;
long numMisReplicatedBlocks = 0L; // blocks that do not satisfy block placement policy
long numMinReplicatedBlocks = 0L; // minimally replicatedblocks
long totalBlocks = 0L;
long numExpectedReplicas = 0L;
long totalOpenFilesBlocks = 0L;
long totalFiles = 0L;
long totalOpenFiles = 0L;
long totalDirs = 0L;
long totalSymlinks = 0L;
long totalSize = 0L;
long totalOpenFilesSize = 0L;
long totalReplicas = 0L;
final short replication;
final int minReplication;
Result(Configuration conf) {
this.replication = (short)conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY,
DFSConfigKeys.DFS_REPLICATION_DEFAULT);
this.minReplication = (short)conf.getInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY,
DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_DEFAULT);
}
/**
* DFS is considered healthy if there are no missing blocks.
*/
boolean isHealthy() {
return ((missingIds.size() == 0) && (corruptBlocks == 0));
}
/** Add a missing block name, plus its size. */
void addMissing(String id, long size) {
missingIds.add(id);
missingSize += size;
}
/** Add a corrupt block. */
void addCorrupt(long size) {
corruptBlocks++;
corruptSize += size;
}
/** Return the actual replication factor. */
float getReplicationFactor() {
if (totalBlocks == 0)
return 0.0f;
return (float) (totalReplicas) / (float) totalBlocks;
}
@Override
public String toString() {
StringBuilder res = new StringBuilder();
res.append("Status: ").append((isHealthy() ? "HEALTHY" : "CORRUPT"))
.append("\n Total size:\t").append(totalSize).append(" B");
if (totalOpenFilesSize != 0) {
res.append(" (Total open files size: ").append(totalOpenFilesSize)
.append(" B)");
}
res.append("\n Total dirs:\t").append(totalDirs).append(
"\n Total files:\t").append(totalFiles);
res.append("\n Total symlinks:\t\t").append(totalSymlinks);
if (totalOpenFiles != 0) {
res.append(" (Files currently being written: ").append(totalOpenFiles)
.append(")");
}
res.append("\n Total blocks (validated):\t").append(totalBlocks);
if (totalBlocks > 0) {
res.append(" (avg. block size ").append((totalSize / totalBlocks))
.append(" B)");
}
if (totalOpenFilesBlocks != 0) {
res.append(" (Total open file blocks (not validated): ").append(
totalOpenFilesBlocks).append(")");
}
if (corruptFiles > 0 || numUnderMinReplicatedBlocks > 0) {
res.append("\n ********************************");
if(numUnderMinReplicatedBlocks>0){
res.append("\n UNDER MIN REPL'D BLOCKS:\t").append(numUnderMinReplicatedBlocks);
if(totalBlocks>0){
res.append(" (").append(
((float) (numUnderMinReplicatedBlocks * 100) / (float) totalBlocks))
.append(" %)");
}
res.append("\n ").append(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY + ":\t")
.append(minReplication);
}
if(corruptFiles>0) {
res.append(
"\n CORRUPT FILES:\t").append(corruptFiles);
if (missingSize > 0) {
res.append("\n MISSING BLOCKS:\t").append(missingIds.size()).append(
"\n MISSING SIZE:\t\t").append(missingSize).append(" B");
}
if (corruptBlocks > 0) {
res.append("\n CORRUPT BLOCKS: \t").append(corruptBlocks).append(
"\n CORRUPT SIZE:\t\t").append(corruptSize).append(" B");
}
}
res.append("\n ********************************");
}
res.append("\n Minimally replicated blocks:\t").append(
numMinReplicatedBlocks);
if (totalBlocks > 0) {
res.append(" (").append(
((float) (numMinReplicatedBlocks * 100) / (float) totalBlocks))
.append(" %)");
}
res.append("\n Over-replicated blocks:\t")
.append(numOverReplicatedBlocks);
if (totalBlocks > 0) {
res.append(" (").append(
((float) (numOverReplicatedBlocks * 100) / (float) totalBlocks))
.append(" %)");
}
res.append("\n Under-replicated blocks:\t").append(
numUnderReplicatedBlocks);
if (totalBlocks > 0) {
res.append(" (").append(
((float) (numUnderReplicatedBlocks * 100) / (float) totalBlocks))
.append(" %)");
}
res.append("\n Mis-replicated blocks:\t\t")
.append(numMisReplicatedBlocks);
if (totalBlocks > 0) {
res.append(" (").append(
((float) (numMisReplicatedBlocks * 100) / (float) totalBlocks))
.append(" %)");
}
res.append("\n Default replication factor:\t").append(replication)
.append("\n Average block replication:\t").append(
getReplicationFactor()).append("\n Missing blocks:\t\t").append(
missingIds.size()).append("\n Corrupt blocks:\t\t").append(
corruptBlocks).append("\n Missing replicas:\t\t").append(
missingReplicas);
if (totalReplicas > 0) {
res.append(" (").append(
((float) (missingReplicas * 100) / (float) numExpectedReplicas)).append(
" %)");
}
if (decommissionedReplicas > 0) {
res.append("\n DecommissionedReplicas:\t").append(
decommissionedReplicas);
}
if (decommissioningReplicas > 0) {
res.append("\n DecommissioningReplicas:\t").append(
decommissioningReplicas);
}
return res.toString();
}
}
}
| |
/*
* Copyright 2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.web.pages;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.Hours;
import org.joda.time.LocalDateTime;
import org.joda.time.Minutes;
import org.joda.time.ReadablePeriod;
import org.joda.time.Seconds;
import org.joda.time.format.DateTimeFormat;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import azkaban.app.AzkabanApplication;
import azkaban.app.JobDescriptor;
import azkaban.app.JobManager;
import azkaban.common.web.Page;
import azkaban.flow.ExecutableFlow;
import azkaban.flow.Flow;
import azkaban.flow.FlowManager;
import azkaban.jobs.JobExecutionException;
import azkaban.jobs.JobExecutorManager.ExecutingJobAndInstance;
import azkaban.util.json.JSONUtils;
import azkaban.web.AbstractAzkabanServlet;
/**
* The main page
*
* @author jkreps
*
*/
public class IndexServlet extends AbstractAzkabanServlet {

    private static final Logger logger = Logger.getLogger(IndexServlet.class.getName());

    private static final long serialVersionUID = 1;

    /**
     * Renders the main index page, listing all flows plus the currently
     * scheduled, executing and completed jobs.
     */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException,
            IOException {
        /* set runtime properties from request and response */
        super.setRuntimeProperties(req, resp);
        AzkabanApplication app = getApplication();
        // NOTE(review): loadJobDescriptors() is presumably invoked for its
        // refresh side effect since the result is unused — confirm.
        @SuppressWarnings("unused")
        Map<String, JobDescriptor> descriptors = app.getJobManager().loadJobDescriptors();
        Page page = newPage(req, resp, "azkaban/web/pages/index.vm");
        page.add("logDir", app.getLogDirectory());
        page.add("flows", app.getAllFlows());
        page.add("scheduled", app.getScheduleManager().getSchedule());
        page.add("executing", app.getJobExecutorManager().getExecutingJobs());
        page.add("completed", app.getJobExecutorManager().getCompleted());
        page.add("rootJobNames", app.getAllFlows().getRootFlowNames());
        page.add("folderNames", app.getAllFlows().getFolders());
        page.add("jobDescComparator", JobDescriptor.NAME_COMPARATOR);
        page.render();
    }

    /**
     * Dispatches POSTed actions: "loadjobs" (JSON job tree for a folder),
     * "unschedule", "cancel" and "schedule". Unknown actions raise a
     * ServletException; all others redirect back to the context path unless
     * the action supplied its own redirect.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        /* set runtime properties from request and response */
        super.setRuntimeProperties(req, resp);
        AzkabanApplication app = getApplication();
        String action = getParam(req, "action");
        if ("loadjobs".equals(action)) {
            resp.setContentType("application/json");
            String folder = getParam(req, "folder");
            resp.getWriter().print(getJSONJobsForFolder(app.getAllFlows(), folder));
            resp.getWriter().flush();
            return;
        }
        else if("unschedule".equals(action)) {
            String jobid = getParam(req, "job");
            app.getScheduleManager().removeScheduledJob(jobid);
        } else if("cancel".equals(action)) {
            cancelJob(app, req);
        } else if("schedule".equals(action)) {
            String redirect = scheduleJobs(app, req, resp);
            if (!redirect.isEmpty()) {
                resp.sendRedirect(redirect);
                return;
            }
        } else {
            throw new ServletException("Unknown action: " + action);
        }
        resp.sendRedirect(req.getContextPath());
    }

    /**
     * Builds a JSON array of dependency trees for the root jobs of the given
     * folder, sorted by name.
     */
    @SuppressWarnings("unchecked")
    private String getJSONJobsForFolder(FlowManager manager, String folder) {
        List<String> rootJobs = manager.getRootNamesByFolder(folder);
        Collections.sort(rootJobs);
        JSONArray rootJobObj = new JSONArray();
        for (String root: rootJobs) {
            Flow flow = manager.getFlow(root);
            JSONObject flowObj = getJSONDependencyTree(flow);
            rootJobObj.add(flowObj);
        }
        return rootJobObj.toJSONString();
    }

    /**
     * Recursively converts a flow into a JSON object of the form
     * {"name": ..., "dep": [children...]}, children sorted by name.
     */
    @SuppressWarnings("unchecked")
    private JSONObject getJSONDependencyTree(Flow flow) {
        JSONObject jobObject = new JSONObject();
        jobObject.put("name", flow.getName());
        if (flow.hasChildren()) {
            JSONArray dependencies = new JSONArray();
            for(Flow child : flow.getChildren()) {
                JSONObject childObj = getJSONDependencyTree(child);
                dependencies.add(childObj);
            }
            Collections.sort(dependencies, new FlowComparator());
            jobObject.put("dep", dependencies);
        }
        return jobObject;
    }

    /** Orders flow JSON objects alphabetically by their "name" entry. */
    private class FlowComparator implements Comparator<JSONObject> {
        @Override
        public int compare(JSONObject arg0, JSONObject arg1) {
            String first = (String)arg0.get("name");
            String second = (String)arg1.get("name");
            return first.compareTo(second);
        }
    }

    /**
     * Cancels the job named by the "job" parameter, then also cancels any
     * currently executing flow whose id matches it.
     */
    private void cancelJob(AzkabanApplication app, HttpServletRequest req) throws ServletException {
        String jobId = getParam(req, "job");
        try {
            app.getJobExecutorManager().cancel(jobId);
        } catch (Exception e1) {
            // Pass the throwable so the stack trace is preserved; the
            // original concatenated it into the message, losing the trace.
            logger.error("Error cancelling job " + jobId, e1);
        }
        Collection<ExecutingJobAndInstance> executing = app.getJobExecutorManager().getExecutingJobs();
        for(ExecutingJobAndInstance curr: executing) {
            ExecutableFlow flow = curr.getExecutableFlow();
            final String flowId = flow.getId();
            if(flowId.equals(jobId)) {
                final String flowName = flow.getName();
                try {
                    if(flow.cancel()) {
                        addMessage(req, "Cancelled " + flowName);
                        logger.info("Job '" + flowName + "' cancelled from gui.");
                    } else {
                        logger.info("Couldn't cancel flow '" + flowName + "' for some reason.");
                        addError(req, "Failed to cancel flow " + flowName + ".");
                    }
                } catch(Exception e) {
                    logger.error("Exception while attempting to cancel flow '" + flowName + "'.", e);
                    addError(req, "Failed to cancel flow " + flowName + ": " + e.getMessage());
                }
            }
        }
    }

    /**
     * Handles the "schedule" action for one or more selected jobs. Depending
     * on the sub-action this schedules at a time of day ("schedule"),
     * registers a trigger ("schedule_trigger"), or runs immediately
     * ("run_now").
     *
     * @return a redirect path, or the empty string when the caller should
     *         redirect to the default location
     */
    private String scheduleJobs(AzkabanApplication app,
                                HttpServletRequest req,
                                HttpServletResponse resp) throws IOException, ServletException {
        String[] jobNames = req.getParameterValues("jobs");
        if(!hasParam(req, "jobs")) {
            addError(req, "You must select at least one job to run.");
            return "";
        }
        if (hasParam(req, "flow_now")) {
            if (jobNames.length > 1) {
                addError(req, "Can only run flow instance on one job.");
                return "";
            }
            String jobName = jobNames[0];
            JobManager jobManager = app.getJobManager();
            JobDescriptor descriptor = jobManager.getJobDescriptor(jobName);
            if (descriptor == null) {
                // Fixed copy-pasted error text: this branch means the job was
                // not found, not that too many jobs were selected.
                addError(req, "Job '" + jobName + "' does not exist.");
                return "";
            }
            else {
                return req.getContextPath() + "/flow?job_id=" + jobName;
            }
        } else {
            for(String job: jobNames) {
                if(hasParam(req, "schedule")) {
                    int hour = getIntParam(req, "hour");
                    int minutes = getIntParam(req, "minutes");
                    boolean isPm = getParam(req, "am_pm").equalsIgnoreCase("pm");
                    String scheduledDate = req.getParameter("date");
                    DateTime day = null;
                    if(scheduledDate == null || scheduledDate.trim().length() == 0) {
                        day = new LocalDateTime().toDateTime();
                    } else {
                        try {
                            day = DateTimeFormat.forPattern("MM-dd-yyyy").parseDateTime(scheduledDate);
                        } catch(IllegalArgumentException e) {
                            addError(req, "Invalid date: '" + scheduledDate + "'");
                            return "";
                        }
                    }
                    ReadablePeriod thePeriod = null;
                    if(hasParam(req, "is_recurring"))
                        thePeriod = parsePeriod(req);
                    // Convert the 12-hour UI value to a 24-hour clock.
                    if(isPm && hour < 12)
                        hour += 12;
                    hour %= 24;
                    app.getScheduleManager().schedule(job,
                                                     day.withHourOfDay(hour)
                                                        .withMinuteOfHour(minutes)
                                                        .withSecondOfMinute(0),
                                                     thePeriod,
                                                     false);
                    addMessage(req, job + " scheduled.");
                } else if(hasParam(req, "schedule_trigger")) {
                    String topic = getParam(req, "topic");
                    // Collect key0/val0, key1/val1, ... pairs until either
                    // side of a pair is absent.
                    HashMap<String, String> criteria = new HashMap<String, String>();
                    for(int i = 0;; i++) {
                        String key = req.getParameter("key" + i);
                        String val = req.getParameter("val" + i);
                        if(key == null || val == null)
                            break;
                        if(key.length() > 0)
                            criteria.put(key, val);
                    }
                    String group = getParam(req, "group_id");
                    int startHour = getIntParam(req, "start_hour");
                    boolean startIsPm = getParam(req, "start_am_pm").equalsIgnoreCase("pm");
                    int stopHour = getIntParam(req, "stop_hour");
                    if(startIsPm) { startHour+=12; startHour%=24; }
                    boolean stopIsPm = getParam(req, "stop_am_pm").equalsIgnoreCase("pm");
                    if(stopIsPm) { stopHour+=12; stopHour%=24; }
                    app.getScheduleManager().schedule(job,
                                                     topic,
                                                     criteria,
                                                     group,
                                                     startHour,
                                                     stopHour,
                                                     false); //with dependencies.
                    addMessage(req, topic+" scheduled.");
                } else if(hasParam(req, "run_now")) {
                    boolean ignoreDeps = !hasParam(req, "include_deps");
                    try {
                        app.getJobExecutorManager().execute(job, ignoreDeps);
                    }
                    catch (JobExecutionException e) {
                        addError(req, e.getMessage());
                        return "";
                    }
                    addMessage(req, "Running " + job);
                }
                else {
                    addError(req, "Neither run_now nor schedule param is set.");
                }
            }
            return "";
        }
    }

    /**
     * Parses the "period"/"period_units" request parameters into a Joda
     * period (d=days, h=hours, m=minutes, s=seconds).
     *
     * @throws ServletException for an unrecognized unit
     */
    private ReadablePeriod parsePeriod(HttpServletRequest req) throws ServletException {
        int period = getIntParam(req, "period");
        String periodUnits = getParam(req, "period_units");
        if("d".equals(periodUnits))
            return Days.days(period);
        else if("h".equals(periodUnits))
            return Hours.hours(period);
        else if("m".equals(periodUnits))
            return Minutes.minutes(period);
        else if("s".equals(periodUnits))
            return Seconds.seconds(period);
        else
            throw new ServletException("Unknown period unit: " + periodUnits);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.near;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import javax.cache.Cache;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.Event;
import org.apache.ignite.events.EventType;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
import org.apache.ignite.internal.util.typedef.CAX;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
/**
* Test for distributed queries with node restarts.
*/
public class IgniteCacheQueryNodeRestartSelfTest extends GridCacheAbstractSelfTest {
    /** Number of permanently running grids. */
    private static final int GRID_CNT = 3;
    /** Number of integer keys preloaded into the cache. */
    private static final int KEY_CNT = 1000;
    /** Shared IP finder so all grids discover each other locally. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return GRID_CNT;
    }
    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return 3 * 60 * 1000;
    }
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);
        TcpDiscoverySpi disco = new TcpDiscoverySpi();
        disco.setIpFinder(ipFinder);
        c.setDiscoverySpi(disco);
        // Partitioned transactional cache with one backup so data survives
        // the single restarting node, and synchronous rebalance/writes so
        // queries observe a consistent data set.
        CacheConfiguration<?, ?> cc = defaultCacheConfiguration();
        cc.setCacheMode(PARTITIONED);
        cc.setBackups(1);
        cc.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        cc.setAtomicityMode(TRANSACTIONAL);
        cc.setRebalanceMode(SYNC);
        cc.setAffinity(new RendezvousAffinityFunction(false, 15));
        cc.setIndexedTypes(
            Integer.class, Integer.class
        );
        c.setCacheConfiguration(cc);
        return c;
    }
    /**
     * JUnit.
     *
     * Runs SQL queries from multiple threads while a background thread
     * repeatedly starts and stops an extra node, asserting that every query
     * still sees all KEY_CNT entries.
     *
     * @throws Exception If failed.
     */
    @SuppressWarnings({"TooBroadScope"})
    public void testRestarts() throws Exception {
        int duration = 60 * 1000;
        int qryThreadNum = 10;
        final long nodeLifeTime = 2 * 1000;
        final int logFreq = 50;
        final IgniteCache<Integer, Integer> cache = grid(0).cache(DEFAULT_CACHE_NAME);
        assert cache != null;
        // Preload the data set that every query must observe in full.
        for (int i = 0; i < KEY_CNT; i++)
            cache.put(i, i);
        assertEquals(KEY_CNT, cache.size());
        final AtomicInteger qryCnt = new AtomicInteger();
        final AtomicBoolean done = new AtomicBoolean();
        // Query threads: each iteration runs a full-scan SQL query and
        // verifies that no key is missing from the result.
        IgniteInternalFuture<?> fut1 = multithreadedAsync(new CAX() {
            @Override public void applyx() throws IgniteCheckedException {
                while (!done.get()) {
                    Collection<Cache.Entry<Integer, Integer>> res =
                        cache.query(new SqlQuery<Integer, Integer>(Integer.class, "true")).getAll();
                    Set<Integer> keys = new HashSet<>();
                    for (Cache.Entry<Integer,Integer> entry : res)
                        keys.add(entry.getKey());
                    if (KEY_CNT > keys.size()) {
                        // Diagnose which keys were dropped before failing:
                        // confirm each missing key is still readable via get().
                        for (int i = 0; i < KEY_CNT; i++) {
                            if (!keys.contains(i))
                                assertEquals(Integer.valueOf(i), cache.get(i));
                        }
                        fail("res size: " + res.size());
                    }
                    assertEquals(KEY_CNT, keys.size());
                    int c = qryCnt.incrementAndGet();
                    if (c % logFreq == 0)
                        info("Executed queries: " + c);
                }
            }
        }, qryThreadNum, "query-thread");
        final AtomicInteger restartCnt = new AtomicInteger();
        CollectingEventListener lsnr = new CollectingEventListener();
        for (int i = 0; i < GRID_CNT; i++)
            grid(i).events().localListen(lsnr, EventType.EVT_CACHE_REBALANCE_STOPPED);
        // Restart thread: cycles one extra node (index GRID_CNT) up and down
        // for the duration of the test.
        IgniteInternalFuture<?> fut2 = multithreadedAsync(new Callable<Object>() {
            @SuppressWarnings({"BusyWait"})
            @Override public Object call() throws Exception {
                while (!done.get()) {
                    int idx = GRID_CNT;
                    startGrid(idx);
                    Thread.sleep(nodeLifeTime);
                    stopGrid(idx);
                    int c = restartCnt.incrementAndGet();
                    if (c % logFreq == 0)
                        info("Node restarts: " + c);
                }
                return true;
            }
        }, 1, "restart-thread");
        Thread.sleep(duration);
        info("Stopping..");
        done.set(true);
        fut2.get();
        info("Restarts stopped.");
        fut1.get();
        info("Queries stopped.");
        info("Awaiting rebalance events [restartCnt=" + restartCnt.get() + ']');
        // Each restart triggers a rebalance-stopped event per grid for both
        // the join and the departure, hence GRID_CNT * 2 per restart.
        boolean success = lsnr.awaitEvents(GRID_CNT * 2 * restartCnt.get(), 15000);
        for (int i = 0; i < GRID_CNT; i++)
            grid(i).events().stopLocalListen(lsnr, EventType.EVT_CACHE_REBALANCE_STOPPED);
        assert success;
    }
    /** Listener that will wait for specified number of events received. */
    private class CollectingEventListener implements IgnitePredicate<Event> {
        /** Registered events count. */
        private int evtCnt;
        /** {@inheritDoc} */
        @Override public synchronized boolean apply(Event evt) {
            evtCnt++;
            info("Processed event [evt=" + evt + ", evtCnt=" + evtCnt + ']');
            notifyAll();
            return true;
        }
        /**
         * Waits until total number of events processed is equal or greater then argument passed.
         *
         * @param cnt Number of events to wait.
         * @param timeout Timeout to wait.
         * @return {@code True} if successfully waited, {@code false} if timeout happened.
         * @throws InterruptedException If thread is interrupted.
         */
        public synchronized boolean awaitEvents(int cnt, long timeout) throws InterruptedException {
            long start = U.currentTimeMillis();
            long now = start;
            while (start + timeout > now) {
                if (evtCnt >= cnt)
                    return true;
                wait(start + timeout - now);
                now = U.currentTimeMillis();
            }
            return false;
        }
    }
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import com.google.common.collect.Lists;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.IgnoreIndices;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* A multi search API request.
*/
public class MultiSearchRequest extends ActionRequest<MultiSearchRequest> {
private List<SearchRequest> requests = Lists.newArrayList();
private IgnoreIndices ignoreIndices = IgnoreIndices.DEFAULT;
/**
* Add a search request to execute. Note, the order is important, the search response will be returned in the
* same order as the search requests.
*/
public MultiSearchRequest add(SearchRequestBuilder request) {
requests.add(request.request());
return this;
}
/**
* Add a search request to execute. Note, the order is important, the search response will be returned in the
* same order as the search requests.
*/
public MultiSearchRequest add(SearchRequest request) {
requests.add(request);
return this;
}
public MultiSearchRequest add(byte[] data, int from, int length, boolean contentUnsafe,
@Nullable String[] indices, @Nullable String[] types, @Nullable String searchType) throws Exception {
return add(new BytesArray(data, from, length), contentUnsafe, indices, types, searchType, null, IgnoreIndices.NONE, true);
}
public MultiSearchRequest add(BytesReference data, boolean contentUnsafe, @Nullable String[] indices, @Nullable String[] types, @Nullable String searchType, IgnoreIndices ignoreIndices) throws Exception {
return add(data, contentUnsafe, indices, types, searchType, null, ignoreIndices, true);
}
public MultiSearchRequest add(BytesReference data, boolean contentUnsafe, @Nullable String[] indices, @Nullable String[] types, @Nullable String searchType, @Nullable String routing, IgnoreIndices ignoreIndices, boolean allowExplicitIndex) throws Exception {
XContent xContent = XContentFactory.xContent(data);
int from = 0;
int length = data.length();
byte marker = xContent.streamSeparator();
while (true) {
int nextMarker = findNextMarker(marker, from, data, length);
if (nextMarker == -1) {
break;
}
// support first line with \n
if (nextMarker == 0) {
from = nextMarker + 1;
continue;
}
SearchRequest searchRequest = new SearchRequest();
if (indices != null) {
searchRequest.indices(indices);
}
if (ignoreIndices != null) {
searchRequest.ignoreIndices(ignoreIndices);
}
if (types != null && types.length > 0) {
searchRequest.types(types);
}
if (routing != null) {
searchRequest.routing(routing);
}
searchRequest.searchType(searchType);
// now parse the action
if (nextMarker - from > 0) {
XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));
try {
// Move to START_OBJECT, if token is null, its an empty data
XContentParser.Token token = parser.nextToken();
if (token != null) {
assert token == XContentParser.Token.START_OBJECT;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("index".equals(currentFieldName) || "indices".equals(currentFieldName)) {
if (!allowExplicitIndex) {
throw new ElasticSearchIllegalArgumentException("explicit index in multi search is not allowed");
}
searchRequest.indices(Strings.splitStringByCommaToArray(parser.text()));
} else if ("type".equals(currentFieldName) || "types".equals(currentFieldName)) {
searchRequest.types(Strings.splitStringByCommaToArray(parser.text()));
} else if ("search_type".equals(currentFieldName) || "searchType".equals(currentFieldName)) {
searchRequest.searchType(parser.text());
} else if ("preference".equals(currentFieldName)) {
searchRequest.preference(parser.text());
} else if ("routing".equals(currentFieldName)) {
searchRequest.routing(parser.text());
} else if ("ignore_indices".equals(currentFieldName) || "ignoreIndices".equals(currentFieldName)) {
searchRequest.ignoreIndices(IgnoreIndices.fromString(parser.text()));
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("index".equals(currentFieldName) || "indices".equals(currentFieldName)) {
if (!allowExplicitIndex) {
throw new ElasticSearchIllegalArgumentException("explicit index in multi search is not allowed");
}
searchRequest.indices(parseArray(parser));
} else if ("type".equals(currentFieldName) || "types".equals(currentFieldName)) {
searchRequest.types(parseArray(parser));
} else {
throw new ElasticSearchParseException(currentFieldName + " doesn't support arrays");
}
}
}
}
} finally {
parser.close();
}
}
// move pointers
from = nextMarker + 1;
// now for the body
nextMarker = findNextMarker(marker, from, data, length);
if (nextMarker == -1) {
break;
}
searchRequest.source(data.slice(from, nextMarker - from), contentUnsafe);
// move pointers
from = nextMarker + 1;
add(searchRequest);
}
return this;
}
private String[] parseArray(XContentParser parser) throws IOException {
final List<String> list = new ArrayList<String>();
assert parser.currentToken() == XContentParser.Token.START_ARRAY;
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
list.add(parser.text());
}
return list.toArray(new String[list.size()]);
}
/**
 * Scans {@code data} for the next occurrence of {@code marker}.
 *
 * @param marker the byte to look for (the line separator between requests)
 * @param from   index at which to start scanning (inclusive)
 * @param data   the raw request body
 * @param length upper bound of the scan (exclusive)
 * @return the index of the marker, or -1 if it does not occur in [from, length)
 */
private int findNextMarker(byte marker, int from, BytesReference data, int length) {
    int pos = from;
    while (pos < length) {
        if (data.get(pos) == marker) {
            return pos;
        }
        pos++;
    }
    return -1;
}
/**
 * Returns the live list of search requests added to this multi-search so far.
 */
public List<SearchRequest> requests() {
    return this.requests;
}
/**
 * Validates this multi-search request: it must contain at least one request,
 * and every contained {@link SearchRequest} must itself be valid. All
 * validation errors from the individual requests are merged into a single
 * exception.
 *
 * @return the accumulated validation errors, or {@code null} when valid
 */
@Override
public ActionRequestValidationException validate() {
    ActionRequestValidationException validationException = null;
    if (requests.isEmpty()) {
        validationException = addValidationError("no requests added", validationException);
    }
    for (SearchRequest request : requests) {
        final ActionRequestValidationException requestError = request.validate();
        if (requestError == null) {
            continue;
        }
        if (validationException == null) {
            validationException = new ActionRequestValidationException();
        }
        validationException.addValidationErrors(requestError.validationErrors());
    }
    return validationException;
}
/**
 * Returns the {@link IgnoreIndices} setting applied to this multi-search request.
 */
public IgnoreIndices ignoreIndices() {
    return ignoreIndices;
}
/**
 * Sets the {@link IgnoreIndices} option for this multi-search request.
 *
 * @param ignoreIndices the setting to apply
 * @return this request, for call chaining
 */
public MultiSearchRequest ignoreIndices(IgnoreIndices ignoreIndices) {
    this.ignoreIndices = ignoreIndices;
    return this;
}
/**
 * Deserializes this multi-search request from the stream: a vInt count
 * followed by that many serialized {@link SearchRequest}s, appended to
 * {@code requests} in stream order (mirrors {@link #writeTo}).
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    final int count = in.readVInt();
    for (int i = 0; i < count; i++) {
        final SearchRequest searchRequest = new SearchRequest();
        searchRequest.readFrom(in);
        requests.add(searchRequest);
    }
}
/**
 * Serializes this multi-search request to the stream: a vInt count followed
 * by each contained {@link SearchRequest}, in list order (mirrors
 * {@link #readFrom}).
 */
@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    final int count = requests.size();
    out.writeVInt(count);
    for (int i = 0; i < count; i++) {
        requests.get(i).writeTo(out);
    }
}
}
| |
package com.libreworks.stellarbase.web;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.ArrayType;
import com.google.common.collect.ImmutableSet;
import com.libreworks.stellarbase.persistence.Pagination;
import com.libreworks.stellarbase.text.Patterns;
import com.libreworks.stellarbase.text.Strings;
import com.libreworks.stellarbase.util.Arguments;
/**
 * Builds {@link Pagination} objects from the paging and sorting conventions
 * used by several common web frameworks and JavaScript grid toolkits.
 *
 * <p>Supported limit/offset styles: dojo.store.JsonRest
 * ({@code Range: items=0-24} header), Grails ({@code max}/{@code offset}),
 * dojox.data.QueryReadStore ({@code count}/{@code start}), OpenSearch
 * ({@code count}/{@code startIndex} or {@code startPage}), Spring Data REST
 * ({@code limit}/{@code page}) and ExtJS ({@code limit}/{@code start}).
 * Several sort syntaxes are recognized as well; see
 * {@link #create(HttpServletRequest, String)}.</p>
 */
public class PaginationFactory
{
    private static final String DESC = "desc";
    private static final String SORT = "sort";
    private static final String ORDER = "order";
    private static final String PAGE = "page";
    private static final String START_PAGE = "startPage";
    private static final String START_INDEX = "startIndex";
    private static final String START = "start";
    private static final String COUNT = "count";
    private static final String MAX = "max";
    private static final String LIMIT = "limit";
    private static final String OFFSET = "offset";
    private static final String HEADER_RANGE = "Range";
    // dojo.store.JsonRest range header, e.g. "items=0-24"
    private static final Pattern RANGE = Pattern.compile("^items=(\\d+)-(\\d+)$");
    // Dojo function-style sort parameter name, e.g. "sort(+foo,-bar)"
    private static final Pattern DOJO_SORT = Pattern.compile("^sort\\((.*)\\)$");
    private static final Set<String> MAX_ALIAS = ImmutableSet.of(MAX, LIMIT, COUNT);
    private static final Set<String> OFFSET_ALIAS = ImmutableSet.of(START, OFFSET);
    private static final Set<String> PAGE_ALIAS = ImmutableSet.of(PAGE, START_PAGE);

    private ObjectMapper objectMapper;
    // Cached Jackson type for ExtSort[], used to decode ExtJS JSON sort values.
    private ArrayType extSortArrayType;

    /**
     * Creates a PaginationFactory with a default Jackson {@link ObjectMapper}.
     */
    public PaginationFactory()
    {
        this(new ObjectMapper());
    }

    /**
     * Creates a PaginationFactory.
     *
     * @param objectMapper the Jackson mapper used to decode ExtJS-style JSON
     *        sort parameters (must not be null)
     */
    public PaginationFactory(ObjectMapper objectMapper)
    {
        this.objectMapper = Arguments.checkNull(objectMapper);
        this.extSortArrayType = objectMapper.getTypeFactory().constructArrayType(ExtSort.class);
    }

    /**
     * Creates a Pagination from the request, reading sort orders from the
     * default {@code sort} parameter.
     *
     * @param request the current HTTP request
     * @return the pagination described by the request
     */
    public Pagination create(HttpServletRequest request)
    {
        return create(request, SORT);
    }

    /**
     * Creates a Pagination from the request's paging parameters (or
     * {@code Range} header) and the named sort parameter.
     *
     * @param request the current HTTP request
     * @param sortParameter the name of the sort parameter to read
     * @return the pagination described by the request
     */
    public Pagination create(HttpServletRequest request, String sortParameter)
    {
        Integer offset = 0;
        Integer max = Integer.MAX_VALUE;
        @SuppressWarnings("unchecked")
        Set<String> params = ImmutableSet.copyOf(Collections.<String>list(request.getParameterNames()));
        // dojo.store.JsonRest - Range: items=0-24
        // Grails - &max=25&offset=0
        // dojox.data.QueryReadStore - &count=25&start=0
        // OpenSearch - &count=25&startIndex=1
        // OpenSearch w/ page - &count=25&startPage=1
        // Spring Data REST - &limit=25&page=1
        // ExtJS - &limit=25&start=0
        String range = request.getHeader(HEADER_RANGE);
        Matcher rm = range == null ? null : RANGE.matcher(range);
        if (rm != null && rm.matches()) {
            // dojo.store.JsonRest style; the range is inclusive on both ends,
            // hence the +1 when deriving the page size.
            offset = Integer.valueOf(rm.group(1));
            max = Integer.valueOf(rm.group(2)) - offset + 1;
        } else {
            Integer maxVal = parse(MAX_ALIAS, request, Integer.MAX_VALUE);
            if (Integer.MAX_VALUE != maxVal.intValue()) {
                max = maxVal;
            }
            // Grails, ExtJS, dojox.data.QueryReadStore, all zero-based
            Integer offVal = parse(OFFSET_ALIAS, request, 0);
            if (offVal > 0) {
                offset = offVal;
            }
            if (params.contains(START_INDEX) || params.contains(START_PAGE) || params.contains(PAGE)) {
                // OpenSearch style, 1-based
                Integer startIdx = parse(request.getParameter(START_INDEX), 0);
                // OpenSearch or Spring Data style, 1-based
                Integer startPage = parse(PAGE_ALIAS, request, 0);
                if (startIdx > 0) {
                    offset = startIdx - 1;
                } else if (startPage > 0) {
                    offset = (max * (startPage - 1));
                }
            }
        }
        // LinkedHashMap: sort columns must keep their declaration order.
        Map<String,Boolean> order = new LinkedHashMap<String,Boolean>();
        // formats:
        // Dojo - &sort(+foo,-bar)
        // Dojo w/field - &[field]=+foo,-bar
        // OpenSearchServer - &sort=foo&sort=-bar
        // OpenSearch extension - &sort=foo:ascending&sort=bar:descending
        // Grails - &sort=foo&order=asc
        // Spring Data REST - &sort=foo,asc&sort=bar,desc
        // ExtJS JSON - &sort=[{"property":"foo","direction":"asc"},{"property":"bar","direction":"desc"}]
        if (params.contains(sortParameter)) {
            if (params.contains(ORDER)) {
                // Grails puts the direction in a separate "order" parameter
                order.put(request.getParameter(sortParameter),
                    !DESC.equalsIgnoreCase(request.getParameter(ORDER)));
            } else {
                String[] values = request.getParameterValues(sortParameter);
                for (String s : values) {
                    parseSort(s, order);
                }
            }
        } else {
            // Dojo function-style: the sort spec is embedded in the parameter NAME
            for (String s : params) {
                Matcher sm = DOJO_SORT.matcher(s);
                if (sm.matches()) {
                    // +foo,-bar
                    parseSort(sm.group(1), order);
                }
            }
        }
        return Pagination.create(max, offset, order);
    }

    /**
     * Parses a single sort expression in any supported syntax and records the
     * column-to-direction mapping ({@code TRUE} = ascending) in {@code sorts}.
     *
     * @param sort the raw sort expression (blank values are ignored)
     * @param sorts destination map, preserving insertion order
     */
    private void parseSort(String sort, Map<String,Boolean> sorts)
    {
        if (Strings.isBlank(sort)) {
            return;
        }
        if (sort.startsWith("[")) {
            // it might be the ridiculous JSON ExtJS sort format
            try {
                for(ExtSort s : objectMapper.<ExtSort[]>readValue(sort, extSortArrayType)) {
                    sorts.put(s.getProperty(), !DESC.equalsIgnoreCase(s.getDirection()));
                }
                return;
            } catch (Exception e) {
                // No, it's not — fall through to the plain-text formats below.
            }
        }
        if (sort.endsWith(",asc")) {
            // foo,asc
            sorts.put(sort.substring(0, sort.length() - 4), Boolean.TRUE);
        } else if (sort.endsWith(",desc")) {
            // foo,desc
            sorts.put(sort.substring(0, sort.length() - 5), Boolean.FALSE);
        } else if (sort.endsWith(":ascending")) {
            // foo:ascending
            sorts.put(sort.substring(0, sort.length() - 10), Boolean.TRUE);
        } else if (sort.endsWith(":descending")) {
            // foo:descending
            sorts.put(sort.substring(0, sort.length() - 11), Boolean.FALSE);
        } else {
            for (String s : Patterns.COMMA.split(sort)) {
                if (s.startsWith(Strings.DASH)) {
                    // -foo
                    sorts.put(s.substring(1), Boolean.FALSE);
                } else if (s.startsWith(Strings.PLUS)) {
                    // +foo
                    sorts.put(s.substring(1), Boolean.TRUE);
                } else {
                    // foo
                    sorts.put(s, Boolean.TRUE);
                }
            }
        }
    }

    /**
     * Returns the first parseable integer among the named request parameters.
     *
     * @param names candidate parameter names, checked in iteration order
     * @param request the current HTTP request
     * @param defaultValue returned when no parameter holds a parseable integer
     * @return the first parsed value, or {@code defaultValue}
     */
    private static Integer parse(Set<String> names, HttpServletRequest request, Integer defaultValue)
    {
        for (String name : names) {
            Integer parsed = parseOrNull(request.getParameter(name));
            if (parsed != null) {
                // BUGFIX: return the already-parsed value. Re-parsing the raw
                // string with Integer.valueOf(value) threw NumberFormatException
                // for whitespace-padded parameters (e.g. " 25 ") that
                // parseOrNull — which trims — had accepted.
                return parsed;
            }
        }
        return defaultValue;
    }

    /**
     * Parses a value leniently, substituting a default on failure.
     *
     * @param value the raw string (may be null or padded with whitespace)
     * @param defaultValue returned when the value is not a parseable integer
     * @return the parsed value, or {@code defaultValue}
     */
    private static Integer parse(String value, Integer defaultValue)
    {
        Integer parsed = parseOrNull(value);
        return parsed == null ? defaultValue : parsed;
    }

    /**
     * Parses a trimmed value, returning {@code null} (never throwing) when the
     * value is absent or not an integer.
     */
    private static Integer parseOrNull(String value)
    {
        if (value != null) {
            try {
                return Integer.valueOf(value.trim());
            } catch (NumberFormatException e) {
                // not a number; treated as absent
            }
        }
        return null;
    }

    /**
     * Bean matching one element of the ExtJS JSON sort syntax:
     * {@code {"property":"foo","direction":"asc"}}.
     */
    public static class ExtSort
    {
        private String property;
        private String direction;

        public ExtSort()
        {
        }

        public String getDirection()
        {
            return direction;
        }

        public String getProperty()
        {
            return property;
        }

        public void setDirection(String direction)
        {
            this.direction = direction;
        }

        public void setProperty(String property)
        {
            this.property = property;
        }
    }
}
| |
/*L
* Copyright Georgetown University, Washington University.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cab2b/LICENSE.txt for details.
*/
package edu.wustl.cab2b.server.path.pathgen;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
/**
* Computes all possible paths present in a directed graph. No path returned
* should contain a cycle. Suppose the graph is (V, E) where V is the set of
* vertices and E is the set of edges. A source-dest pair is represented as
* <code>i->j</code>.<br>
* The algorithm is as follows : <br>
* For each pair of nodes <code>{i, j : i ∈ V, j ∈ V, i ≠ j}</code>
* in the graph, call <code>getPaths(i->j, {})</code>. Self-edges (a
* self-edge is a path of the form <code>i->i</code>) are then added to the
* resulting set of paths.<br>
* <code>getPaths()</code> is the method where the core of the algorithm
* resides. Suppose<code>
* P(i->j, N) </code>is the set of paths about to be
* returned from <code>getPaths()</code>.<br>
* Following is what happens on a call <code>getPaths(i->j, N)</code>, where
* N is the ignoredNodesSet : <br>
* <ol>
* <li>Let <code>X =
* {@link edu.wustl.cab2b.server.path.pathgen.GraphPathFinderCache#getPathsOnIgnoringNodes(SourceDestinationPair, Set)}
* with (i->j, N)</code>
* as parameters; if <code>X != null</code>, then <code>P(i->j, N) =
* X</code>;
* return <code>P(i->j, N)</code>. Else continue.
* <li>If <code>i->j ∈ E</code> then add a path <code>i->j</code> to
* <code>P(i->j, N)</code>.</li>
* <li>Let
* <code>K = {k : k ∈ V, k ≠ i, k ≠ j, k ∈ N, i->k ∈ E)</code>.
* For each <code>k ∈ K</code>, do the following : <br>
* <ol>
* <li>Call <code>getPaths (k->j, N ∪ {i})</code>. Suppose the returned
* set of paths is R.</li>
* <li>For each path <code>Rx (0 < x < |R|) in R</code>, add the path
* <code>i->Rx to P(i->j, N)</code>.</li>
* </ol>
* <li>Add <code>P(i->j, N)</code> to the cache.
* <li>Return <code>P(i->j, N)</code></li>
* </ol>
* @author srinath_k
*/
public class GraphPathFinder {
    private static final Logger logger = edu.wustl.common.util.logger.Logger.getLogger(GraphPathFinder.class);
    /** Constant for memory cache */
    // When true, intermediate results are memoized in memory; otherwise in the
    // database via the supplied connection. Mutable/public so callers can switch
    // strategies before invoking getAllPaths().
    public static boolean MEM_CACHE = true;
    // The graph being searched; non-null only for the duration of getAllPaths().
    private Graph inputGraph;
    // Memoization cache keyed on (source-dest pair, ignored-node set).
    private GraphPathFinderCache cache;
    // Releases the graph and cache so the (potentially large) state can be
    // garbage-collected between invocations.
    private void wrapup() {
        this.inputGraph = null;
        this.cache.cleanup();
        this.cache = null;
    }
    private GraphPathFinderCache getCache() {
        return cache;
    }
    /**
     * Returns all cycle-free paths in the graph with no limit on path length
     * (delegates to the maxLength overload with Integer.MAX_VALUE).
     *
     * @param adjacencyMatrix adjacency matrix of the directed graph
     * @param replicationNodes map used to replicate paths over equivalent nodes
     * @param conn database connection, used only when MEM_CACHE is false
     * @return the set of all paths found
     */
    public Set<Path> getAllPaths(boolean[][] adjacencyMatrix, Map<Integer, Set<Integer>> replicationNodes,
                                 Connection conn) {
        return getAllPaths(adjacencyMatrix, replicationNodes, conn, Integer.MAX_VALUE);
    }
    /**
     * Returns all cycle-free paths in the graph containing at most
     * {@code maxLength} nodes, plus a length-2 path for every self-edge.
     *
     * @param adjacencyMatrix adjacency matrix of the directed graph
     * @param replicationNodes map used to replicate paths over equivalent nodes
     * @param conn database connection, used only when MEM_CACHE is false
     * @param maxLength maximum number of nodes allowed in a path
     * @return the set of all paths found (after replication)
     * @throws IllegalArgumentException if <tt>maxLength < 2</tt>
     */
    public Set<Path> getAllPaths(boolean[][] adjacencyMatrix, Map<Integer, Set<Integer>> replicationNodes,
                                 Connection conn, int maxLength) {
        logger.info("Entered GraphPathFinder...");
        if (maxLength < 2) {
            throw new IllegalArgumentException("maxLength should be atleast 2.");
        }
        long startTime = System.currentTimeMillis();
        // init
        this.inputGraph = new Graph(adjacencyMatrix);
        if (MEM_CACHE) {
            this.cache = new MemoryCache();
        } else {
            this.cache = new DatabaseCache(conn);
        }
        // end init
        Node[] allNodes = this.inputGraph.allNodes().toArray(new Node[0]);
        int numPaths = 0;
        // Enumerate paths for every ordered pair of distinct nodes; getPaths()
        // populates the cache as a side effect, and the cache is harvested below.
        for (Node srcNode : allNodes) {
            logger.debug("Processing " + srcNode);
            for (Node destNode : allNodes) {
                if (srcNode.equals(destNode)) {
                    // don't process self-edges now...
                    continue;
                }
                SourceDestinationPair sdp = new SourceDestinationPair(srcNode, destNode);
                // logger.info("Processing " + srcNode + " to " + destNode);
                Set<Path> srcDestPaths = getPaths(sdp, new HashSet<Node>(), maxLength);
                numPaths += srcDestPaths.size();
                // if (KEEP_WRITING) {
                // PathToFileWriter.APPEND = true;
                // PathToFileWriter.writePathsToFile(srcDestPaths, null);
                // }
            }
        }
        Set<Path> result = new HashSet<Path>();
        // process self edges
        for (Node node : allNodes) {
            if (isEdgePresent(node, node)) {
                result.add(new Path(node, node));
                ++numPaths;
            }
        }
        // add other paths
        result.addAll(getCache().getAllPaths());
        wrapup();
        result = PathReplicationUtil.replicatePaths(result, replicationNodes);
        long endTime = System.currentTimeMillis();
        logger.info("Time taken GraphPathFinder : " + (endTime - startTime));
        logger.info("Exiting GraphPathFinder.");
        return result;
    }
    // Recursively enumerates all paths from sdp.src to sdp.dest that avoid
    // nodesToIgnore and contain at most maxLength nodes. Results are memoized
    // in the cache keyed on (sdp, nodesToIgnore); see the class javadoc for the
    // algorithm. NOTE: the cache lookup must precede the edge check, and the
    // cache entry is added only after all recursion completes.
    private Set<Path> getPaths(SourceDestinationPair sdp, Set<Node> nodesToIgnore, final int maxLength) {
        Node srcNode = sdp.getSrcNode();
        Node destNode = sdp.getDestNode();
        Set<Path> res = new HashSet<Path>();
        // see if there are paths calculated already...
        Set<Path> cachedPaths = getCache().getPathsOnIgnoringNodes(sdp, nodesToIgnore);
        if (cachedPaths != null) {
            res.addAll(cachedPaths);
            return res;
        }
        // Candidate intermediate nodes: everything except the endpoints and the
        // nodes already on the current path (nodesToIgnore) — prevents cycles.
        Set<Node> interNodes = new HashSet<Node>(this.inputGraph.allNodes());
        interNodes.remove(srcNode);
        interNodes.remove(destNode);
        interNodes.removeAll(nodesToIgnore);
        if (isEdgePresent(srcNode, destNode)) {
            res.add(new Path(srcNode, destNode));
        }
        Set<Node> nodesToIgnoreNext = new HashSet<Node>(nodesToIgnore);
        nodesToIgnoreNext.add(srcNode);
        for (Node interNode : interNodes) {
            if (isEdgePresent(srcNode, interNode)) {
                Set<Path> pathsFromInterToDest = getPaths(new SourceDestinationPair(interNode, destNode),
                                                          nodesToIgnoreNext, maxLength);
                for (Path pathFromInterToDest : pathsFromInterToDest) {
                    // Prepending srcNode would exceed maxLength; skip this path.
                    if (pathFromInterToDest.numNodes() == maxLength) {
                        continue;
                    }
                    List<Node> intermediateNodes = new ArrayList<Node>();
                    intermediateNodes.add(interNode);
                    intermediateNodes.addAll(pathFromInterToDest.getIntermediateNodes());
                    Path resPath = new Path(srcNode, destNode, intermediateNodes);
                    res.add(resPath);
                }
            }
        }
        addEntryToCache(sdp, nodesToIgnore, res);
        //
        return res;
    }
    private void addEntryToCache(SourceDestinationPair sdp, Set<Node> nodesToIgnore, Set<Path> res) {
        getCache().addEntry(sdp, nodesToIgnore, res);
    }
    private boolean isEdgePresent(Node srcNode, Node destNode) {
        return this.inputGraph.isEdgePresent(srcNode, destNode);
    }
}
| |
/**
Copyright 2008 University of Rochester
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.ur.hibernate.ir.user.db;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Restrictions;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import edu.ur.hibernate.HbCrudDAO;
import edu.ur.hibernate.HbHelper;
import edu.ur.ir.user.IrUser;
import edu.ur.ir.user.IrUserDAO;
import edu.ur.order.OrderType;
/**
* Persistence for a user.
*
* @author Nathan Sarr
*
*/
public class HbIrUserDAO implements IrUserDAO {
/** eclipse generated id */
private static final long serialVersionUID = -3904788289887401360L;
/** hibernate helper */
private final HbCrudDAO<IrUser> hbCrudDAO;
/**
* Default Constructor
*/
public HbIrUserDAO() {
hbCrudDAO = new HbCrudDAO<IrUser>(IrUser.class);
}
/**
* Set the session factory.
*
* @param sessionFactory
*/
public void setSessionFactory(SessionFactory sessionFactory)
{
hbCrudDAO.setSessionFactory(sessionFactory);
}
/**
* Get a count of the users in the system
*
* @see edu.ur.CountableDAO#getCount()
*/
public Long getCount() {
return (Long)HbHelper.getUnique(hbCrudDAO.getHibernateTemplate().findByNamedQuery("userCount"));
}
/**
* Get all users in user name order.
*
* @see edu.ur.NameListDAO#getAllNameOrder()
*/
@SuppressWarnings("unchecked")
public List<IrUser> getAllNameOrder() {
DetachedCriteria dc = DetachedCriteria.forClass(IrUser.class);
dc.addOrder(Order.asc("name"));
return (List<IrUser>) hbCrudDAO.getHibernateTemplate().findByCriteria(dc);
}
/**
* Get all users in name order.
*
* @see edu.ur.NameListDAO#getAllOrderByName(int, int)
*/
public List<IrUser> getAllOrderByName(int startRecord, int numRecords) {
return hbCrudDAO.getByQuery("getAllUserNameAsc", startRecord, numRecords);
}
/**
* Find the user by unique name.
*
* @see edu.ur.UniqueNameDAO#findByUniqueName(java.lang.String)
*/
public IrUser findByUniqueName(String username) {
return (IrUser)
HbHelper.getUnique(hbCrudDAO.getHibernateTemplate().findByNamedQuery("getUserByName", username.toLowerCase()));
}
/**
* Load the ir user found by the user name. This is coupled to the Acegi security.
*
* @see org.acegisecurity.userdetails.UserDetailsService#loadUserByUsername(java.lang.String)
*/
public IrUser loadUserByUsername(String username) throws UsernameNotFoundException
{
IrUser irUser = findByUniqueName(username);
if (irUser == null)
{
throw new UsernameNotFoundException("User with user name " + username + " could not be found");
}
return irUser;
}
@SuppressWarnings("unchecked")
public List getAll() {
return hbCrudDAO.getAll();
}
public IrUser getById(Long id, boolean lock) {
return hbCrudDAO.getById(id, lock);
}
public void makePersistent(IrUser entity) {
hbCrudDAO.makePersistent(entity);
}
public void makeTransient(IrUser entity) {
hbCrudDAO.makeTransient(entity);
}
/**
* Get the users.
*
* @see edu.ur.ir.user.IrUserDAO#getUsers(java.util.List)
*/
@SuppressWarnings("unchecked")
public List<IrUser> getUsers(final List<Long> userIds) {
List<IrUser> foundUsers = new LinkedList<IrUser>();
if( userIds.size() > 0 )
{
foundUsers = (List<IrUser>) hbCrudDAO.getHibernateTemplate().execute(new HibernateCallback() {
public Object doInHibernate(Session session)
throws HibernateException, SQLException {
Criteria criteria = session.createCriteria(hbCrudDAO.getClazz());
criteria.add(Restrictions.in("id",userIds));
return criteria.list();
}
});
}
return foundUsers;
}
/**
* Load the ir user found by the token.
*
* @see edu.ur.ir.user.IrUserDAO#getUserByToken(java.lang.String)
*/
public IrUser getUserByToken(String token)
{
return (IrUser)
HbHelper.getUnique(hbCrudDAO.getHibernateTemplate().findByNamedQuery("getUserByToken", token));
}
/**
* Get users whose affiliation approval is pending
*
* @see edu.ur.ir.user.IrUserDAO#getUsersPendingAffiliationApproval()
*/
@SuppressWarnings("unchecked")
public List<IrUser> getUsersPendingAffiliationApproval()
{
return (List<IrUser>) hbCrudDAO.getHibernateTemplate().findByNamedQuery("getUsersPendingAffiliationApproval");
}
/**
* @see edu.ur.ir.user.IrUserDAO#getUsersPendingAffiliationApprovalCount()
*/
public Long getUsersPendingAffiliationApprovalCount() {
Long count = (Long) hbCrudDAO.getHibernateTemplate().execute(new HibernateCallback() {
public Object doInHibernate(Session session)
throws HibernateException, SQLException {
Query q = session.getNamedQuery("getPendingApprovalsCount");
return q.uniqueResult();
}
});
return count;
}
/**
* @see edu.ur.ir.user.IrUserDAO#getUsersPendingAffiliationApprovals(int, int, String)
*/
@SuppressWarnings("unchecked")
public List<IrUser> getUsersPendingAffiliationApprovals(final int rowStart,
final int numberOfResultsToShow, final String sortType) {
List<IrUser> users =
(List<IrUser>) hbCrudDAO.getHibernateTemplate().execute(new HibernateCallback() {
public Object doInHibernate(Session session)
throws HibernateException, SQLException {
Query q = null;
if (sortType.equalsIgnoreCase("asc")) {
q = session.getNamedQuery("getPendingApprovalsOrderByNameAsc");
} else {
q = session.getNamedQuery("getPendingApprovalsOrderByNameDesc");
}
q.setFirstResult(rowStart);
q.setMaxResults(numberOfResultsToShow);
q.setReadOnly(true);
q.setFetchSize(numberOfResultsToShow);
return q.list();
}
});
return users;
}
/**
* Get the user having the specified role Id
*
* @see edu.ur.ir.user.IrUserDAO#getUserByRole(Long)
*/
@SuppressWarnings("unchecked")
public List<IrUser> getUserByRole(String roleName) {
return hbCrudDAO.getHibernateTemplate().findByNamedQuery("getUserByRole", roleName);
}
/**
* Get user having the specified person name authority
*
* @param personNameAuthorityId Id of person name authority
* @return User
*/
public IrUser getUserByPersonNameAuthority(Long personNameAuthorityId) {
return (IrUser)
HbHelper.getUnique(hbCrudDAO.getHibernateTemplate().findByNamedQuery("getUserByPersonNameAuthority", personNameAuthorityId));
}
/**
* Get a list of users for a specified sort criteria.
*
* @param rowStart - Start row to fetch the data from
* @param numberOfResultsToShow - maximum number of results to fetch
* @param sortElement - column to sort on
* @param sortType - The order to sort by (ascending/descending)
*
* @return List of users
*/
@SuppressWarnings("unchecked")
public List<IrUser> getUsers(final int rowStart,
final int numberOfResultsToShow, final String sortElement, final OrderType orderType) {
List<IrUser> users = new LinkedList<IrUser>();
users = (List<IrUser>) hbCrudDAO.getHibernateTemplate().execute(new HibernateCallback()
{
public Object doInHibernate(Session session) throws HibernateException, SQLException
{
Query q = null;
if( sortElement.equalsIgnoreCase("lastName") && orderType.equals(OrderType.ASCENDING_ORDER))
{
q = session.getNamedQuery("getUsersByLastNameOrderAsc");
} else if ( sortElement.equalsIgnoreCase("lastName") && orderType.equals(OrderType.DESCENDING_ORDER)){
q = session.getNamedQuery("getUsersByLastNameOrderDesc");
} else if ( sortElement.equalsIgnoreCase("username") && orderType.equals(OrderType.ASCENDING_ORDER)){
q = session.getNamedQuery("getUsersByUserNameOrderAsc");
} else if ( sortElement.equalsIgnoreCase("username") && orderType.equals(OrderType.DESCENDING_ORDER)){
q = session.getNamedQuery("getUsersByUserNameOrderDesc");
} else if ( sortElement.equalsIgnoreCase("email") && orderType.equals(OrderType.ASCENDING_ORDER)){
q = session.getNamedQuery("getUsersByEmailOrderAsc");
} else if ( sortElement.equalsIgnoreCase("email") && orderType.equals(OrderType.DESCENDING_ORDER)){
q = session.getNamedQuery("getUsersByEmailOrderDesc");
}
q.setFirstResult(rowStart);
q.setMaxResults(numberOfResultsToShow);
q.setFetchSize(numberOfResultsToShow);
return q.list();
}
});
return users;
}
}
| |
// $Id: FromElementFactory.java 9586 2006-03-09 21:11:44Z steve.ebersole@jboss.com $
package org.hibernate.hql.ast.tree;
import org.hibernate.engine.JoinSequence;
import org.hibernate.hql.antlr.SqlTokenTypes;
import org.hibernate.hql.ast.util.ASTUtil;
import org.hibernate.hql.ast.util.AliasGenerator;
import org.hibernate.hql.ast.util.PathHelper;
import org.hibernate.hql.ast.util.SessionFactoryHelper;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Joinable;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.JoinFragment;
import org.hibernate.type.AssociationType;
import org.hibernate.type.CollectionType;
import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.hibernate.util.StringHelper;
import antlr.ASTFactory;
import antlr.SemanticException;
import antlr.collections.AST;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Encapsulates the creation of FromElements and JoinSequences.
*
* @author josh Oct 12, 2004 4:54:25 AM
*/
class FromElementFactory implements SqlTokenTypes {
private static final Log log = LogFactory.getLog( FromElementFactory.class );
// The FROM clause that elements created by this factory are added to.
private FromClause fromClause;
// The from-element the path originates from (the "owner" side of a join).
private FromElement origin;
// The HQL path this factory was constructed for.
private String path;
// Explicit alias given in the query, if any.
private String classAlias;
// Join columns supplied by the caller for collection joins.
private String[] columns;
// True when the join is implied by a path rather than written explicitly;
// note it is also mutated by createCollection()/createElementJoin().
private boolean implied;
// Set from the 'indexed' flag in createCollection(), and forced true by
// createElementJoin() — marks elements()-style handling.
private boolean inElementsFunction;
// True only when the collection-oriented constructor was used.
private boolean collection;
// Persister for the collection role, once resolved.
private QueryableCollection queryableCollection;
// Cached queryableCollection.getCollectionType(); set in createCollection().
private CollectionType collectionType;
/**
* Creates entity from elements.
*/
/**
 * Creates a factory for entity from-elements (non-collection paths).
 *
 * @param fromClause the FROM clause the created elements belong to
 * @param origin the from-element the path originates from
 * @param path the HQL path being processed
 */
public FromElementFactory(FromClause fromClause, FromElement origin, String path) {
    this.fromClause = fromClause;
    this.origin = origin;
    this.path = path;
    this.collection = false;
}
/**
* Creates collection from elements.
*/
/**
 * Creates a factory for collection from-elements.
 *
 * @param fromClause the FROM clause the created elements belong to
 * @param origin the from-element the path originates from
 * @param path the HQL path being processed
 * @param classAlias explicit alias given in the query, if any
 * @param columns join columns for the collection
 * @param implied whether the join is implied rather than explicit
 */
public FromElementFactory(
        FromClause fromClause,
        FromElement origin,
        String path,
        String classAlias,
        String[] columns,
        boolean implied) {
    this( fromClause, origin, path );
    this.classAlias = classAlias;
    this.columns = columns;
    this.implied = implied;
    this.collection = true;
}
/**
 * Creates a from-element for the factory's path and adds it to the FROM
 * clause. If this clause is a subquery and the path's root alias resolves in
 * the parent clause, the element is created via the subselect path instead.
 *
 * @return the new from-element
 * @throws SemanticException if the path cannot be resolved to a persister
 */
FromElement addFromElement() throws SemanticException {
    final FromClause parent = fromClause.getParentFromClause();
    if ( parent != null ) {
        // Subquery: the path root may be an alias declared in the outer query.
        final String rootAlias = PathHelper.getAlias( path );
        final FromElement outerElement = parent.getFromElement( rootAlias );
        if ( outerElement != null ) {
            return createFromElementInSubselect( path, rootAlias, outerElement, classAlias );
        }
    }
    final EntityPersister persister = fromClause.getSessionFactoryHelper().requireClassPersister( path );
    final FromElement element = createAndAddFromElement(
            path,
            classAlias,
            persister,
            ( EntityType ) ( ( Queryable ) persister ).getType(),
            null );
    // Add to the query spaces.
    fromClause.getWalker().addQuerySpaces( persister.getQuerySpaces() );
    return element;
}
/**
 * Creates (or reuses) a from-element for a path appearing inside a subselect.
 * If the path's root is the outer query's class alias this is a correlated
 * subselect and the outer table alias is reused; otherwise a new alias will
 * be generated. A new FROM element is created only when the resolved element
 * belongs to a different clause than this one.
 *
 * @param path the HQL path
 * @param pathAlias the alias of the path's root
 * @param parentFromElement the element resolved in the parent clause
 * @param classAlias explicit alias given in the query, if any
 * @return the from-element to use within this subselect
 */
private FromElement createFromElementInSubselect(
        String path,
        String pathAlias,
        FromElement parentFromElement,
        String classAlias) throws SemanticException {
    if ( log.isDebugEnabled() ) {
        log.debug( "createFromElementInSubselect() : path = " + path );
    }
    // Resolve the dotted path into a from-element.
    FromElement fromElement = evaluateFromElementPath( path, classAlias );
    final EntityPersister entityPersister = fromElement.getEntityPersister();
    // Correlated subselect? Then reuse the existing table alias; a null alias
    // means a fresh one gets generated later.
    final boolean correlatedSubselect = pathAlias.equals( parentFromElement.getClassAlias() );
    final String tableAlias = correlatedSubselect ? fromElement.getTableAlias() : null;
    // If the from element isn't in the same clause, create a new from element.
    if ( fromElement.getFromClause() != fromClause ) {
        if ( log.isDebugEnabled() ) {
            log.debug( "createFromElementInSubselect() : creating a new FROM element..." );
        }
        fromElement = createFromElement( entityPersister );
        initializeAndAddFromElement( fromElement,
                path,
                classAlias,
                entityPersister,
                ( EntityType ) ( ( Queryable ) entityPersister ).getType(),
                tableAlias );
    }
    if ( log.isDebugEnabled() ) {
        log.debug( "createFromElementInSubselect() : " + path + " -> " + fromElement );
    }
    return fromElement;
}
/**
 * Parses the path into an AST and resolves it to a from-element, preferring
 * the implied-join element when the resolution produced one.
 *
 * @param path the HQL path to resolve
 * @param classAlias explicit alias given in the query, if any
 * @return the implied-join element if present, otherwise the resolved element
 */
private FromElement evaluateFromElementPath(String path, String classAlias) throws SemanticException {
    final ASTFactory astFactory = fromClause.getASTFactory();
    final FromReferenceNode pathNode = ( FromReferenceNode ) PathHelper.parsePath( path, astFactory );
    pathNode.recursiveResolve(
            FromReferenceNode.ROOT_LEVEL, // This is the root level node.
            false,                        // Generate an explicit from clause at the root.
            classAlias,
            null );
    final FromElement impliedJoin = pathNode.getImpliedJoin();
    return impliedJoin != null ? impliedJoin : pathNode.getFromElement();
}
/**
 * Creates the join for the elements of a collection, using a theta-style
 * collection join sequence built by the session factory helper.
 *
 * @param queryableCollection persister for the collection role
 * @param collectionName alias/name of the collection
 * @return the collection-join from-element
 */
FromElement createCollectionElementsJoin(
        QueryableCollection queryableCollection,
        String collectionName) throws SemanticException {
    final JoinSequence elementsJoin = fromClause.getSessionFactoryHelper()
            .createCollectionJoinSequence( queryableCollection, collectionName );
    this.queryableCollection = queryableCollection;
    return createCollectionJoin( elementsJoin, null );
}
/**
 * Creates a from-element for a collection-valued path. Entity-valued
 * collections become entity association joins; component- or scalar-valued
 * collections become plain collection joins.
 *
 * @param queryableCollection persister for the collection role
 * @param role the collection role name
 * @param joinType the join type (a JoinFragment constant)
 * @param fetchFlag whether this is a fetch join
 * @param indexed true when invoked for elements()/indices() handling
 * @return the new from-element
 * @throws IllegalStateException if this factory was not constructed for collections
 */
FromElement createCollection(
        QueryableCollection queryableCollection,
        String role,
        int joinType,
        boolean fetchFlag,
        boolean indexed)
        throws SemanticException {
    if ( !collection ) {
        throw new IllegalStateException( "FromElementFactory not initialized for collections!" );
    }
    this.inElementsFunction = indexed;
    FromElement elem;
    this.queryableCollection = queryableCollection;
    collectionType = queryableCollection.getCollectionType();
    String roleAlias = fromClause.getAliasGenerator().createName( role );
    // Correlated subqueries create 'special' implied from nodes
    // because correlated subselects can't use an ANSI-style join
    // NOTE: this flag must be computed BEFORE 'implied' is mutated below.
    boolean explicitSubqueryFromElement = fromClause.isSubQuery() && !implied;
    if ( explicitSubqueryFromElement ) {
        String pathRoot = StringHelper.root( path );
        FromElement origin = fromClause.getFromElement( pathRoot );
        if ( origin == null || origin.getFromClause() != fromClause ) {
            // Path root resolves outside this clause: treat as implied.
            implied = true;
        }
    }
    // super-duper-classic-parser-regression-testing-mojo-magic...
    if ( explicitSubqueryFromElement && DotNode.useThetaStyleImplicitJoins ) {
        implied = true;
    }
    Type elementType = queryableCollection.getElementType();
    if ( elementType.isEntityType() ) { // A collection of entities...
        elem = createEntityAssociation( role, roleAlias, joinType );
    }
    else if ( elementType.isComponentType() ) { // A collection of components...
        JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
        elem = createCollectionJoin( joinSequence, roleAlias );
    }
    else { // A collection of scalar elements...
        JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
        elem = createCollectionJoin( joinSequence, roleAlias );
    }
    elem.setRole( role );
    elem.setQueryableCollection( queryableCollection );
    // Don't include sub-classes for implied collection joins or subquery joins.
    if ( implied ) {
        elem.setIncludeSubclasses( false );
    }
    if ( explicitSubqueryFromElement ) {
        elem.setInProjectionList( true ); // Treat explict from elements in sub-queries properly.
    }
    if ( fetchFlag ) {
        elem.setFetch( true );
    }
    return elem;
}
/**
 * Creates a from-element for an entity join.
 *
 * @param entityClass the joined entity's class name
 * @param tableAlias SQL table alias for the joined entity
 * @param joinSequence the join sequence to attach
 * @param fetchFlag whether this is a fetch join
 * @param inFrom whether the join appears in the FROM clause itself
 * @param type the association's entity type
 * @return the new from-element
 */
FromElement createEntityJoin(
        String entityClass,
        String tableAlias,
        JoinSequence joinSequence,
        boolean fetchFlag,
        boolean inFrom,
        EntityType type) throws SemanticException {
    FromElement elem = createJoin( entityClass, tableAlias, joinSequence, type, false );
    elem.setFetch( fetchFlag );
    EntityPersister entityPersister = elem.getEntityPersister();
    int numberOfTables = entityPersister.getQuerySpaces().length;
    // Multi-table entities joined implicitly must contribute their own FROM
    // fragment so all of their tables are reachable.
    if ( numberOfTables > 1 && implied && !elem.useFromFragment() ) {
        if ( log.isDebugEnabled() ) {
            log.debug( "createEntityJoin() : Implied multi-table entity join" );
        }
        elem.setUseFromFragment( true );
    }
    // If this is an implied join in a FROM clause, then use ANSI-style joining, and set the
    // flag on the FromElement that indicates that it was implied in the FROM clause itself.
    if ( implied && inFrom ) {
        joinSequence.setUseThetaStyle( false );
        elem.setUseFromFragment( true );
        elem.setImpliedInFromClause( true );
    }
    if ( elem.getWalker().isSubQuery() ) {
        // two conditions where we need to transform this to a theta-join syntax:
        //      1) 'elem' is the "root from-element" in correlated subqueries
        //      2) The DotNode.useThetaStyleImplicitJoins has been set to true
        //          and 'elem' represents an implicit join
        if ( elem.getFromClause() != elem.getOrigin().getFromClause() ||
//			        ( implied && DotNode.useThetaStyleImplicitJoins ) ) {
                DotNode.useThetaStyleImplicitJoins ) {
            // the "root from-element" in correlated subqueries do need this piece
            elem.setType( FROM_FRAGMENT );
            joinSequence.setUseThetaStyle( true );
            elem.setUseFromFragment( false );
        }
    }
    return elem;
}
/**
 * Creates a from-element for the elements() of an entity-valued collection:
 * a new FROM element for the element entity plus an inner join from the
 * collection table to it.
 *
 * @param queryableCollection persister for the collection role; its element
 *        type must be an entity type
 * @return the new from-element for the collection's element entity
 * @throws IllegalArgumentException if the collection's elements are not entities
 */
FromElement createElementJoin(QueryableCollection queryableCollection) throws SemanticException {
    FromElement elem;
    implied = true; //TODO: always true for now, but not if we later decide to support elements() in the from clause
    inElementsFunction = true;
    Type elementType = queryableCollection.getElementType();
    if ( !elementType.isEntityType() ) {
        throw new IllegalArgumentException( "Cannot create element join for a collection of non-entities!" );
    }
    this.queryableCollection = queryableCollection;
    SessionFactoryHelper sfh = fromClause.getSessionFactoryHelper();
    FromElement destination = null;
    String tableAlias = null;
    EntityPersister entityPersister = queryableCollection.getElementPersister();
    tableAlias = fromClause.getAliasGenerator().createName( entityPersister.getEntityName() );
    String associatedEntityName = entityPersister.getEntityName();
    EntityPersister targetEntityPersister = sfh.requireClassPersister( associatedEntityName );
    // Create the FROM element for the target (the elements of the collection).
    destination = createAndAddFromElement(
            associatedEntityName,
            classAlias,
            targetEntityPersister,
            ( EntityType ) queryableCollection.getElementType(),
            tableAlias
    );
    // If the join is implied, then don't include sub-classes on the element.
    if ( implied ) {
        destination.setIncludeSubclasses( false );
    }
    fromClause.addCollectionJoinFromElementByPath( path, destination );
//		origin.addDestination(destination);
    // Add the query spaces.
    fromClause.getWalker().addQuerySpaces( entityPersister.getQuerySpaces() );
    CollectionType type = queryableCollection.getCollectionType();
    String role = type.getRole();
    // The origin's table alias doubles as the collection-table alias below.
    String roleAlias = origin.getTableAlias();
    String[] targetColumns = sfh.getCollectionElementColumns( role, roleAlias );
    AssociationType elementAssociationType = sfh.getElementAssociationType( type );
    // Create the join element under the from element.
    int joinType = JoinFragment.INNER_JOIN;
    JoinSequence joinSequence = sfh.createJoinSequence( implied, elementAssociationType, tableAlias, joinType, targetColumns );
    elem = initializeJoin( path, destination, joinSequence, targetColumns, origin, false );
    elem.setUseFromFragment( true ); // The associated entity is implied, but it must be included in the FROM.
    elem.setCollectionTableAlias( roleAlias ); // The collection alias is the role.
    return elem;
}
/**
 * Creates a from-element that joins directly to the collection table itself
 * (as opposed to joining to the collection's element entity).
 *
 * @param collectionJoinSequence the join sequence leading to the collection table
 * @param tableAlias the SQL alias for the collection table
 * @return the initialized collection-join from-element
 * @throws SemanticException if the collection's elements are themselves collections
 */
private FromElement createCollectionJoin(JoinSequence collectionJoinSequence, String tableAlias) throws SemanticException {
String text = queryableCollection.getTableName();
AST ast = createFromElement( text );
FromElement destination = ( FromElement ) ast;
Type elementType = queryableCollection.getElementType();
if ( elementType.isCollectionType() ) {
throw new SemanticException( "Collections of collections are not supported!" );
}
destination.initializeCollection( fromClause, classAlias, tableAlias );
destination.setType( JOIN_FRAGMENT ); // Tag this node as a JOIN.
destination.setIncludeSubclasses( false ); // Don't include subclasses in the join.
destination.setCollectionJoin( true ); // This is a collection join.
destination.setJoinSequence( collectionJoinSequence );
destination.setOrigin( origin, false );
destination.setCollectionTableAlias(tableAlias);
// origin.addDestination( destination );
// This was the cause of HHH-242
// origin.setType( FROM_FRAGMENT ); // Set the parent node type so that the AST is properly formed.
origin.setText( "" ); // The destination node will have all the FROM text.
origin.setCollectionJoin( true ); // The parent node is a collection join too (voodoo - see JoinProcessor)
fromClause.addCollectionJoinFromElementByPath( path, destination );
fromClause.getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() );
return destination;
}
/**
 * Creates a from-element for an association to the elements of the current collection.
 * One-to-many associations join directly to the element entity; otherwise a
 * many-to-many join through the association table is created.
 *
 * @param role the collection role name
 * @param roleAlias the SQL alias of the collection (role) table
 * @param joinType the SQL join type (see JoinFragment constants)
 * @return the initialized association from-element
 * @throws SemanticException if the join cannot be created
 */
private FromElement createEntityAssociation(
String role,
String roleAlias,
int joinType) throws SemanticException {
FromElement elem;
Queryable entityPersister = ( Queryable ) queryableCollection.getElementPersister();
String associatedEntityName = entityPersister.getEntityName();
// Get the class name of the associated entity.
if ( queryableCollection.isOneToMany() ) {
if ( log.isDebugEnabled() ) {
log.debug( "createEntityAssociation() : One to many - path = " + path + " role = " + role + " associatedEntityName = " + associatedEntityName );
}
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
elem = createJoin( associatedEntityName, roleAlias, joinSequence, ( EntityType ) queryableCollection.getElementType(), false );
}
else {
// NOTE(review): this log label reads "createManyToMany()" although we are still in
// createEntityAssociation(); it appears to describe the delegation target below.
if ( log.isDebugEnabled() ) {
log.debug( "createManyToMany() : path = " + path + " role = " + role + " associatedEntityName = " + associatedEntityName );
}
elem = createManyToMany( role, associatedEntityName,
roleAlias, entityPersister, ( EntityType ) queryableCollection.getElementType(), joinType );
fromClause.getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() );
}
elem.setCollectionTableAlias( roleAlias );
return elem;
}
/**
 * Resolves the persister for the target entity, builds its FROM element, and wires
 * the supplied join sequence and this factory's columns into it.
 *
 * @param entityClass the target entity name
 * @param tableAlias the SQL alias for the target entity's table
 * @param joinSequence the join sequence leading to the target
 * @param type the entity type of the association
 * @param manyToMany whether this is a many-to-many join
 * @return the initialized join from-element
 * @throws SemanticException if the persister cannot be resolved
 */
private FromElement createJoin(
		String entityClass,
		String tableAlias,
		JoinSequence joinSequence,
		EntityType type,
		boolean manyToMany) throws SemanticException {
	SessionFactoryHelper helper = fromClause.getSessionFactoryHelper();
	EntityPersister persister = helper.requireClassPersister( entityClass );
	FromElement target = createAndAddFromElement( entityClass, classAlias, persister, type, tableAlias );
	return initializeJoin( path, target, joinSequence, getColumns(), origin, manyToMany );
}
/**
 * Creates the from-element(s) for a many-to-many association. Inside an elements()
 * function only the final join to the destination table is needed; for an explicit
 * many-to-many, a second join is added from the intermediate (association) table to
 * the destination table.
 *
 * @param role the collection role name
 * @param associatedEntityName the entity name of the association target
 * @param roleAlias the SQL alias of the association (role) table
 * @param entityPersister persister for the target entity
 * @param type the entity type of the collection elements
 * @param joinType the SQL join type (see JoinFragment constants)
 * @return the initialized from-element for the destination entity
 * @throws SemanticException if the join cannot be created
 */
private FromElement createManyToMany(
String role,
String associatedEntityName,
String roleAlias,
Queryable entityPersister,
EntityType type,
int joinType) throws SemanticException {
FromElement elem;
SessionFactoryHelper sfh = fromClause.getSessionFactoryHelper();
if ( inElementsFunction /*implied*/ ) {
// For implied many-to-many, just add the end join.
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
elem = createJoin( associatedEntityName, roleAlias, joinSequence, type, true );
}
else {
// For an explicit many-to-many relationship, add a second join from the intermediate
// (many-to-many) table to the destination table. Also, make sure that the from element's
// idea of the destination is the destination table.
String tableAlias = fromClause.getAliasGenerator().createName( entityPersister.getEntityName() );
String[] secondJoinColumns = sfh.getCollectionElementColumns( role, roleAlias );
// Add the second join, the one that ends in the destination table.
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
joinSequence.addJoin( sfh.getElementAssociationType( collectionType ), tableAlias, joinType, secondJoinColumns );
elem = createJoin( associatedEntityName, tableAlias, joinSequence, type, false );
elem.setUseFromFragment( true );
}
return elem;
}
/**
 * Builds a join sequence from the current collection type over this factory's columns.
 *
 * @param roleAlias the SQL alias of the collection (role) table
 * @param joinType the SQL join type (see JoinFragment constants)
 * @return the new join sequence
 * @throws IllegalStateException if the collection type (or the columns) has not been set
 */
private JoinSequence createJoinSequence(String roleAlias, int joinType) {
	SessionFactoryHelper helper = fromClause.getSessionFactoryHelper();
	// getColumns() validates that foreign key columns were supplied.
	String[] joinColumns = getColumns();
	if ( collectionType == null ) {
		throw new IllegalStateException( "collectionType is null!" );
	}
	return helper.createJoinSequence( implied, collectionType, roleAlias, joinType, joinColumns );
}
/**
 * Creates a FROM element for the given entity and registers it with the from clause.
 *
 * @param className the entity class name
 * @param classAlias the HQL alias for the entity, may be null
 * @param entityPersister persister for the entity; must implement Joinable
 * @param type the entity type
 * @param tableAlias the SQL table alias, or null to have one generated
 * @return the newly created and initialized from-element
 * @throws IllegalArgumentException if the persister does not implement Joinable
 */
private FromElement createAndAddFromElement(
		String className,
		String classAlias,
		EntityPersister entityPersister,
		EntityType type,
		String tableAlias) {
	// The persister must expose a table name, i.e. implement Joinable.
	if ( !( entityPersister instanceof Joinable ) ) {
		throw new IllegalArgumentException( "EntityPersister " + entityPersister + " does not implement Joinable!" );
	}
	FromElement created = createFromElement( entityPersister );
	initializeAndAddFromElement( created, className, classAlias, entityPersister, type, tableAlias );
	return created;
}
/**
 * Initializes the given from-element as an entity, generating a table alias from the
 * entity name when none was supplied.
 *
 * @param element the from-element to initialize
 * @param className the entity class name
 * @param classAlias the HQL alias for the entity, may be null
 * @param entityPersister persister for the entity
 * @param type the entity type
 * @param tableAlias the SQL table alias, or null to have one generated
 */
private void initializeAndAddFromElement(
		FromElement element,
		String className,
		String classAlias,
		EntityPersister entityPersister,
		EntityType type,
		String tableAlias) {
	String alias = tableAlias;
	if ( alias == null ) {
		// No explicit alias supplied; generate one from the entity name.
		alias = fromClause.getAliasGenerator().createName( entityPersister.getEntityName() );
	}
	element.initializeEntity( fromClause, className, entityPersister, type, classAlias, alias );
}
/**
 * Creates a from-element AST node for the given persister's table.
 *
 * @param entityPersister the persister (must be a Joinable; callers check this)
 * @return the new from-element
 */
private FromElement createFromElement(EntityPersister entityPersister) {
	// The factory produces a FromElement subclass; build it from the persister's table name.
	String tableName = ( ( Joinable ) entityPersister ).getTableName();
	return ( FromElement ) createFromElement( tableName );
}
/**
 * Creates the raw AST node for a FROM element with the given text.
 *
 * @param text the node text (typically a table name)
 * @return the new AST node, typed as FROM_FRAGMENT
 */
private AST createFromElement(String text) {
	// Choose IMPLIED_FROM vs FROM_FRAGMENT so the AST factory instantiates the desired subclass.
	int nodeType = implied ? IMPLIED_FROM : FROM_FRAGMENT;
	AST ast = ASTUtil.create( fromClause.getASTFactory(), nodeType, text );
	// Reset the node type: the rest of the system expects FROM_FRAGMENT; the temporary type
	// only existed to select the subclass. This may be reset again later to simplify SQL generation.
	ast.setType( FROM_FRAGMENT );
	return ast;
}
/**
 * Tags the destination as a JOIN node, attaches the join sequence, columns and origin,
 * and registers the element by path so later references to the same path reuse it.
 *
 * @param path the property path this join corresponds to
 * @param destination the from-element being turned into a join
 * @param joinSequence the join sequence to attach
 * @param columns the foreign key columns for the join
 * @param origin the from-element the join originates from
 * @param manyToMany whether this is a many-to-many join
 * @return the destination element, now initialized as a join
 */
private FromElement initializeJoin(
String path,
FromElement destination,
JoinSequence joinSequence,
String[] columns,
FromElement origin,
boolean manyToMany) {
destination.setType( JOIN_FRAGMENT );
destination.setJoinSequence( joinSequence );
destination.setColumns( columns );
destination.setOrigin( origin, manyToMany );
fromClause.addJoinByPathMap( path, destination );
return destination;
}
/**
 * Returns the foreign key columns supplied to this factory.
 *
 * @return the foreign key column names
 * @throws IllegalStateException if no columns were supplied
 */
private String[] getColumns() {
	if ( columns == null ) {
		// Fixed typo in the original message ("foriegn").
		throw new IllegalStateException( "No foreign key columns were supplied!" );
	}
	return columns;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
/**
* TaskAttemptID represents the immutable and unique identifier for a task
* attempt. Each task attempt is one particular instance of a Map or Reduce Task
* identified by its TaskID.
*
* TaskAttemptID consists of 2 parts. First part is the {@link TaskID}, that
* this TaskAttemptID belongs to. Second part is the task attempt number. <br>
* An example TaskAttemptID is :
* <code>attempt_200707121733_0003_m_000005_0</code> , which represents the
* zeroth task attempt for the fifth map task in the third job running at the
* jobtracker started at <code>200707121733</code>.
* <p>
* Applications should never construct or parse TaskAttemptID strings , but
* rather use appropriate constructors or {@link #forName(String)} method.
*
* @see JobID
* @see TaskID
*/
public class TaskAttemptID extends ID {
	private static final String ATTEMPT = "attempt";
	private TaskID taskId;
	private static final char UNDERSCORE = '_';

	/**
	 * Constructs a TaskAttemptID object from given {@link TaskID}.
	 *
	 * @param taskId TaskID that this task attempt belongs to
	 * @param id the task attempt number
	 * @throws IllegalArgumentException if taskId is null
	 */
	public TaskAttemptID(TaskID taskId, int id) {
		super(id);
		if (taskId == null) {
			throw new IllegalArgumentException("taskId cannot be null");
		}
		this.taskId = taskId;
	}

	/**
	 * Constructs a TaskAttemptID object from given parts.
	 *
	 * @param jtIdentifier jobTracker identifier
	 * @param jobId job number
	 * @param isMap whether the tip is a map
	 * @param taskId taskId number
	 * @param id the task attempt number
	 */
	public TaskAttemptID(String jtIdentifier, int jobId, boolean isMap, int taskId, int id) {
		this(new TaskID(jtIdentifier, jobId, isMap, taskId), id);
	}

	/** No-arg constructor used only by {@link #read(DataInput)} for deserialization. */
	private TaskAttemptID() {
	}

	/** Returns the {@link JobID} object that this task attempt belongs to */
	public JobID getJobID() {
		return taskId.getJobID();
	}

	/** Returns the {@link TaskID} object that this task attempt belongs to */
	public TaskID getTaskID() {
		return taskId;
	}

	/** Returns whether this TaskAttemptID is a map ID */
	public boolean isMap() {
		return taskId.isMap();
	}

	@Override
	public boolean equals(Object o) {
		// Exact class match (not instanceof) keeps equals symmetric across ID subclasses.
		if (o == null || !o.getClass().equals(TaskAttemptID.class)) {
			return false;
		}
		TaskAttemptID that = (TaskAttemptID) o;
		return this.id == that.id && this.taskId.equals(that.taskId);
	}

	/** Compare TaskIds by first tipIds, then by task numbers. */
	@Override
	public int compareTo(ID o) {
		TaskAttemptID that = (TaskAttemptID) o;
		int tipComp = this.taskId.compareTo(that.taskId);
		if (tipComp != 0) {
			return tipComp;
		}
		// Integer.compare avoids the overflow risk of returning this.id - that.id.
		return Integer.compare(this.id, that.id);
	}

	@Override
	public String toString() {
		// Full form: "attempt_" + the prefix-less representation.
		return new StringBuilder().append(ATTEMPT).append(UNDERSCORE).append(toStringWOPrefix()).toString();
	}

	/** Returns the string form without the leading "attempt" prefix. */
	StringBuilder toStringWOPrefix() {
		StringBuilder builder = new StringBuilder();
		return builder.append(taskId.toStringWOPrefix()).append(UNDERSCORE).append(id);
	}

	@Override
	public int hashCode() {
		// Hash the prefix-less string form; consistent with equals(), which compares
		// the same components (taskId and id).
		return toStringWOPrefix().toString().hashCode();
	}

	@Override
	public void readFields(DataInput in) throws IOException {
		super.readFields(in);
		this.taskId = TaskID.read(in);
	}

	@Override
	public void write(DataOutput out) throws IOException {
		super.write(out);
		taskId.write(out);
	}

	/** Reads and returns a TaskAttemptID from the given input. */
	public static TaskAttemptID read(DataInput in) throws IOException {
		TaskAttemptID taskId = new TaskAttemptID();
		taskId.readFields(in);
		return taskId;
	}

	/**
	 * Construct a TaskAttemptID object from given string
	 *
	 * @return constructed TaskAttemptID object or null if the given String is
	 *         null
	 * @throws IllegalArgumentException
	 *         if the given string is malformed
	 */
	public static TaskAttemptID forName(String str) throws IllegalArgumentException {
		if (str == null) {
			return null;
		}
		try {
			String[] parts = str.split("_");
			if (parts.length == 6 && parts[0].equals(ATTEMPT)) {
				// parts[3] must be "m" (map) or "r" (reduce); anything else is malformed.
				if (parts[3].equals("m") || parts[3].equals("r")) {
					boolean isMap = parts[3].equals("m");
					return new TaskAttemptID(parts[1], Integer.parseInt(parts[2]), isMap,
							Integer.parseInt(parts[4]), Integer.parseInt(parts[5]));
				}
			}
		} catch (NumberFormatException ignored) {
			// Non-numeric job/task/attempt component; fall through to the error below.
		}
		throw new IllegalArgumentException("TaskAttemptId string : " + str + " is not properly formed");
	}

	/**
	 * Returns a regex pattern which matches task attempt IDs. Arguments can be
	 * given null, in which case that part of the regex will be generic. For
	 * example to obtain a regex matching <i>all task attempt IDs</i> of <i>any
	 * jobtracker</i>, in <i>any job</i>, of the <i>first map task</i>, we would
	 * use :
	 *
	 * <pre>
	 * TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null);
	 * </pre>
	 *
	 * which will return :
	 *
	 * <pre>
	 * "attempt_[ˆ_]*_[0-9]*_m_000001_[0-9]*"
	 * </pre>
	 *
	 * @param jtIdentifier jobTracker identifier, or null
	 * @param jobId job number, or null
	 * @param isMap whether the tip is a map, or null
	 * @param taskId taskId number, or null
	 * @param attemptId the task attempt number, or null
	 * @return a regex pattern matching TaskAttemptIDs
	 */
	public static String getTaskAttemptIDsPattern(String jtIdentifier, Integer jobId, Boolean isMap, Integer taskId,
			Integer attemptId) {
		StringBuilder builder = new StringBuilder(ATTEMPT).append(UNDERSCORE);
		builder.append(getTaskAttemptIDsPatternWOPrefix(jtIdentifier, jobId, isMap, taskId, attemptId));
		return builder.toString();
	}

	/** Builds the regex pattern without the leading "attempt_" prefix. */
	static StringBuilder getTaskAttemptIDsPatternWOPrefix(String jtIdentifier, Integer jobId, Boolean isMap,
			Integer taskId, Integer attemptId) {
		StringBuilder builder = new StringBuilder();
		builder.append(TaskID.getTaskIDsPatternWOPrefix(jtIdentifier, jobId, isMap, taskId)).append(UNDERSCORE)
				.append(attemptId != null ? attemptId : "[0-9]*");
		return builder;
	}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.service.graph;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
import org.jgrapht.graph.AbstractBaseGraph;
import org.jgrapht.graph.DefaultGraphType;
/**
* Basic graph implementation for a directed graph whose vertices and edges support attributes.
* <P>
* The graph can be configured as to how to handle multiple edges with the same source and destination
* vertices. One option is to simply allow multiple edges. The second option is to collapse
* duplicate edges such that there is only ever one edge with the same
* source and destination. In this case, each additional duplicate edge added will cause the
* edge to have a "Weight" attribute that will be the total number of edges that were added
* to the same source/destination vertex pair.
*/
public class AttributedGraph extends AbstractBaseGraph<AttributedVertex, AttributedEdge> {
	public static final String WEIGHT = "Weight";

	// Lookup from vertex id to vertex; kept in sync by the addVertex overrides below.
	private Map<String, AttributedVertex> vertexMap = new HashMap<>();
	private final boolean collapseDuplicateEdges;
	private String name;
	private GraphType type;
	private String description;

	/**
	 * Create a new empty AttributedGraph that automatically collapses duplicate edges
	 *
	 * @param name the name of the graph
	 * @param type the {@link GraphType} which defines valid vertex and edge types.
	 */
	public AttributedGraph(String name, GraphType type) {
		this(name, type, name, true);
	}

	/**
	 * Create a new empty AttributedGraph that automatically collapses duplicate edges
	 *
	 * @param name the name of the graph
	 * @param type the {@link GraphType} which defines valid vertex and edge types.
	 * @param description a description of the graph
	 */
	public AttributedGraph(String name, GraphType type, String description) {
		this(name, type, description, true);
	}

	/**
	 * Create a new empty AttributedGraph.
	 *
	 * @param name the name of the graph
	 * @param type the {@link GraphType} which defines valid vertex and edge types.
	 * @param description a description of the graph
	 * @param collapseDuplicateEdges if true, duplicate edges will be collapsed into a single
	 * edge with a "Weight" attribute whose value is the number of edges between those vertices.
	 */
	public AttributedGraph(String name, GraphType type, String description,
			boolean collapseDuplicateEdges) {
		super(new VertexSupplier(), new EdgeSupplier(), DefaultGraphType.directedPseudograph());
		this.name = name;
		this.type = type;
		this.description = description;
		this.collapseDuplicateEdges = collapseDuplicateEdges;
	}

	/**
	 * Returns the name of the graph
	 * @return the name of the graph
	 */
	public String getName() {
		return name;
	}

	/**
	 * Returns a description of the graph
	 * @return a description of the graph
	 */
	public String getDescription() {
		return description;
	}

	/**
	 * Returns the {@link GraphType} for this graph
	 * @return the {@link GraphType} for this graph
	 */
	public GraphType getGraphType() {
		return type;
	}

	/**
	 * Adds a new vertex with the given id. The vertex's name will be the same as the id.
	 * If a vertex already exists with that id,
	 * then that vertex will be returned.
	 *
	 * @param id the unique vertex id that the graph should have a vertex for.
	 * @return either an existing vertex with that id, or a newly added vertex with that id
	 */
	public AttributedVertex addVertex(String id) {
		return addVertex(id, id);
	}

	/**
	 * Adds a new vertex with the given id and name. If a vertex already exists with that id,
	 * then that vertex will be returned, but with its name changed to the given name.
	 *
	 * @param id the unique vertex id that the graph should have a vertex for.
	 * @param name the name to associate with this vertex
	 * @return either an existing vertex with that id, or a newly added vertex with that id
	 */
	public AttributedVertex addVertex(String id, String name) {
		if (vertexMap.containsKey(id)) {
			AttributedVertex vertex = vertexMap.get(id);
			vertex.setName(name);
			return vertex;
		}
		AttributedVertex newVertex = new AttributedVertex(id, name);
		addVertex(newVertex);
		return newVertex;
	}

	@Override
	public AttributedVertex addVertex() {
		AttributedVertex vertex = super.addVertex();
		vertexMap.put(vertex.getId(), vertex);
		return vertex;
	}

	@Override
	public boolean addVertex(AttributedVertex vertex) {
		if (super.addVertex(vertex)) {
			vertexMap.put(vertex.getId(), vertex);
			return true;
		}
		return false;
	}

	/**
	 * Creates and adds a new directed edge with the given id between the given source and
	 * target vertices. If the graph is set to collapse duplicate edges and an edge for that
	 * source and target exists, then the existing edge will be returned with its "Weight"
	 * attribute set to the total number of edges that have been added between the source and
	 * target vertices.
	 *
	 * @param source the source vertex of the directed edge to be created.
	 * @param target the target vertex of the directed edge to be created.
	 * @param edgeId the id to use for the new edge. Note: if this is a duplicate and edges
	 * are being collapsed, then this edgeId will not be used.
	 * @return a new edge between the source and target if it is the first one or the graph is
	 * not collapsing edges. Otherwise, an existing edge with its "Weight" attribute set accordingly.
	 */
	public AttributedEdge addEdge(AttributedVertex source, AttributedVertex target, String edgeId) {
		AttributedEdge basicEdge = new AttributedEdge(edgeId);
		addEdge(source, target, basicEdge);
		if (collapseDuplicateEdges) {
			// BUG FIX: when collapsing, the add above may have folded into an existing edge;
			// return the edge actually present in the graph, per the documented contract.
			// (The original returned the unused basicEdge in that case.)
			AttributedEdge existing = getEdge(source, target);
			if (existing != null) {
				return existing;
			}
		}
		return basicEdge;
	}

	/**
	 * Creates and adds a new directed edge with the given edge object. If the graph is set to
	 * collapse duplicate edges and an edge for that
	 * source and target exists, then the existing edge's "Weight" attribute is incremented
	 * instead of adding the given edge.
	 *
	 * @param source the source vertex of the directed edge to be created.
	 * @param target the target vertex of the directed edge to be created.
	 * @param edge the BasicEdge object to use for the new edge. Note: if this is a duplicate and
	 * edges are being collapsed, then this edge object will not be used.
	 * @return true if the edge was added. Note that if this graph is collapsing duplicate edges, then
	 * it will always return true.
	 */
	@Override
	public boolean addEdge(AttributedVertex source, AttributedVertex target, AttributedEdge edge) {
		ensureInGraph(source);
		ensureInGraph(target);
		if (collapseDuplicateEdges) {
			AttributedEdge existingEdge = getEdge(source, target);
			if (existingEdge != null) {
				incrementWeightProperty(existingEdge);
				return true;
			}
		}
		return super.addEdge(source, target, edge);
	}

	/**
	 * Creates and adds a new directed edge between the given source and
	 * target vertices. If the graph is set to collapse duplicate edges and an edge for that
	 * source and target exists, then the existing edge will be returned with its "Weight"
	 * attribute set to the total number of edges that have been added between the source and
	 * target vertices.
	 *
	 * @param source the source vertex of the directed edge to be created.
	 * @param target the target vertex of the directed edge to be created.
	 * @return a new edge between the source and target if it is the first one or the graph is
	 * not collapsing edges. Otherwise, an existing edge with its "Weight" attribute set accordingly.
	 */
	@Override
	public AttributedEdge addEdge(AttributedVertex source, AttributedVertex target) {
		ensureInGraph(source);
		ensureInGraph(target);
		if (collapseDuplicateEdges) {
			AttributedEdge edge = getEdge(source, target);
			if (edge != null) {
				incrementWeightProperty(edge);
				return edge;
			}
		}
		return super.addEdge(source, target);
	}

	/**
	 * Returns the total number of edges in the graph
	 * @return the total number of edges in the graph
	 */
	public int getEdgeCount() {
		return edgeSet().size();
	}

	/**
	 * Returns the total number of vertices in the graph
	 * @return the total number of vertices in the graph
	 */
	public int getVertexCount() {
		return vertexSet().size();
	}

	/**
	 * Returns the vertex with the given vertex id
	 * @param vertexId the id of the vertex to retrieve
	 * @return the vertex with the given vertex id or null if none found
	 */
	public AttributedVertex getVertex(String vertexId) {
		return vertexMap.get(vertexId);
	}

	// Adds the vertex if it has not been added yet, so edge insertion never fails
	// on an unknown endpoint.
	private void ensureInGraph(AttributedVertex vertex) {
		if (!containsVertex(vertex)) {
			addVertex(vertex);
		}
	}

	// Bumps the edge's "Weight" attribute; a collapsed duplicate starts at "2"
	// (the original edge plus the first duplicate).
	private static void incrementWeightProperty(AttributedEdge edge) {
		if (edge.hasAttribute(WEIGHT)) {
			String weightString = edge.getAttribute(WEIGHT);
			edge.setAttribute(WEIGHT, incrementWeightStringValue(weightString));
		}
		else {
			edge.setAttribute(WEIGHT, "2");
		}
	}

	private static String incrementWeightStringValue(String value) {
		int weight = Integer.parseInt(value);
		weight++;
		return Integer.toString(weight);
	}

	/**
	 * Default VertexSupplier that uses a simple one up number for default vertex ids
	 */
	private static class VertexSupplier implements Supplier<AttributedVertex> {
		long nextId = 1;

		@Override
		public AttributedVertex get() {
			return new AttributedVertex(Long.toString(nextId++));
		}
	}

	/**
	 * Default EdgeSupplier that uses a simple one up number for default edge ids
	 */
	private static class EdgeSupplier implements Supplier<AttributedEdge> {
		long nextId = 1;

		@Override
		public AttributedEdge get() {
			return new AttributedEdge(Long.toString(nextId++));
		}
	}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.plugins.groovy;
import com.intellij.codeInsight.actions.ReformatCodeProcessor;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.completion.impl.NegatingComparable;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.codeInsight.lookup.LookupElementWeigher;
import com.intellij.icons.AllIcons;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlTag;
import com.intellij.psi.xml.XmlText;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xml.impl.GenericDomValueReference;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.dom.MavenVersionComparable;
import org.jetbrains.idea.maven.dom.converters.MavenDependencyCompletionUtil;
import org.jetbrains.idea.maven.indices.MavenProjectIndicesManager;
import org.jetbrains.idea.maven.model.MavenConstants;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import org.jetbrains.idea.maven.utils.library.RepositoryLibraryDescription;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrMethodCall;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.path.GrMethodCallExpression;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
* @author Vladislav.Soroka
* @since 8/30/2016
*/
public class MavenGroovyPomCompletionContributor extends CompletionContributor {
public static final Key<VirtualFile> ORIGINAL_POM_FILE = Key.create("ORIGINAL_POM_FILE");
/**
 * Provides completion inside Groovy-based Maven POM scripts. The enclosing Groovy
 * method-call chain is mapped onto a synthetic XML pom.xml fragment so the XML-side
 * completion machinery can be reused, and dependency/version suggestions are added
 * directly from the local Maven indices.
 */
@Override
public void fillCompletionVariants(@NotNull CompletionParameters parameters, @NotNull CompletionResultSet result) {
final PsiElement position = parameters.getPosition();
if (!(position instanceof LeafElement)) return;
Project project = position.getProject();
VirtualFile virtualFile = parameters.getOriginalFile().getVirtualFile();
if(virtualFile == null) return;
// Only complete inside files that belong to a known Maven project.
MavenProject mavenProject = MavenProjectsManager.getInstance(project).findProject(virtualFile);
if (mavenProject == null) return;
// The chain of enclosing Groovy method calls mirrors the XML tag nesting of a pom.xml.
List<String> methodCallInfo = MavenGroovyPomUtil.getGroovyMethodCalls(position);
if (methodCallInfo.isEmpty()) return;
// Build a synthetic XML snippet: opening tags in call order, then the reversed list
// appended as self-closing tags.
StringBuilder buf = new StringBuilder();
for (String s : methodCallInfo) {
buf.append('<').append(s).append('>');
}
for (String s : ContainerUtil.reverse(methodCallInfo)) {
buf.append('<').append(s).append("/>");
}
PsiFile psiFile = PsiFileFactory.getInstance(project).createFileFromText(MavenConstants.POM_XML, XMLLanguage.INSTANCE, buf);
psiFile.putUserData(ORIGINAL_POM_FILE, virtualFile);
List<Object> variants = ContainerUtil.newArrayList();
String lastMethodCall = ContainerUtil.getLastItem(methodCallInfo);
Ref<Boolean> completeDependency = Ref.create(false);
Ref<Boolean> completeVersion = Ref.create(false);
// Walk the synthetic XML to decide whether we are completing a dependency coordinate,
// a version, or should fall back to generic DOM-reference variants.
psiFile.accept(new PsiRecursiveElementVisitor(true) {
@Override
public void visitElement(PsiElement element) {
super.visitElement(element);
if (!completeDependency.get() && element.getParent() instanceof XmlTag &&
"dependency".equals(((XmlTag)element.getParent()).getName())) {
if ("artifactId".equals(lastMethodCall) || "groupId".equals(lastMethodCall)) {
completeDependency.set(true);
}
else if ("version".equals(lastMethodCall) || "dependency".equals(lastMethodCall)) {
completeVersion.set(true);
//completeDependency.set(true);
}
}
if (!completeDependency.get() && !completeVersion.get()) {
PsiReference[] references = getReferences(element);
for (PsiReference each : references) {
if (each instanceof GenericDomValueReference) {
Collections.addAll(variants, each.getVariants());
}
}
}
}
});
for (Object variant : variants) {
if (variant instanceof LookupElement) {
result.addElement((LookupElement)variant);
}
else {
result.addElement(LookupElementBuilder.create(variant));
}
}
if (completeDependency.get()) {
// Offer "groupId:artifactId" pairs from the local Maven indices.
MavenProjectIndicesManager indicesManager = MavenProjectIndicesManager.getInstance(project);
for (String groupId : indicesManager.getGroupIds()) {
for (String artifactId : indicesManager.getArtifactIds(groupId)) {
LookupElement builder = LookupElementBuilder.create(groupId + ':' + artifactId)
.withIcon(AllIcons.Nodes.PpLib).withInsertHandler(MavenDependencyInsertHandler.INSTANCE);
result.addElement(builder);
}
}
}
if (completeVersion.get()) {
consumeDependencyElement(position, closableBlock -> {
// Closure notation: dependency { groupId '...'; artifactId '...' } — collect the parts.
String groupId = null;
String artifactId = null;
for (GrMethodCall methodCall : PsiTreeUtil.findChildrenOfType(closableBlock, GrMethodCall.class)) {
GroovyPsiElement[] arguments = methodCall.getArgumentList().getAllArguments();
if (arguments.length != 1) continue;
PsiReference reference = arguments[0].getReference();
if (reference == null) continue;
String callExpression = methodCall.getInvokedExpression().getText();
String argumentValue = reference.getCanonicalText();
if ("groupId".equals(callExpression)) {
groupId = argumentValue;
}
else if ("artifactId".equals(callExpression)) {
artifactId = argumentValue;
}
}
completeVersions(result, project, groupId, artifactId, "");
}, element -> {
// String notation: dependency 'group:artifact:version' — parse the coordinates.
if (element.getParent() instanceof PsiLiteral) {
Object value = ((PsiLiteral)element.getParent()).getValue();
if (value == null) return;
String[] mavenCoordinates = value.toString().split(":");
if (mavenCoordinates.length < 3) return;
String prefix = mavenCoordinates[0] + ':' + mavenCoordinates[1] + ':';
completeVersions(result, project, mavenCoordinates[0], mavenCoordinates[1], prefix);
}
});
}
}
/**
 * Adds version lookup elements for the given group/artifact coordinates, sorted so
 * that newer Maven versions rank higher, and always offers the symbolic RELEASE and
 * LATEST version ids.
 *
 * @param completionResultSet the result set to add version elements to
 * @param project the current project (used to reach the Maven indices)
 * @param groupId the group id, may be null or blank (no index lookup is done then)
 * @param artifactId the artifact id; if blank, nothing is added
 * @param prefix text to prepend to each version (e.g. "group:artifact:"), may be empty
 */
private static void completeVersions(@NotNull CompletionResultSet completionResultSet,
@NotNull Project project,
@Nullable String groupId,
@Nullable String artifactId,
@NotNull String prefix) {
if (StringUtil.isEmptyOrSpaces(artifactId)) return;
// Sort by descending Maven version: NegatingComparable inverts the natural version order.
CompletionResultSet newResultSet = completionResultSet.withRelevanceSorter(CompletionService.getCompletionService().emptySorter().weigh(
new LookupElementWeigher("mavenVersionWeigher") {
@Nullable
@Override
public Comparable weigh(@NotNull LookupElement element) {
return new NegatingComparable(new MavenVersionComparable(StringUtil.trimStart(element.getLookupString(), prefix)));
}
}));
MavenProjectIndicesManager indicesManager = MavenProjectIndicesManager.getInstance(project);
Set<String> versions;
if (StringUtil.isEmptyOrSpaces(groupId)) {
// Without a groupId, no index lookup is performed (see the commented-out fallback below).
versions = Collections.emptySet();
//if (!(coordinates instanceof MavenDomPlugin)) return;
//
//versions = indicesManager.getVersions(MavenArtifactUtil.DEFAULT_GROUPS[0], artifactId);
//for (int i = 0; i < MavenArtifactUtil.DEFAULT_GROUPS.length; i++) {
// versions = Sets.union(versions, indicesManager.getVersions(MavenArtifactUtil.DEFAULT_GROUPS[i], artifactId));
//}
}
else {
versions = indicesManager.getVersions(groupId, artifactId);
}
for (String version : versions) {
newResultSet.addElement(LookupElementBuilder.create(prefix + version));
}
// Always offer the symbolic RELEASE/LATEST ids in addition to concrete versions.
newResultSet.addElement(LookupElementBuilder.create(prefix + RepositoryLibraryDescription.ReleaseVersionId));
newResultSet.addElement(LookupElementBuilder.create(prefix + RepositoryLibraryDescription.LatestVersionId));
}
/**
 * Returns the PSI references applicable to {@code psiElement}.
 * {@link XmlText} nodes do not carry references themselves, so for them the
 * parent element's references are returned instead.
 */
@NotNull
private static PsiReference[] getReferences(PsiElement psiElement) {
  if (psiElement instanceof XmlText) {
    return psiElement.getParent().getReferences();
  }
  return psiElement.getReferences();
}
/**
 * Dispatches {@code psiElement} to the matching consumer depending on which
 * Gradle dependency notation encloses it:
 * <ul>
 *   <li>closure notation ({@code dependency { ... }}) — the enclosing closure
 *       is handed to {@code closureNotationConsumer};</li>
 *   <li>string notation inside a {@code dependencies { ... }} block, where the
 *       element's own call is {@code dependency '...'} — the element is handed
 *       to {@code stringNotationConsumer}.</li>
 * </ul>
 * Does nothing when the element is not inside a recognized notation.
 */
private static void consumeDependencyElement(PsiElement psiElement,
                                             Consumer<GrClosableBlock> closureNotationConsumer,
                                             Consumer<PsiElement> stringNotationConsumer) {
  final GrClosableBlock enclosingClosure = PsiTreeUtil.getParentOfType(psiElement, GrClosableBlock.class);
  if (enclosingClosure == null || !(enclosingClosure.getParent() instanceof GrMethodCallExpression)) {
    return;
  }
  String invoked = ((GrMethodCallExpression)enclosingClosure.getParent()).getInvokedExpression().getText();
  if ("dependency".equals(invoked)) {
    closureNotationConsumer.consume(enclosingClosure);
  }
  else if ("dependencies".equals(invoked)) {
    GrMethodCall call = PsiTreeUtil.getParentOfType(psiElement, GrMethodCall.class);
    if (call != null && "dependency".equals(call.getInvokedExpression().getText())) {
      stringNotationConsumer.consume(psiElement);
    }
  }
}
/**
 * Insert handler for "groupId:artifactId" lookup elements. On insertion it
 * rewrites the surrounding dependency declaration (closure or string notation)
 * so that the version can be completed next, and immediately re-invokes basic
 * completion to pop up the version suggestions.
 */
private static class MavenDependencyInsertHandler implements InsertHandler<LookupElement> {

  // Stateless, so a single shared instance suffices.
  private static final InsertHandler<LookupElement> INSTANCE = new MavenDependencyInsertHandler();

  @Override
  public void handleInsert(final InsertionContext context, LookupElement item) {
    // Lookup string is expected to have the form "groupId:artifactId".
    // NOTE(review): assumes ':' is always present; idx == -1 would make
    // substring() throw — confirm all producers include the separator.
    String s = item.getLookupString();
    int idx = s.indexOf(':');
    String groupId = s.substring(0, idx);
    String artifactId = s.substring(idx + 1);
    int startOffset = context.getStartOffset();
    PsiFile psiFile = context.getFile();
    PsiElement psiElement = psiFile.findElementAt(startOffset);
    consumeDependencyElement(psiElement, closableBlock -> {
      // Closure notation: replace the whole closure with a
      // groupId/artifactId/version block.
      int textOffset = closableBlock.getTextOffset();
      String value = "{groupId '" + groupId + "'\n" +
                     "artifactId '" + artifactId + "'\n" +
                     "version ''}";
      context.getDocument().replaceString(textOffset, textOffset + closableBlock.getTextLength(), value);
      // Caret between the empty version quotes (2 chars before end: '}").
      context.getEditor().getCaretModel().moveToOffset(textOffset + value.length() - 2);
      context.commitDocument();
      new ReformatCodeProcessor(psiFile.getProject(), psiFile, closableBlock.getTextRange(), false).run();
      // Re-trigger completion so version suggestions appear right away.
      MavenDependencyCompletionUtil.invokeCompletion(context, CompletionType.BASIC);
    }, element -> {
      // String notation: rewrite the literal as 'groupId:artifactId:'.
      int textOffset = element.getTextOffset();
      String value = '\'' + groupId + ":" + artifactId + ":'";
      context.getDocument().replaceString(textOffset, textOffset + element.getTextLength(), value);
      // Caret just after the trailing ':', before the closing quote.
      context.getEditor().getCaretModel().moveToOffset(textOffset + value.length() - 1);
      MavenDependencyCompletionUtil.invokeCompletion(context, CompletionType.BASIC);
    });
  }
}
}
| |
/*******************************************************************************
* Copyright 2016 Jalian Systems Pvt. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.sourceforge.marathon.javaagent.components;
import java.util.List;
import javax.swing.DefaultListModel;
import javax.swing.JFrame;
import javax.swing.JList;
import javax.swing.SwingUtilities;
import org.testng.AssertJUnit;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import net.sourceforge.marathon.javaagent.IJavaAgent;
import net.sourceforge.marathon.javaagent.IJavaElement;
import net.sourceforge.marathon.javaagent.JavaAgent;
import net.sourceforge.marathon.json.JSONArray;
import net.sourceforge.marathon.testhelpers.ComponentUtils;
/**
 * TestNG tests for JList support in the Marathon Java agent: CSS-selector
 * based item lookup, selection by item text (including the "(n)" suffixing
 * scheme used to disambiguate duplicate item texts), and the "content"
 * attribute. The class-level @Test makes every public method a test.
 */
@Test
public class JListJavaElementTest extends JavaElementTest {

    // Agent used to locate and drive the Swing components under test.
    private IJavaAgent driver;
    // Frame hosting the JList; recreated for every test method.
    protected JFrame frame;

    /**
     * Builds, on the EDT, a frame containing a 30-item JList named "list-1",
     * then creates a fresh agent. Runs before each test method.
     */
    @BeforeMethod
    public void showDialog() throws Throwable {
        SwingUtilities.invokeAndWait(new Runnable() {
            @Override
            public void run() {
                frame = new JFrame(JListJavaElementTest.class.getName());
                frame.setName("dialog-1");
                DefaultListModel model = new DefaultListModel();
                for (int i = 1; i <= 30; i++) {
                    model.addElement("List Item - " + i);
                }
                JList list = new JList(model);
                list.setName("list-1");
                frame.getContentPane().add(list);
                frame.pack();
                frame.setAlwaysOnTop(true);
                frame.setVisible(true);
            }
        });
        driver = new JavaAgent();
    }

    /** Hides and disposes the frame on the EDT after each test method. */
    @AfterMethod
    public void disposeDriver() throws Throwable {
        SwingUtilities.invokeAndWait(new Runnable() {
            @Override
            public void run() {
                frame.setVisible(false);
                frame.dispose();
            }
        });
    }

    /**
     * Verifies item lookup through CSS selectors: nth-item, all-items, and
     * attribute-filtered all-items, plus the "content" attribute JSON shape
     * (an array of arrays — one inner array per "column").
     */
    public void cssSelector() throws Throwable {
        IJavaElement list = driver.findElementByName("list-1");
        JSONArray a = new JSONArray();
        for (int i = 1; i <= 30; i++) {
            a.put("List Item - " + i);
        }
        JSONArray b = new JSONArray();
        b.put(a);
        AssertJUnit.assertEquals(b.toString(), list.getAttribute("content"));
        IJavaElement listItem;
        List<IJavaElement> listItems;
        // nth-item is 1-based
        listItem = driver.findElementByCssSelector("#list-1::nth-item(1)");
        AssertJUnit.assertEquals("List Item - 1", listItem.getText());
        listItems = driver.findElementsByCssSelector("#list-1::all-items");
        AssertJUnit.assertEquals(30, listItems.size());
        for (int i = 0; i < 30; i++) {
            AssertJUnit.assertEquals("List Item - " + (i + 1), listItems.get(i).getText());
        }
        // all-items filtered by the text attribute should match exactly one item
        List<IJavaElement> firstItem = driver.findElementsByCssSelector("#list-1::all-items[text='List Item - 1']");
        AssertJUnit.assertEquals(1, firstItem.size());
        AssertJUnit.assertEquals("List Item - 1", firstItem.get(0).getText());
    }

    /** Selecting an empty list of items clears the selection. */
    public void selectForNoCells() {
        IJavaElement list = driver.findElementByName("list-1");
        marathon_select(list, "[]");
        String attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[]", attribute);
    }

    /** Selecting a single item by text selects its (0-based) index. */
    public void selectForSingleItem() {
        IJavaElement list = driver.findElementByName("list-1");
        marathon_select(list, "[List Item - 1]");
        String attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[0]", attribute);
    }

    /** Selecting multiple items by text selects all of their indices. */
    public void selectForMultipleItems() {
        IJavaElement list = driver.findElementByName("list-1");
        marathon_select(list, "[List Item - 1, List Item - 2]");
        String attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[0, 1]", attribute);
    }

    /**
     * With one duplicated item text, the later duplicate is addressable via
     * the "(1)" suffix ("List Item - 1(1)" = second occurrence).
     */
    public void selectForDuplicateItems() {
        IJavaElement listItem;
        String attribute;
        IJavaElement list = driver.findElementByName("list-1");
        marathon_select(list, "[List Item - 1]");
        attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[0]", attribute);
        listItem = driver.findElementByCssSelector("#list-1::nth-item(1)");
        AssertJUnit.assertEquals("List Item - 1", listItem.getText());
        // Make index 2 a duplicate of "List Item - 1".
        // NOTE(review): invokeLater queues the model change rather than
        // waiting for it — presumably the subsequent agent calls are also
        // dispatched on the EDT and therefore ordered after it; confirm,
        // otherwise this test is racy and invokeAndWait would be safer.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JList jlist = (JList) ComponentUtils.findComponent(JList.class, frame);
                DefaultListModel model = (DefaultListModel) jlist.getModel();
                model.set(2, "List Item - 1");
            }
        });
        marathon_select(list, "[List Item - 1(1)]");
        attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[2]", attribute);
        listItem = driver.findElementByCssSelector("#list-1::nth-item(3)");
        AssertJUnit.assertEquals("List Item - 1(1)", listItem.getText());
    }

    /** Same as selectForDuplicateItems, but without selecting the original first. */
    public void selectForDuplicateItemsX() {
        IJavaElement listItem;
        String attribute;
        IJavaElement list = driver.findElementByName("list-1");
        // See NOTE(review) in selectForDuplicateItems about invokeLater ordering.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JList jlist = (JList) ComponentUtils.findComponent(JList.class, frame);
                DefaultListModel model = (DefaultListModel) jlist.getModel();
                model.set(2, "List Item - 1");
            }
        });
        marathon_select(list, "[List Item - 1(1)]");
        attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[2]", attribute);
        listItem = driver.findElementByCssSelector("#list-1::nth-item(3)");
        AssertJUnit.assertEquals("List Item - 1(1)", listItem.getText());
    }

    /**
     * With two duplicates of the same text, "(1)" addresses the second
     * occurrence and "(2)" the third.
     */
    public void selectForMultipleDuplicates() {
        IJavaElement listItem;
        String attribute;
        IJavaElement list = driver.findElementByName("list-1");
        marathon_select(list, "[List Item - 1]");
        attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[0]", attribute);
        listItem = driver.findElementByCssSelector("#list-1::nth-item(1)");
        AssertJUnit.assertEquals("List Item - 1", listItem.getText());
        // Make indices 2 and 3 duplicates of "List Item - 1".
        // See NOTE(review) in selectForDuplicateItems about invokeLater ordering.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JList jlist = (JList) ComponentUtils.findComponent(JList.class, frame);
                DefaultListModel model = (DefaultListModel) jlist.getModel();
                model.set(2, "List Item - 1");
                model.set(3, "List Item - 1");
            }
        });
        marathon_select(list, "[List Item - 1(1)]");
        attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[2]", attribute);
        listItem = driver.findElementByCssSelector("#list-1::nth-item(3)");
        AssertJUnit.assertEquals("List Item - 1(1)", listItem.getText());
        marathon_select(list, "[List Item - 1(2)]");
        attribute = list.getAttribute("selectedIndices");
        AssertJUnit.assertEquals("[3]", attribute);
        listItem = driver.findElementByCssSelector("#list-1::nth-item(4)");
        AssertJUnit.assertEquals("List Item - 1(2)", listItem.getText());
        List<IJavaElement> items = list.findElementsByCssSelector(".::all-items");
        for (IJavaElement item : items) {
            System.out.println(item.getText());
        }
    }

    /** Baseline check of the "content" attribute for the unmodified list. */
    public void assertContent() {
        IJavaElement list = driver.findElementByName("list-1");
        String expected = "[[\"List Item - 1\",\"List Item - 2\",\"List Item - 3\",\"List Item - 4\",\"List Item - 5\",\"List Item - 6\",\"List Item - 7\",\"List Item - 8\",\"List Item - 9\",\"List Item - 10\",\"List Item - 11\",\"List Item - 12\",\"List Item - 13\",\"List Item - 14\",\"List Item - 15\",\"List Item - 16\",\"List Item - 17\",\"List Item - 18\",\"List Item - 19\",\"List Item - 20\",\"List Item - 21\",\"List Item - 22\",\"List Item - 23\",\"List Item - 24\",\"List Item - 25\",\"List Item - 26\",\"List Item - 27\",\"List Item - 28\",\"List Item - 29\",\"List Item - 30\"]]";
        AssertJUnit.assertEquals(expected, list.getAttribute("content"));
    }

    /** The "content" attribute suffixes a duplicated item text with "(1)". */
    public void assertContentWithDuplicates() {
        // See NOTE(review) in selectForDuplicateItems about invokeLater ordering.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JList jlist = (JList) ComponentUtils.findComponent(JList.class, frame);
                DefaultListModel model = (DefaultListModel) jlist.getModel();
                model.set(2, "List Item - 1");
            }
        });
        IJavaElement list = driver.findElementByName("list-1");
        String expected = "[[\"List Item - 1\",\"List Item - 2\",\"List Item - 1(1)\",\"List Item - 4\",\"List Item - 5\",\"List Item - 6\",\"List Item - 7\",\"List Item - 8\",\"List Item - 9\",\"List Item - 10\",\"List Item - 11\",\"List Item - 12\",\"List Item - 13\",\"List Item - 14\",\"List Item - 15\",\"List Item - 16\",\"List Item - 17\",\"List Item - 18\",\"List Item - 19\",\"List Item - 20\",\"List Item - 21\",\"List Item - 22\",\"List Item - 23\",\"List Item - 24\",\"List Item - 25\",\"List Item - 26\",\"List Item - 27\",\"List Item - 28\",\"List Item - 29\",\"List Item - 30\"]]";
        AssertJUnit.assertEquals(expected, list.getAttribute("content"));
    }

    /** With multiple duplicates, "content" numbers the occurrences (1), (2), ... */
    public void assertContentWithMultipleDuplicates() {
        // See NOTE(review) in selectForDuplicateItems about invokeLater ordering.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                JList jlist = (JList) ComponentUtils.findComponent(JList.class, frame);
                DefaultListModel model = (DefaultListModel) jlist.getModel();
                model.set(2, "List Item - 1");
                model.set(3, "List Item - 1");
            }
        });
        IJavaElement list = driver.findElementByName("list-1");
        String expected = "[[\"List Item - 1\",\"List Item - 2\",\"List Item - 1(1)\",\"List Item - 1(2)\",\"List Item - 5\",\"List Item - 6\",\"List Item - 7\",\"List Item - 8\",\"List Item - 9\",\"List Item - 10\",\"List Item - 11\",\"List Item - 12\",\"List Item - 13\",\"List Item - 14\",\"List Item - 15\",\"List Item - 16\",\"List Item - 17\",\"List Item - 18\",\"List Item - 19\",\"List Item - 20\",\"List Item - 21\",\"List Item - 22\",\"List Item - 23\",\"List Item - 24\",\"List Item - 25\",\"List Item - 26\",\"List Item - 27\",\"List Item - 28\",\"List Item - 29\",\"List Item - 30\"]]";
        AssertJUnit.assertEquals(expected, list.getAttribute("content"));
    }
}
| |
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* ---------------
* Statistics.java
* ---------------
* (C) Copyright 2000-2007, by Matthew Wright and Contributors.
*
* Original Author: Matthew Wright;
* Contributor(s): David Gilbert (for Object Refinery Limited);
*
* Changes (from 08-Nov-2001)
* --------------------------
* 08-Nov-2001 : Added standard header and tidied Javadoc comments (DG);
* Moved from JFreeChart to package com.jrefinery.data.* in
* JCommon class library (DG);
* 24-Jun-2002 : Removed unnecessary local variable (DG);
* 07-Oct-2002 : Fixed errors reported by Checkstyle (DG);
* 26-May-2004 : Moved calculateMean() method from BoxAndWhiskerCalculator (DG);
* 02-Jun-2004 : Fixed bug in calculateMedian() method (DG);
* 11-Jan-2005 : Removed deprecated code in preparation for the 1.0.0
* release (DG);
*
*/
package org.jfree.data.statistics;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* A utility class that provides some common statistical functions.
*/
public abstract class Statistics {

    /**
     * Returns the mean of an array of numbers.  This is equivalent to calling
     * <code>calculateMean(values, true)</code>.
     *
     * @param values  the values (<code>null</code> not permitted).
     *
     * @return The mean.
     */
    public static double calculateMean(Number[] values) {
        return calculateMean(values, true);
    }

    /**
     * Returns the mean of an array of numbers.
     *
     * @param values  the values (<code>null</code> not permitted).
     * @param includeNullAndNaN  a flag that controls whether or not
     *     <code>null</code> and <code>Double.NaN</code> values are included
     *     in the calculation (if either is present in the array, the result
     *     is {@link Double#NaN}).
     *
     * @return The mean (or {@link Double#NaN} if no values are counted).
     *
     * @since 1.0.3
     */
    public static double calculateMean(Number[] values,
                                       boolean includeNullAndNaN) {
        if (values == null) {
            throw new IllegalArgumentException("Null 'values' argument.");
        }
        double sum = 0.0;
        int counter = 0;
        for (int i = 0; i < values.length; i++) {
            // treat nulls the same as NaNs
            double current = (values[i] != null)
                    ? values[i].doubleValue() : Double.NaN;
            // when the flag is set, NaN entries are summed (so any NaN/null
            // makes the result NaN); otherwise they are skipped and not
            // counted
            if (includeNullAndNaN || !Double.isNaN(current)) {
                sum = sum + current;
                counter++;
            }
        }
        // if nothing was counted this is 0.0 / 0 which evaluates to NaN
        return sum / counter;
    }

    /**
     * Returns the mean of a collection of <code>Number</code> objects.
     *
     * @param values  the values (<code>null</code> not permitted).
     *
     * @return The mean.
     */
    public static double calculateMean(Collection values) {
        return calculateMean(values, true);
    }

    /**
     * Returns the mean of a collection of <code>Number</code> objects.
     * Elements that are neither <code>null</code> nor instances of
     * {@link Number} are ignored.
     *
     * @param values  the values (<code>null</code> not permitted).
     * @param includeNullAndNaN  a flag that controls whether or not
     *     <code>null</code> and <code>Double.NaN</code> values are included
     *     in the calculation (if either is present, the result is
     *     {@link Double#NaN}).
     *
     * @return The mean (or {@link Double#NaN} if no values are counted).
     *
     * @since 1.0.3
     */
    public static double calculateMean(Collection values,
                                       boolean includeNullAndNaN) {
        if (values == null) {
            throw new IllegalArgumentException("Null 'values' argument.");
        }
        int count = 0;
        double total = 0.0;
        Iterator iterator = values.iterator();
        while (iterator.hasNext()) {
            Object object = iterator.next();
            if (object == null) {
                // a null element contaminates the result when the flag is set
                if (includeNullAndNaN) {
                    return Double.NaN;
                }
            }
            else if (object instanceof Number) {
                double value = ((Number) object).doubleValue();
                if (Double.isNaN(value)) {
                    if (includeNullAndNaN) {
                        return Double.NaN;
                    }
                }
                else {
                    total = total + value;
                    count = count + 1;
                }
            }
        }
        // 0.0 / 0 == NaN when no numeric values were present
        return total / count;
    }

    /**
     * Calculates the median for a list of values (<code>Number</code> objects).
     * The list of values will be copied, and the copy sorted, before
     * calculating the median.  To avoid this step (if your list of values
     * is already sorted), use the {@link #calculateMedian(List, boolean)}
     * method.
     *
     * @param values  the values (<code>null</code> permitted).
     *
     * @return The median (or {@link Double#NaN} if the list is
     *     <code>null</code> or empty).
     */
    public static double calculateMedian(List values) {
        return calculateMedian(values, true);
    }

    /**
     * Calculates the median for a list of values (<code>Number</code> objects).
     * If <code>copyAndSort</code> is <code>false</code>, the list is assumed
     * to be presorted in ascending order by value.
     *
     * @param values  the values (<code>null</code> permitted).
     * @param copyAndSort  a flag that controls whether the list of values is
     *     copied and sorted.
     *
     * @return The median (or {@link Double#NaN} if the list is
     *     <code>null</code> or empty).
     */
    public static double calculateMedian(List values, boolean copyAndSort) {
        double result = Double.NaN;
        if (values != null) {
            if (copyAndSort) {
                // copy the list so the caller's list is never reordered
                List copy = new ArrayList(values);
                Collections.sort(copy);
                values = copy;
            }
            int count = values.size();
            if (count > 0) {
                if (count % 2 == 1) {
                    // odd count: the single middle element (also correct for
                    // a one-element list, where (count - 1) / 2 == 0)
                    result = ((Number) values.get((count - 1) / 2))
                            .doubleValue();
                }
                else {
                    // even count: average of the two middle elements
                    Number value1 = (Number) values.get(count / 2 - 1);
                    Number value2 = (Number) values.get(count / 2);
                    result = (value1.doubleValue() + value2.doubleValue())
                             / 2.0;
                }
            }
        }
        return result;
    }

    /**
     * Calculates the median for a sublist within a list of values
     * (<code>Number</code> objects).
     *
     * @param values  the values, in any order (<code>null</code> not
     *     permitted).
     * @param start  the start index (inclusive).
     * @param end  the end index (inclusive).
     *
     * @return The median.
     */
    public static double calculateMedian(List values, int start, int end) {
        return calculateMedian(values, start, end, true);
    }

    /**
     * Calculates the median for a sublist within a list of values
     * (<code>Number</code> objects).  If <code>copyAndSort</code> is
     * <code>false</code>, the sublist is assumed to be presorted in
     * ascending order by value.
     *
     * @param values  the values (<code>null</code> not permitted).
     * @param start  the start index (inclusive).
     * @param end  the end index (inclusive).
     * @param copyAndSort  a flag that controls whether the sublist of values
     *     is copied and sorted.
     *
     * @return The median.
     */
    public static double calculateMedian(List values, int start, int end,
                                         boolean copyAndSort) {
        double result = Double.NaN;
        if (copyAndSort) {
            // copy just the [start, end] range and delegate to the presorted
            // variant
            List working = new ArrayList(values.subList(start, end + 1));
            Collections.sort(working);
            result = calculateMedian(working, false);
        }
        else {
            int count = end - start + 1;
            if (count > 0) {
                if (count % 2 == 1) {
                    // odd count: the single middle element of the range
                    result = ((Number) values.get(start + (count - 1) / 2))
                            .doubleValue();
                }
                else {
                    // even count: average of the two middle elements
                    Number value1 = (Number) values.get(start + count / 2 - 1);
                    Number value2 = (Number) values.get(start + count / 2);
                    result = (value1.doubleValue() + value2.doubleValue())
                             / 2.0;
                }
            }
        }
        return result;
    }

    /**
     * Returns the sample standard deviation of a set of numbers (the sum of
     * squared deviations from the mean is divided by <code>n - 1</code>).
     * Note that the mean used here includes <code>null</code>/NaN handling
     * per {@link #calculateMean(Number[])}.
     *
     * @param data  the data (<code>null</code> or zero length array not
     *     permitted).
     *
     * @return The standard deviation of a set of numbers.
     */
    public static double getStdDev(Number[] data) {
        if (data == null) {
            throw new IllegalArgumentException("Null 'data' array.");
        }
        if (data.length == 0) {
            throw new IllegalArgumentException("Zero length 'data' array.");
        }
        double avg = calculateMean(data);
        double sum = 0.0;
        for (int counter = 0; counter < data.length; counter++) {
            double diff = data[counter].doubleValue() - avg;
            sum = sum + diff * diff;
        }
        return Math.sqrt(sum / (data.length - 1));
    }

    /**
     * Fits a straight line to a set of (x, y) data, returning the slope and
     * intercept.
     *
     * @param xData  the x-data (<code>null</code> not permitted).
     * @param yData  the y-data (<code>null</code> not permitted).
     *
     * @return A double array with the intercept in [0] and the slope in [1].
     */
    public static double[] getLinearFit(Number[] xData, Number[] yData) {
        if (xData == null) {
            throw new IllegalArgumentException("Null 'xData' argument.");
        }
        if (yData == null) {
            throw new IllegalArgumentException("Null 'yData' argument.");
        }
        if (xData.length != yData.length) {
            throw new IllegalArgumentException(
                "Statistics.getLinearFit(): array lengths must be equal.");
        }
        double[] result = new double[2];
        // slope
        result[1] = getSlope(xData, yData);
        // intercept = ybar - slope * xbar
        result[0] = calculateMean(yData) - result[1] * calculateMean(xData);
        return result;
    }

    /**
     * Finds the slope of a regression line using least squares:
     * <pre>
     *     b = (sum(x * y) - sum(x) * sum(y) / n)
     *         / (sum(x^2) - sum(x)^2 / n)
     * </pre>
     *
     * @param xData  the x-values (<code>null</code> not permitted).
     * @param yData  the y-values (<code>null</code> not permitted).
     *
     * @return The slope.
     */
    public static double getSlope(Number[] xData, Number[] yData) {
        if (xData == null) {
            throw new IllegalArgumentException("Null 'xData' argument.");
        }
        if (yData == null) {
            throw new IllegalArgumentException("Null 'yData' argument.");
        }
        if (xData.length != yData.length) {
            throw new IllegalArgumentException("Array lengths must be equal.");
        }
        // accumulate sum of x, x^2, x * y and y in a single pass
        double sx = 0.0, sxx = 0.0, sxy = 0.0, sy = 0.0;
        int n = xData.length;
        for (int i = 0; i < n; i++) {
            double x = xData[i].doubleValue();
            double y = yData[i].doubleValue();
            sx = sx + x;
            sxx = sxx + x * x;
            sxy = sxy + x * y;
            sy = sy + y;
        }
        return (sxy - (sx * sy) / n) / (sxx - (sx * sx) / n);
    }

    /**
     * Calculates the correlation between two datasets.  Both arrays should
     * contain the same number of items.  Null values are treated as zero.
     * <P>
     * Information about the correlation calculation was obtained from:
     *
     * http://trochim.human.cornell.edu/kb/statcorr.htm
     *
     * @param data1  the first dataset (<code>null</code> not permitted).
     * @param data2  the second dataset (<code>null</code> not permitted).
     *
     * @return The correlation.
     */
    public static double getCorrelation(Number[] data1, Number[] data2) {
        if (data1 == null) {
            throw new IllegalArgumentException("Null 'data1' argument.");
        }
        if (data2 == null) {
            throw new IllegalArgumentException("Null 'data2' argument.");
        }
        if (data1.length != data2.length) {
            throw new IllegalArgumentException(
                "'data1' and 'data2' arrays must have same length."
            );
        }
        int n = data1.length;
        double sumX = 0.0;
        double sumY = 0.0;
        double sumX2 = 0.0;
        double sumY2 = 0.0;
        double sumXY = 0.0;
        for (int i = 0; i < n; i++) {
            // nulls contribute zero to every sum
            double x = (data1[i] != null) ? data1[i].doubleValue() : 0.0;
            double y = (data2[i] != null) ? data2[i].doubleValue() : 0.0;
            sumX = sumX + x;
            sumY = sumY + y;
            sumXY = sumXY + (x * y);
            sumX2 = sumX2 + (x * x);
            sumY2 = sumY2 + (y * y);
        }
        return (n * sumXY - sumX * sumY) / Math.pow((n * sumX2 - sumX * sumX)
                * (n * sumY2 - sumY * sumY), 0.5);
    }

    /**
     * Returns a data set for a moving average on the data set passed in.
     * Entry <code>i</code> of the result pairs <code>xData[i + period]</code>
     * with the mean of the <code>period</code> preceding y-values
     * (<code>yData[i] .. yData[i + period - 1]</code>), so the result has
     * <code>xData.length - period</code> entries.
     *
     * @param xData  an array of the x data (<code>null</code> not permitted).
     * @param yData  an array of the y data (<code>null</code> not permitted).
     * @param period  the number of data points to average.
     *
     * @return A double[][] with two doubles for x and y in the second
     *     dimension.
     */
    public static double[][] getMovingAverage(Number[] xData,
                                              Number[] yData,
                                              int period) {
        // check arguments (IllegalArgumentException for nulls, consistent
        // with the other methods in this class)...
        if (xData == null) {
            throw new IllegalArgumentException("Null 'xData' argument.");
        }
        if (yData == null) {
            throw new IllegalArgumentException("Null 'yData' argument.");
        }
        if (xData.length != yData.length) {
            throw new IllegalArgumentException("Array lengths must be equal.");
        }
        if (period > xData.length) {
            throw new IllegalArgumentException(
                "Period can't be longer than dataset."
            );
        }
        double[][] result = new double[xData.length - period][2];
        for (int i = 0; i < result.length; i++) {
            result[i][0] = xData[i + period].doubleValue();
            // mean of the 'period' y-values preceding x[i + period]
            double sum = 0.0;
            for (int j = 0; j < period; j++) {
                sum += yData[i + j].doubleValue();
            }
            result[i][1] = sum / period;
        }
        return result;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES;
/**
* A query that computes a document score based on the provided script
*/
public class ScriptScoreQueryBuilder extends AbstractQueryBuilder<ScriptScoreQueryBuilder> {
public static final String NAME = "script_score";
public static final ParseField QUERY_FIELD = new ParseField("query");
public static final ParseField SCRIPT_FIELD = new ParseField("script");
public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score");
private static final ConstructingObjectParser<ScriptScoreQueryBuilder, Void> PARSER = new ConstructingObjectParser<>(NAME, false,
args -> {
ScriptScoreQueryBuilder ssQueryBuilder = new ScriptScoreQueryBuilder((QueryBuilder) args[0], (Script) args[1]);
if (args[2] != null) ssQueryBuilder.setMinScore((Float) args[2]);
if (args[3] != null) ssQueryBuilder.boost((Float) args[3]);
if (args[4] != null) ssQueryBuilder.queryName((String) args[4]);
return ssQueryBuilder;
});
static {
PARSER.declareObject(constructorArg(), (p,c) -> parseInnerQueryBuilder(p), QUERY_FIELD);
PARSER.declareObject(constructorArg(), (p,c) -> Script.parse(p), SCRIPT_FIELD);
PARSER.declareFloat(optionalConstructorArg(), MIN_SCORE_FIELD);
PARSER.declareFloat(optionalConstructorArg(), AbstractQueryBuilder.BOOST_FIELD);
PARSER.declareString(optionalConstructorArg(), AbstractQueryBuilder.NAME_FIELD);
}
public static ScriptScoreQueryBuilder fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
private final QueryBuilder query;
private Float minScore = null;
private final Script script;
/**
* Creates a script_score query that executes the provided script function on documents that match a query.
*
* @param query the query that defines which documents the script_score query will be executed on.
* @param script the script to run for computing the query score
*/
public ScriptScoreQueryBuilder(QueryBuilder query, Script script) {
// require the supply of the query, even the explicit supply of "match_all" query
if (query == null) {
throw new IllegalArgumentException("script_score: query must not be null");
}
if (script == null) {
throw new IllegalArgumentException("script_score: script must not be null");
}
this.query = query;
this.script = script;
}
/**
* Read from a stream.
*/
public ScriptScoreQueryBuilder(StreamInput in) throws IOException {
super(in);
query = in.readNamedWriteable(QueryBuilder.class);
script = new Script(in);
minScore = in.readOptionalFloat();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(query);
script.writeTo(out);
out.writeOptionalFloat(minScore);
}
/**
* Returns the query builder that defines which documents the script_score query will be executed on.
*/
public QueryBuilder query() {
return this.query;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(QUERY_FIELD.getPreferredName());
query.toXContent(builder, params);
builder.field(SCRIPT_FIELD.getPreferredName(), script);
if (minScore != null) {
builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore);
}
printBoostAndQueryName(builder);
builder.endObject();
}
public ScriptScoreQueryBuilder setMinScore(float minScore) {
this.minScore = minScore;
return this;
}
public Float getMinScore() {
return this.minScore;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected boolean doEquals(ScriptScoreQueryBuilder other) {
return Objects.equals(this.query, other.query) &&
Objects.equals(this.script, other.script) &&
Objects.equals(this.minScore, other.minScore) ;
}
@Override
protected int doHashCode() {
return Objects.hash(this.query, this.script, this.minScore);
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
if (context.allowExpensiveQueries() == false) {
throw new ElasticsearchException("[script score] queries cannot be executed when '"
+ ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false.");
}
ScoreScript.Factory factory = context.compile(script, ScoreScript.CONTEXT);
ScoreScript.LeafFactory scoreScriptFactory = factory.newFactory(script.getParams(), context.lookup());
Query query = this.query.toQuery(context);
return new ScriptScoreQuery(query, script, scoreScriptFactory, minScore,
context.index().getName(), context.getShardId(), context.indexVersionCreated());
}
@Override
protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder newQuery = this.query.rewrite(queryRewriteContext);
if (newQuery instanceof MatchNoneQueryBuilder) {
return newQuery;
}
if (newQuery != query) {
ScriptScoreQueryBuilder newQueryBuilder = new ScriptScoreQueryBuilder(newQuery, script);
if (minScore != null) {
newQueryBuilder.setMinScore(minScore);
}
return newQueryBuilder;
}
return this;
}
    @Override
    protected void extractInnerHitBuilders(Map<String, InnerHitContextBuilder> innerHits) {
        // Delegates inner-hit collection to the wrapped query.
        InnerHitContextBuilder.extractInnerHits(query(), innerHits);
    }
}
| |
package com.planet_ink.coffee_mud.core.collections;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import com.planet_ink.coffee_mud.core.interfaces.CMObject;
public class SortedStrSVector<T> extends SVector<T> implements SearchIDList<T>
{
	private static final long serialVersionUID = 6687178785122361992L;

	// Extracts the case-insensitive string key that orders the elements.
	private final Str<T> stringer;

	/**
	 * Key-extraction strategy: maps an element to the string that determines
	 * its position in this sorted vector and is used for string lookups.
	 *
	 * @param <T> the element type
	 */
	public static interface Str<T>
	{
		public String toString(T t);
	}

	/**
	 * Constructs a sorted vector with the given key extractor and capacity.
	 *
	 * @param stringer extracts each element's sort key
	 * @param size the initial capacity
	 */
	public SortedStrSVector(Str<T> stringer, int size)
	{
		super(size);
		this.stringer = stringer;
	}

	/**
	 * Constructs an empty sorted vector with the given key extractor.
	 *
	 * @param stringer extracts each element's sort key
	 */
	public SortedStrSVector(Str<T> stringer)
	{
		super();
		this.stringer = stringer;
	}

	// Compares an element's key to a raw string key, ignoring case.
	private int compareTo(T arg0, String arg1)
	{
		return stringer.toString(arg0).compareToIgnoreCase(arg1);
	}

	// Compares two elements by their extracted keys, ignoring case.
	private int compareTo(T arg0, T arg1)
	{
		return stringer.toString(arg0).compareToIgnoreCase(stringer.toString(arg1));
	}

	/**
	 * Inserts the element at its sorted position.  A binary search locates
	 * the neighborhood; a short linear scan then finds the exact slot, so
	 * elements with equal keys end up adjacent.
	 *
	 * @param arg0 the element to insert; nulls are rejected
	 * @return true if the element was inserted, false if it was null
	 */
	@Override
	public synchronized boolean add(T arg0)
	{
		if (arg0 == null)
			return false;
		if (size() == 0)
			return super.add(arg0);
		int start = 0;
		int end = size() - 1;
		int comp = -1;
		int mid = -1;
		while (start <= end)
		{
			mid = (start + end) >>> 1; // unsigned shift: overflow-safe midpoint
			comp = compareTo(super.get(mid), arg0);
			if (comp == 0)
				break;
			else
			if (comp > 0)
				end = mid - 1;
			else
				start = mid + 1;
		}
		if (comp == 0)
			super.add(mid, arg0);
		else
		if (comp > 0)
		{
			// Element at mid sorts after arg0: scan back to the first slot
			// whose element does not.
			while ((mid >= 0) && (compareTo(super.get(mid), arg0) > 0))
				mid--;
			if (mid >= size() - 1)
				super.add(arg0);
			else
			if (mid < 0)
				super.add(0, arg0);
			else
				super.add(mid + 1, arg0);
		}
		else
		{
			// Element at mid sorts before arg0: scan forward to the first
			// slot whose element does not.
			while ((mid < size()) && (compareTo(super.get(mid), arg0) < 0))
				mid++;
			if (mid >= size())
				super.add(arg0);
			else
				super.add(mid, arg0);
		}
		return true;
	}

	/** Positional insertion would break the sort order; always throws. */
	@Override
	public void add(int arg0, T arg1)
	{
		throw new java.lang.UnsupportedOperationException();
	}

	/** Positional bulk insertion would break the sort order; always throws. */
	@Override
	public boolean addAll(int arg0, Collection<? extends T> arg1)
	{
		throw new java.lang.UnsupportedOperationException();
	}

	/**
	 * Returns true if an element matching the given object or String key is
	 * present, via the binary search in {@link #indexOf(Object)}.
	 */
	@Override
	public boolean contains(Object arg0)
	{
		return indexOf(arg0) >= 0;
	}

	/** Returns true only if every member of the collection is contained. */
	@Override
	public boolean containsAll(Collection<?> arg0)
	{
		for (final Object o : arg0)
		{
			if (!contains(o))
				return false;
		}
		return true;
	}

	/** Returns the element at the given index. */
	@Override
	public T get(int arg0)
	{
		return super.get(arg0);
	}

	/**
	 * Re-inserts the element so its position reflects a changed sort key.
	 *
	 * @param arg0 the element whose key may have changed
	 */
	public synchronized void reSort(T arg0)
	{
		if (super.contains(arg0))
		{
			super.remove(arg0);
			this.add(arg0);
		}
	}

	/**
	 * Binary-searches for the element.  Accepts either an element (CMObject)
	 * or a String key; any other argument type yields -1.
	 *
	 * @param arg0 the element or String key to find
	 * @return the index of a matching element, or -1 if absent
	 */
	@SuppressWarnings("unchecked")
	@Override
	public synchronized int indexOf(Object arg0)
	{
		if (arg0 == null)
			return -1;
		if (size() == 0)
			return -1;
		int start = 0;
		int end = size() - 1;
		if (arg0 instanceof CMObject)
		{
			while (start <= end)
			{
				final int mid = (start + end) >>> 1; // overflow-safe midpoint
				final int comp = compareTo(super.get(mid), (T) arg0);
				if (comp == 0)
					return mid;
				else
				if (comp > 0)
					end = mid - 1;
				else
					start = mid + 1;
			}
		}
		else
		if (arg0 instanceof String)
		{
			while (start <= end)
			{
				final int mid = (start + end) >>> 1; // overflow-safe midpoint
				final int comp = compareTo(super.get(mid), (String) arg0);
				if (comp == 0)
					return mid;
				else
				if (comp > 0)
					end = mid - 1;
				else
					start = mid + 1;
			}
		}
		return -1;
	}

	/**
	 * Binary-searches for an element by its String key (case-insensitive).
	 *
	 * @param arg0 the key to search for
	 * @return the matching element, or null if absent
	 */
	@Override
	public synchronized T find(String arg0)
	{
		if (arg0 == null)
			return null;
		if (size() == 0)
			return null;
		int start = 0;
		int end = size() - 1;
		while (start <= end)
		{
			final int mid = (start + end) >>> 1; // overflow-safe midpoint
			final int comp = compareTo(super.get(mid), arg0);
			if (comp == 0)
				return super.get(mid);
			else
			if (comp > 0)
				end = mid - 1;
			else
				start = mid + 1;
		}
		return null;
	}

	/**
	 * Binary-searches for an element whose key equals the given element's key.
	 *
	 * @param arg0 a sample element carrying the key to search for
	 * @return the matching stored element, or null if absent
	 */
	@Override
	public synchronized T find(T arg0)
	{
		if (arg0 == null)
			return null;
		if (size() == 0)
			return null;
		int start = 0;
		int end = size() - 1;
		while (start <= end)
		{
			final int mid = (start + end) >>> 1; // overflow-safe midpoint
			final int comp = compareTo(super.get(mid), arg0);
			if (comp == 0)
				return super.get(mid);
			else
			if (comp > 0)
				end = mid - 1;
			else
				start = mid + 1;
		}
		return null;
	}

	@Override
	public synchronized int lastIndexOf(Object arg0)
	{
		return indexOf(arg0); // only holds one-of-a-kind, so all is well!
	}

	/**
	 * Removes the element matching the given object or String key.
	 * Fix: the previous implementation returned {@code remove(index) == arg0},
	 * a reference-equality check, so removing by String key (which indexOf
	 * supports) or by an equal-but-distinct object removed the element yet
	 * reported false, violating the Collection.remove contract.  Also made
	 * synchronized for consistency with the other mutators (add, reSort).
	 *
	 * @param arg0 an element, or its String key
	 * @return true if a matching element was removed
	 */
	@Override
	public synchronized boolean remove(Object arg0)
	{
		final int index = indexOf(arg0);
		if (index < 0)
			return false;
		remove(index);
		return true;
	}

	/** Positional replacement would break the sort order; always throws. */
	@Override
	public T set(int arg0, T arg1)
	{
		throw new java.lang.UnsupportedOperationException();
	}
}
| |
// Generated from MetaGeneratorParser.g4 by ANTLR 4.5.1 -- DO NOT EDIT BY HAND; regenerate from the grammar instead, as manual changes will be overwritten.
package metadslx.compiler;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
/**
* This class provides an empty implementation of {@link MetaGeneratorParserVisitor},
* which can be extended to create a visitor which only needs to handle a subset
* of the available methods.
*
* @param <T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
public class MetaGeneratorParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements MetaGeneratorParserVisitor<T> {
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMain(MetaGeneratorParser.MainContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNamespaceDeclaration(MetaGeneratorParser.NamespaceDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitGeneratorDeclaration(MetaGeneratorParser.GeneratorDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitUsingNamespaceDeclaration(MetaGeneratorParser.UsingNamespaceDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitUsingGeneratorDeclaration(MetaGeneratorParser.UsingGeneratorDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitConfigDeclaration(MetaGeneratorParser.ConfigDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitConfigPropertyDeclaration(MetaGeneratorParser.ConfigPropertyDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitConfigPropertyGroupDeclaration(MetaGeneratorParser.ConfigPropertyGroupDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMethodDeclaration(MetaGeneratorParser.MethodDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFunctionDeclaration(MetaGeneratorParser.FunctionDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFunctionSignature(MetaGeneratorParser.FunctionSignatureContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitParamList(MetaGeneratorParser.ParamListContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitParameter(MetaGeneratorParser.ParameterContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBody(MetaGeneratorParser.BodyContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitStatement(MetaGeneratorParser.StatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitVariableDeclarationStatement(MetaGeneratorParser.VariableDeclarationStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitReturnStatement(MetaGeneratorParser.ReturnStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExpressionStatement(MetaGeneratorParser.ExpressionStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIfStatement(MetaGeneratorParser.IfStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIfStatementBegin(MetaGeneratorParser.IfStatementBeginContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitElseIfStatement(MetaGeneratorParser.ElseIfStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIfStatementElse(MetaGeneratorParser.IfStatementElseContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIfStatementEnd(MetaGeneratorParser.IfStatementEndContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopStatement(MetaGeneratorParser.LoopStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopStatementBegin(MetaGeneratorParser.LoopStatementBeginContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopStatementEnd(MetaGeneratorParser.LoopStatementEndContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopChain(MetaGeneratorParser.LoopChainContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopChainItem(MetaGeneratorParser.LoopChainItemContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopChainMemberAccessExpression(MetaGeneratorParser.LoopChainMemberAccessExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopChainMethodCallExpression(MetaGeneratorParser.LoopChainMethodCallExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopChainIdentifierExpression(MetaGeneratorParser.LoopChainIdentifierExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopChainTypeofExpression(MetaGeneratorParser.LoopChainTypeofExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopWhereExpression(MetaGeneratorParser.LoopWhereExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopRunExpression(MetaGeneratorParser.LoopRunExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopRunList(MetaGeneratorParser.LoopRunListContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLoopRun(MetaGeneratorParser.LoopRunContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchStatement(MetaGeneratorParser.SwitchStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchStatementBegin(MetaGeneratorParser.SwitchStatementBeginContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchStatementEnd(MetaGeneratorParser.SwitchStatementEndContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchBranchStatement(MetaGeneratorParser.SwitchBranchStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchBranchHeadStatement(MetaGeneratorParser.SwitchBranchHeadStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchCaseOrTypeIsHeadStatement(MetaGeneratorParser.SwitchCaseOrTypeIsHeadStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchCaseHeadStatement(MetaGeneratorParser.SwitchCaseHeadStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchTypeIsHeadStatement(MetaGeneratorParser.SwitchTypeIsHeadStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchTypeAsHeadStatement(MetaGeneratorParser.SwitchTypeAsHeadStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchDefaultStatement(MetaGeneratorParser.SwitchDefaultStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSwitchDefaultHeadStatement(MetaGeneratorParser.SwitchDefaultHeadStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateDeclaration(MetaGeneratorParser.TemplateDeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateSignature(MetaGeneratorParser.TemplateSignatureContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateBody(MetaGeneratorParser.TemplateBodyContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateContentLine(MetaGeneratorParser.TemplateContentLineContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateOutput(MetaGeneratorParser.TemplateOutputContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateLineEnd(MetaGeneratorParser.TemplateLineEndContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateStatementStartEnd(MetaGeneratorParser.TemplateStatementStartEndContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTemplateStatement(MetaGeneratorParser.TemplateStatementContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypeArgumentList(MetaGeneratorParser.TypeArgumentListContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPredefinedType(MetaGeneratorParser.PredefinedTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypeReferenceList(MetaGeneratorParser.TypeReferenceListContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypeReference(MetaGeneratorParser.TypeReferenceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArrayType(MetaGeneratorParser.ArrayTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNullableType(MetaGeneratorParser.NullableTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitGenericType(MetaGeneratorParser.GenericTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSimpleType(MetaGeneratorParser.SimpleTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitVoidType(MetaGeneratorParser.VoidTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitReturnType(MetaGeneratorParser.ReturnTypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExpressionList(MetaGeneratorParser.ExpressionListContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitVariableReference(MetaGeneratorParser.VariableReferenceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitRankSpecifiers(MetaGeneratorParser.RankSpecifiersContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitRankSpecifier(MetaGeneratorParser.RankSpecifierContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitUnboundTypeName(MetaGeneratorParser.UnboundTypeNameContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitGenericDimensionSpecifier(MetaGeneratorParser.GenericDimensionSpecifierContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExplicitAnonymousFunctionSignature(MetaGeneratorParser.ExplicitAnonymousFunctionSignatureContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitImplicitAnonymousFunctionSignature(MetaGeneratorParser.ImplicitAnonymousFunctionSignatureContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSingleParamAnonymousFunctionSignature(MetaGeneratorParser.SingleParamAnonymousFunctionSignatureContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExplicitParameter(MetaGeneratorParser.ExplicitParameterContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitImplicitParameter(MetaGeneratorParser.ImplicitParameterContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypeofUnboundTypeExpression(MetaGeneratorParser.TypeofUnboundTypeExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitAdditionExpression(MetaGeneratorParser.AdditionExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypecheckExpression(MetaGeneratorParser.TypecheckExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitRelationalExpression(MetaGeneratorParser.RelationalExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIdentifierExpression(MetaGeneratorParser.IdentifierExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPostExpression(MetaGeneratorParser.PostExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypeofVoidExpression(MetaGeneratorParser.TypeofVoidExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitParenthesizedExpression(MetaGeneratorParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBitwiseAndExpression(MetaGeneratorParser.BitwiseAndExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLogicalAndExpression(MetaGeneratorParser.LogicalAndExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFunctionCallExpression(MetaGeneratorParser.FunctionCallExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMemberAccessExpression(MetaGeneratorParser.MemberAccessExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitUnaryExpression(MetaGeneratorParser.UnaryExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTypecastExpression(MetaGeneratorParser.TypecastExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLogicalXorExpression(MetaGeneratorParser.LogicalXorExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPredefinedTypeMemberAccessExpression(MetaGeneratorParser.PredefinedTypeMemberAccessExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBitwiseXorExpression(MetaGeneratorParser.BitwiseXorExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLambdaExpression(MetaGeneratorParser.LambdaExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDefaultValueExpression(MetaGeneratorParser.DefaultValueExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
// NOTE(review): ANTLR-generated base-visitor defaults — every method below simply delegates to
// visitChildren(ctx). Do not hand-edit; this file is regenerated from the grammar.
@Override public T visitConditionalExpression(MetaGeneratorParser.ConditionalExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitAssignmentExpression(MetaGeneratorParser.AssignmentExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitMultiplicationExpression(MetaGeneratorParser.MultiplicationExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitNewObjectOrCollectionWithConstructorExpression(MetaGeneratorParser.NewObjectOrCollectionWithConstructorExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitLogicalOrExpression(MetaGeneratorParser.LogicalOrExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitBitwiseOrExpression(MetaGeneratorParser.BitwiseOrExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitHasLoopExpression(MetaGeneratorParser.HasLoopExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitThisExpression(MetaGeneratorParser.ThisExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitElementAccessExpression(MetaGeneratorParser.ElementAccessExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitTypeofTypeExpression(MetaGeneratorParser.TypeofTypeExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitEqualityExpression(MetaGeneratorParser.EqualityExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitLiteralExpression(MetaGeneratorParser.LiteralExpressionContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitQualifiedName(MetaGeneratorParser.QualifiedNameContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitIdentifierList(MetaGeneratorParser.IdentifierListContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitIdentifier(MetaGeneratorParser.IdentifierContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitLiteral(MetaGeneratorParser.LiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitNullLiteral(MetaGeneratorParser.NullLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitBooleanLiteral(MetaGeneratorParser.BooleanLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitNumberLiteral(MetaGeneratorParser.NumberLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitIntegerLiteral(MetaGeneratorParser.IntegerLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitDecimalLiteral(MetaGeneratorParser.DecimalLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitScientificLiteral(MetaGeneratorParser.ScientificLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitDateOrTimeLiteral(MetaGeneratorParser.DateOrTimeLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitDateTimeOffsetLiteral(MetaGeneratorParser.DateTimeOffsetLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitDateTimeLiteral(MetaGeneratorParser.DateTimeLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitDateLiteral(MetaGeneratorParser.DateLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitTimeLiteral(MetaGeneratorParser.TimeLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitCharLiteral(MetaGeneratorParser.CharLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitStringLiteral(MetaGeneratorParser.StringLiteralContext ctx) { return visitChildren(ctx); }
/**
 * {@inheritDoc}
 *
 * <p>The default implementation returns the result of calling
 * {@link #visitChildren} on {@code ctx}.</p>
 */
@Override public T visitGuidLiteral(MetaGeneratorParser.GuidLiteralContext ctx) { return visitChildren(ctx); }
}
| |
/*
* To change this template, choose Tools | Templates and open the template in the editor.
*/
package com.blazebit.security.web.bean.base;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.primefaces.model.DefaultTreeNode;
import org.primefaces.model.TreeNode;
import com.blazebit.security.PermissionUtils;
import com.blazebit.security.data.PermissionHandling;
import com.blazebit.security.entity.EntityPermissionUtils;
import com.blazebit.security.entity.EntityResourceFactory;
import com.blazebit.security.model.Action;
import com.blazebit.security.model.EntityAction;
import com.blazebit.security.model.EntityField;
import com.blazebit.security.model.EntityObjectField;
import com.blazebit.security.model.Permission;
import com.blazebit.security.model.Resource;
import com.blazebit.security.spi.PermissionFactory;
import com.blazebit.security.web.bean.model.TreeNodeModel;
import com.blazebit.security.web.bean.model.TreeNodeModel.Marking;
import com.blazebit.security.web.util.Constants;
/**
*
* @author cuszk
*/
public class PermissionTreeHandlingBaseBean extends TreeHandlingBaseBean {
/** Serial version UID. */
private static final long serialVersionUID = 1L;
// normalizes collected permission sets (see getSelectedPermissions)
@Inject
protected PermissionHandling permissionHandling;
// creates Permission instances from an (action, resource) pair
@Inject
protected PermissionFactory permissionFactory;
// creates entity resources from entity names (see createEntityNode)
@Inject
protected EntityResourceFactory entityFieldFactory;
/**
 * Recursively applies the given selection state to every object instance model attached to
 * the node's model and to the models of all descendant nodes.
 *
 * @param selectedNode root of the subtree to update
 * @param select selection state to apply to the instance models
 */
protected void selectChildrenInstances(TreeNode selectedNode, boolean select) {
    TreeNodeModel nodeModel = (TreeNodeModel) selectedNode.getData();
    for (TreeNodeModel instanceModel : nodeModel.getInstances()) {
        instanceModel.setSelected(select);
    }
    for (TreeNode childNode : selectedNode.getChildren()) {
        selectChildrenInstances(childNode, select);
    }
}
/**
 * Builds an immutable tree of the current permission state where the replaced and revokable
 * permissions are marked as REMOVED.
 * NOTE(review): the {@code grantable} parameter is unused by this overload — confirm at the
 * call sites whether it can be dropped.
 */
protected TreeNode buildCurrentPermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> grantable, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel) {
    return getImmutablePermissionTree(permissions, dataPermissions, concat(replaced, revokable), Marking.REMOVED, hideFieldLevel);
}
/**
 * Builds the current permission tree under the given root; replaced and revokable
 * permissions are marked as REMOVED.
 */
protected TreeNode buildCurrentPermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel) {
    return getImmutablePermissionTree(root, permissions, dataPermissions, concat(replaced, revokable), Marking.REMOVED, hideFieldLevel);
}
/**
 * Builds the new permission tree under a fresh root; an empty-message node is added when
 * there is nothing to show.
 */
protected TreeNode buildNewPermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> grantable, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel, boolean mutable) {
    TreeNode root = new DefaultTreeNode();
    return buildNewPermissionTree(root, permissions, dataPermissions, grantable, revokable, replaced, hideFieldLevel, mutable, true);
}
/**
 * Builds the new permission tree under a fresh root with explicit control over the
 * empty-message node.
 */
protected TreeNode buildNewPermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> grantable, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel, boolean mutable, boolean addEmptyMessage) {
    TreeNode root = new DefaultTreeNode();
    return buildNewPermissionTree(root, permissions, dataPermissions, grantable, revokable, replaced, hideFieldLevel, mutable, addEmptyMessage);
}
/**
 * Builds the new data (object level) permission tree under a fresh root.
 */
protected TreeNode buildNewDataPermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> grantable, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel, boolean mutable) {
    TreeNode root = new DefaultTreeNode();
    return buildNewDataPermissionTree(root, permissions, dataPermissions, grantable, revokable, replaced, hideFieldLevel, mutable);
}
/**
 * Builds the tree of the new data (object level) permission state under the given root:
 * the currently held data permissions minus the replaced/revokable ones (unmarked), the
 * grantable permissions marked as NEW, and the revokable permissions marked as REMOVED.
 * NOTE(review): the {@code permissions} parameter is unused by this method — confirm at the
 * call sites whether it can be dropped.
 *
 * @param root tree node the entity nodes are attached to
 * @param permissions entity level permissions (currently unused here)
 * @param dataPermissions currently held data permissions
 * @param grantable data permissions to be granted (shown marked NEW)
 * @param revokable data permissions to be revoked (shown marked REMOVED)
 * @param replaced data permissions replaced by new ones (hidden from the tree)
 * @param hideFieldLevel whether field level nodes are collapsed into the action node
 * @param mutable whether the created nodes are selectable
 * @return the given root
 */
protected TreeNode buildNewDataPermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> grantable, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel, boolean mutable) {
    // currently held data permissions without the ones being replaced or revoked
    List<Permission> currentDataPermissions = new ArrayList<Permission>(dataPermissions);
    currentDataPermissions = new ArrayList<Permission>(PermissionUtils.removeAll(currentDataPermissions, concat(replaced, revokable)));
    // existing data permissions: no marking
    for (Map.Entry<String, List<Permission>> entry : EntityPermissionUtils.groupPermissionsByResourceName(currentDataPermissions).entrySet()) {
        createEntityNode(root, Collections.<Permission>emptySet(), Collections.<Permission>emptySet(), Marking.NONE, Marking.NONE, entry.getKey(), entry.getValue(),
            Collections.<Permission>emptyList(), mutable, hideFieldLevel);
    }
    // granted data permissions: marked NEW
    for (Map.Entry<String, List<Permission>> entry : EntityPermissionUtils.groupPermissionsByResourceName(grantable).entrySet()) {
        createEntityNode(root, new HashSet<Permission>(entry.getValue()), Collections.<Permission>emptySet(), Marking.NEW, Marking.NONE, entry.getKey(), entry.getValue(),
            Collections.<Permission>emptyList(), mutable, hideFieldLevel);
    }
    // revoked data permissions: marked REMOVED
    for (Map.Entry<String, List<Permission>> entry : EntityPermissionUtils.groupPermissionsByResourceName(revokable).entrySet()) {
        createEntityNode(root, Collections.<Permission>emptySet(), new HashSet<Permission>(entry.getValue()), Marking.NONE, Marking.REMOVED, entry.getKey(), entry.getValue(),
            Collections.<Permission>emptyList(), mutable, hideFieldLevel);
    }
    return root;
}
/**
 * Builds the tree of the new permission state under the given root: the current permissions
 * minus the replaced ones, extended with the grantable permissions. Entity level and object
 * (data) level permissions are kept separated throughout.
 * NOTE(review): EntityPermissionUtils.getSeparatedPermissionsByResource(grantable) is invoked
 * twice; it could be computed once into a local — confirm the helper's return type first.
 *
 * @param userLevelEnabled when true, builds a mutable tree that also shows the revokable
 *        permissions (marked REMOVED); otherwise an immutable tree without them
 */
protected TreeNode buildNewPermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> grantable, Set<Permission> revokable, Set<Permission> replaced, boolean hideFieldLevel, boolean userLevelEnabled, boolean addEmptyMessage) {
    // new permission tree
    List<Permission> currentPermissions = new ArrayList<Permission>(permissions);
    // the mutable permission tree shows the removed permissions too, therefore remove only the replaced ones
    currentPermissions = new ArrayList<Permission>(PermissionUtils.removeAll(currentPermissions, replaced));
    // always separate permissions and data permissions
    currentPermissions.addAll(EntityPermissionUtils.getSeparatedPermissionsByResource(grantable).get(0));
    List<Permission> currentDataPermissions = new ArrayList<Permission>(dataPermissions);
    currentDataPermissions = new ArrayList<Permission>(PermissionUtils.removeAll(currentDataPermissions, replaced));
    currentDataPermissions.addAll(EntityPermissionUtils.getSeparatedPermissionsByResource(grantable).get(1));
    if (userLevelEnabled) {
        return getMutablePermissionTree(root, currentPermissions, currentDataPermissions, grantable, revokable, Marking.NEW, Marking.REMOVED, hideFieldLevel, addEmptyMessage);
    } else {
        // immutable view: the revokable permissions are not shown at all
        currentPermissions = new ArrayList<Permission>(PermissionUtils.removeAll(currentPermissions, concat(replaced, revokable)));
        return getImmutablePermissionTree(root, currentPermissions, currentDataPermissions, grantable, Marking.NEW, hideFieldLevel, addEmptyMessage);
    }
}
/**
 * Returns a fresh mutable set containing the union of the two given permission collections.
 *
 * @param current first collection of permissions
 * @param added second collection of permissions
 * @return new set holding every element of both collections
 */
protected Set<Permission> concat(Collection<Permission> current, Collection<Permission> added) {
    Set<Permission> union = new HashSet<Permission>(current);
    union.addAll(added);
    return union;
}
/**
 * Decides whether the given action node lists a child node for every primitive field of the
 * entity, i.e. whether selecting all children is equivalent to an entity level permission.
 *
 * @param actionNode action level tree node whose children are field nodes
 * @param entityName name of the entity whose primitive fields are looked up
 * @return true when the node has no children at all, or when its child count equals the
 *         number of primitive fields; false otherwise, or when the entity class cannot be
 *         resolved
 */
// TODO enough size check?
protected boolean allChildFieldsListed(TreeNode actionNode, String entityName) {
    if (actionNode.getChildCount() == 0) {
        return true;
    }
    try {
        List<String> fields = resourceMetamodel.getPrimitiveFields(entityName);
        return actionNode.getChildCount() == fields.size();
    } catch (ClassNotFoundException ignored) {
        // best effort: an unresolvable entity class counts as "not all fields listed"
        return false;
    }
}
/**
 * Collects the permissions represented by the selected tree nodes without restricting the
 * selection to any parent node.
 *
 * @param selectedPermissionNodes selected tree nodes
 * @return normalized set of the selected permissions
 */
protected Set<Permission> getSelectedPermissions(TreeNode[] selectedPermissionNodes) {
    return getSelectedPermissions(selectedPermissionNodes, null);
}
/**
 * Collects the permissions represented by the selected tree nodes and normalizes them.
 * Entity nodes carry no permission; action nodes yield entity-level permissions (or object
 * level ones via their instance models); field nodes yield field-level permissions.
 *
 * @param selectedPermissionNodes selected tree nodes
 * @param parentNode when not null, only nodes underneath a node with matching data are
 *        collected (group view); when null, all selected nodes are collected (resources
 *        view, where all fields are listed)
 * @return normalized set of the selected permissions
 */
protected Set<Permission> getSelectedPermissions(TreeNode[] selectedPermissionNodes, TreeNode parentNode) {
    Set<Permission> ret = new HashSet<Permission>();
    List<TreeNode> sortedSelectedNodes = sortTreeNodesByType(selectedPermissionNodes);
    for (TreeNode permissionNode : sortedSelectedNodes) {
        TreeNodeModel permissionNodeData = (TreeNodeModel) permissionNode.getData();
        switch (permissionNodeData.getType()) {
            case ENTITY:
                // entity nodes are containers only; nothing to collect
                break;
            case ACTION:
                TreeNode entityNode = permissionNode.getParent();
                TreeNodeModel entityNodeModel = (TreeNodeModel) entityNode.getData();
                TreeNodeModel entityNodeData = (TreeNodeModel) entityNode.getParent().getData();
                if ((parentNode != null && entityNodeData.equals(parentNode.getData())) || parentNode == null) {
                    // collection field resources are ALWAYS stored as entity field resources; all field childs selected
                    // then action node should be taken, if object resources specified action node should be taken
                    if (!permissionNodeData.getInstances().isEmpty()) {
                        // add object resources if exist
                        for (TreeNodeModel instance : permissionNodeData.getInstances()) {
                            if (instance.isSelected()) {
                                Permission actionPermission = permissionFactory.create((EntityAction) permissionNodeData.getTarget(), (EntityField) instance.getTarget());
                                ret.add(actionPermission);
                            }
                        }
                    } else {
                        if (!actionUtils.getUpdateActionsForCollectionField().contains(permissionNodeData.getTarget())
                            && allChildFieldsListed(permissionNode, ((TreeNodeModel) entityNode.getData()).getName())) {
                            // add entity field resource
                            Permission actionPermission = permissionFactory.create((EntityAction) permissionNodeData.getTarget(), (EntityField) entityNodeModel.getTarget());
                            ret.add(actionPermission);
                        }
                    }
                }
                break;
            case FIELD:
                TreeNode actionNode = permissionNode.getParent();
                TreeNodeModel actionNodeData = (TreeNodeModel) actionNode.getData();
                TreeNode actionEntityNode = actionNode.getParent();
                if ((parentNode != null && actionEntityNode.getParent().getData().equals(parentNode.getData())) || parentNode == null) {
                    if (!permissionNodeData.getInstances().isEmpty()) {
                        // add object resources if exist
                        for (TreeNodeModel instance : permissionNodeData.getInstances()) {
                            if (instance.isSelected()) {
                                Permission fieldPermission = permissionFactory.create((EntityAction) actionNodeData.getTarget(), (EntityField) instance.getTarget());
                                ret.add(fieldPermission);
                            }
                        }
                    } else {
                        Permission actionPermission = permissionFactory.create((EntityAction) actionNodeData.getTarget(), (EntityField) permissionNodeData.getTarget());
                        ret.add(actionPermission);
                    }
                }
                break;
            default:
                break;
        }
    }
    return permissionHandling.getNormalizedPermissions(ret);
}
// -------------------------------------------------------------------------------------------
// getMutablePermissionTree overloads: convenience wrappers around getPermissionTree() with
// selectable=true. Overloads without a root create a fresh DefaultTreeNode; hideFieldLevel
// defaults to false and addEmptyMessage to true where not passed explicitly.
// -------------------------------------------------------------------------------------------
protected TreeNode getMutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, boolean hideFieldLevel) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, true, hideFieldLevel, true);
}
protected TreeNode getMutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, true, false, true);
}
protected TreeNode getMutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, boolean hideFieldLevel) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, true, hideFieldLevel, true);
}
protected TreeNode getMutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, boolean hideFieldLevel, boolean addEmptyMessage) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, true, hideFieldLevel,
        addEmptyMessage);
}
// -------------------------------------------------------------------------------------------
// getImmutablePermissionTree overloads: convenience wrappers around getPermissionTree() with
// selectable=false. Overloads without a root create a fresh DefaultTreeNode; markings default
// to Marking.NONE and the selected/not-selected sets to empty sets where not passed.
// -------------------------------------------------------------------------------------------
protected TreeNode getImmutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, false, false, true);
}
protected TreeNode getImmutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, boolean hideFieldLevel) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, false, false, true);
}
protected TreeNode getImmutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, boolean hideFieldLevel) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(DefaultTreeNode groupNode, List<Permission> permissions, List<Permission> dataPermissions) {
    return getPermissionTree(groupNode, permissions, dataPermissions, new HashSet<Permission>(), new HashSet<Permission>(), Marking.NONE, Marking.NONE, false, false, true);
}
protected TreeNode getImmutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Marking selectedMarking) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, new HashSet<Permission>(), selectedMarking, Marking.NONE, false, false, true);
}
protected TreeNode getImmutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Marking selectedMarking, boolean hideFieldLevel) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, new HashSet<Permission>(), selectedMarking, Marking.NONE, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions, boolean hideFieldLevel) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, new HashSet<Permission>(), new HashSet<Permission>(), Marking.NONE, Marking.NONE, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(List<Permission> permissions, List<Permission> dataPermissions) {
    TreeNode root = new DefaultTreeNode();
    return getPermissionTree(root, permissions, dataPermissions, new HashSet<Permission>(), new HashSet<Permission>(), Marking.NONE, Marking.NONE, false, false, true);
}
protected TreeNode getImmutablePermissionTree(DefaultTreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Marking selectedMarking) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, new HashSet<Permission>(), selectedMarking, Marking.NONE, false, false, true);
}
protected TreeNode getImmutablePermissionTree(DefaultTreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Marking selectedMarking, boolean hideFieldLevel) {
    return getPermissionTree(root, permissions, dataPermissions, selectedPermissions, new HashSet<Permission>(), selectedMarking, Marking.NONE, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(DefaultTreeNode root, List<Permission> permissions, List<Permission> dataPermissions, boolean hideFieldLevel) {
    return getPermissionTree(root, permissions, dataPermissions, new HashSet<Permission>(), new HashSet<Permission>(), Marking.NONE, Marking.NONE, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selected, Marking marking, boolean hideFieldLevel) {
    return getPermissionTree(root, permissions, dataPermissions, selected, new HashSet<Permission>(), marking, Marking.NONE, false, hideFieldLevel, true);
}
protected TreeNode getImmutablePermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selected, Marking marking, boolean hideFieldLevel, boolean addEmptyMessage) {
    return getPermissionTree(root, permissions, dataPermissions, selected, new HashSet<Permission>(), marking, Marking.NONE, false, hideFieldLevel, addEmptyMessage);
}
/**
 * Builds the permission tree under the given root: permissions and data permissions are
 * grouped by resource (entity) name and one entity node is created per entity, combining the
 * entity level and the matching data (object level) permissions.
 *
 * @param selectedPermissions permissions marked with selectedMarking and shown selected
 * @param notSelectedPermissions permissions marked with notSelectedMarking
 * @param selectable whether the created nodes may be (de)selected by the user
 * @param hideFieldLevel whether field level nodes are collapsed into the action node
 * @param addEmptyMessage whether a non-selectable placeholder node is added to an empty tree
 * @return the given root
 */
private TreeNode getPermissionTree(TreeNode root, List<Permission> permissions, List<Permission> dataPermissions, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, boolean selectable, boolean hideFieldLevel, boolean addEmptyMessage) {
    // group both permission lists by the entity (resource) name
    Map<String, List<Permission>> permissionMapByEntity = EntityPermissionUtils.groupPermissionsByResourceName(permissions);
    Map<String, List<Permission>> dataPermissionMapByEntity = EntityPermissionUtils.groupPermissionsByResourceName(dataPermissions);
    for (Map.Entry<String, List<Permission>> entry : permissionMapByEntity.entrySet()) {
        String entity = entry.getKey();
        // consume the matching data permissions so that only unmatched entities remain below
        List<Permission> dataPermisssionsByEntity = dataPermissionMapByEntity.remove(entity);
        if (dataPermisssionsByEntity == null) {
            dataPermisssionsByEntity = new ArrayList<Permission>();
        }
        // create entity node
        createEntityNode(root, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, entity, entry.getValue(), dataPermisssionsByEntity,
            selectable, hideFieldLevel);
    }
    // entities that only have data (object level) permissions
    for (Map.Entry<String, List<Permission>> entry : dataPermissionMapByEntity.entrySet()) {
        createEntityNode(root, selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, entry.getKey(), new ArrayList<Permission>(), entry.getValue(),
            selectable, hideFieldLevel);
    }
    if (addEmptyMessage && root.getChildCount() == 0) {
        new DefaultTreeNode(new TreeNodeModel("No permissions available", null, null), root).setSelectable(false);
    }
    return root;
}
/**
 * Creates an expanded entity node under the given root and, per action, an action node that
 * combines the entity level and the matching data (object level) permissions; finally lets
 * propagateNodePropertiesTo adjust the entity node from its children.
 */
private void createEntityNode(TreeNode root, Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, String entity, List<Permission> permissionsByEntity, List<Permission> dataPermisssionsByEntity, boolean selectable, boolean hideFieldLevel) {
    EntityField entityField = (EntityField) entityFieldFactory.createResource(entity);
    TreeNode entityNode = new DefaultTreeNode(new TreeNodeModel(entity, TreeNodeModel.ResourceType.ENTITY, entityField), root);
    entityNode.setExpanded(true);
    // group again by action
    Map<Action, List<Permission>> permissionMapByAction = EntityPermissionUtils.groupResourcePermissionsByAction(permissionsByEntity);
    Map<Action, List<Permission>> dataPermissionMapByAction = EntityPermissionUtils.groupResourcePermissionsByAction(dataPermisssionsByEntity);
    for (Map.Entry<Action, List<Permission>> entry : permissionMapByAction.entrySet()) {
        Action action = entry.getKey();
        // consume the matching data permissions so that only unmatched actions remain below
        List<Permission> dataPermissionsByAction = dataPermissionMapByAction.remove(action);
        if (dataPermissionsByAction == null) {
            dataPermissionsByAction = new ArrayList<Permission>();
        }
        createActionNode(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, entity, entityField, entityNode, entry.getValue(),
            dataPermissionsByAction, action, selectable, hideFieldLevel);
    }
    // actions that only have data (object level) permissions
    for (Map.Entry<Action, List<Permission>> entry : dataPermissionMapByAction.entrySet()) {
        createActionNode(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, entity, entityField, entityNode, new ArrayList<Permission>(),
            entry.getValue(), entry.getKey(), selectable, hideFieldLevel);
    }
    propagateNodePropertiesTo(entityNode);
}
/**
 * Creates an expanded action node under the entity node and fills it with field level child
 * nodes built from both the entity level and the data (object level) permissions of this
 * action, then adds the revoked field nodes and finally lets propagateNodePropertiesTo
 * adjust the action node from its children.
 */
private void createActionNode(Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, String entity, EntityField entityField, TreeNode entityNode, List<Permission> permissionsByAction, List<Permission> dataPermissionsByAction, Action action, boolean selectable, boolean hideFieldLevel) {
    EntityAction entityAction = (EntityAction) action;
    TreeNodeModel actionNodeModel = new TreeNodeModel(entityAction.getName(), TreeNodeModel.ResourceType.ACTION, entityAction);
    DefaultTreeNode actionNode = new DefaultTreeNode(actionNodeModel, entityNode);
    actionNode.setExpanded(true);
    // create field nodes
    createFieldNodes(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, permissionsByAction, actionNode, selectable, hideFieldLevel);
    createFieldNodes(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, dataPermissionsByAction, actionNode, selectable, hideFieldLevel);
    // add revoked field nodes (addRevokedFields is defined elsewhere in this class hierarchy)
    addRevokedFields(notSelectedPermissions, entity, entityField, permissionsByAction, actionNode);
    addRevokedFields(notSelectedPermissions, entity, entityField, dataPermissionsByAction, actionNode);
    propagateNodePropertiesTo(actionNode);
}
/**
 * Creates field level child nodes under the given action node. Permissions are grouped by
 * field name; the empty field key represents an entity level permission and adjusts the
 * action node itself. When hideFieldLevel is set, field permissions are not shown as child
 * nodes either — the action node is marked/tooltipped instead and adjusted directly.
 */
private void createFieldNodes(Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, List<Permission> permissionsByAction, DefaultTreeNode actionNode, boolean selectable, boolean hideFieldLevel) {
    Map<String, List<Permission>> fieldPermissions = EntityPermissionUtils.groupEntityPermissionsByField(permissionsByAction);
    for (Map.Entry<String, List<Permission>> entry : fieldPermissions.entrySet()) {
        String field = entry.getKey();
        // every list in the map has at least one element, otherwise the key wouldn't exist
        List<Permission> permissionsByField = entry.getValue();
        if (field.equals(EntityField.EMPTY_FIELD)) {
            // no field -> entity level permission: fix up the action node itself
            adjustActionNode(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, actionNode, permissionsByField, selectable);
            continue;
        }
        if (hideFieldLevel) {
            // field level hidden: mark the action node and append the "contains fields" tooltip once
            TreeNodeModel actionNodeModel = (TreeNodeModel) actionNode.getData();
            actionNodeModel.setMarking(Marking.OBJECT);
            if (!StringUtils.isEmpty(actionNodeModel.getTooltip()) && !StringUtils.equals(actionNodeModel.getTooltip(), Constants.CONTAINS_FIELDS)) {
                actionNodeModel.setTooltip(actionNodeModel.getTooltip() + ", " + Constants.CONTAINS_FIELDS);
            } else {
                actionNodeModel.setTooltip(Constants.CONTAINS_FIELDS);
            }
            actionNode.setType("field");
            adjustActionNode(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, actionNode, permissionsByField, selectable);
        } else {
            createFieldNode(selectedPermissions, notSelectedPermissions, selectedMarking, notSelectedMarking, actionNode, permissionsByField, selectable);
        }
    }
}
/**
 * Adjusts an already created action node according to the given permissions: object level
 * (EntityObjectField) permissions are attached as instance models and force the node into a
 * non-selectable but selected "object" state; for every permission the node's marking,
 * selection and selectability are then set depending on whether it is in the selected set,
 * the not-selected set, or neither (already existing permission -> not selectable).
 * NOTE(review): with several permissions in the list, later iterations overwrite the
 * marking/selection set by earlier ones — the iteration order decides the final state.
 */
private void adjustActionNode(Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking selectedMarking, Marking notSelectedMarking, DefaultTreeNode actionNode, List<Permission> permissions, boolean selectable) {
    TreeNodeModel actionNodeModel = ((TreeNodeModel) actionNode.getData());
    for (Permission permission : permissions) {
        if (permission.getResource() instanceof EntityObjectField) {
            TreeNodeModel instanceModel = new TreeNodeModel(((EntityField) permission.getResource()).getField(), TreeNodeModel.ResourceType.FIELD, permission.getResource(),
                PermissionUtils.contains(selectedPermissions, permission));
            actionNodeModel.getInstances().add(instanceModel);
            actionNodeModel.setMarking(Marking.OBJECT);
            // append the "contains objects" tooltip only once
            if (!StringUtils.isEmpty(actionNodeModel.getTooltip()) && !StringUtils.equals(actionNodeModel.getTooltip(), Constants.CONTAINS_OBJECTS)) {
                actionNodeModel.setTooltip(actionNodeModel.getTooltip() + ", " + Constants.CONTAINS_OBJECTS);
            } else {
                actionNodeModel.setTooltip(Constants.CONTAINS_OBJECTS);
            }
            actionNode.setType("object");
            // object level permission cannot be optionally granted or revoked.they are just displayed for info and will be
            // granted or revoked implicitly. TODO find a better solution! at this point there could be 3 object level
            // permissions on the same treenode: 1 in added state, 1 in to be removed state and 1 in existing state. this
            // cannot be displayed in one single node.
            actionNode.setSelectable(false);
            // mark it selected so that it will be processed
            actionNode.setSelected(true);
        }
        if (PermissionUtils.contains(selectedPermissions, permission)) {
            actionNodeModel.setMarking(selectedMarking);
            actionNode.setSelected(true);
            actionNode.setSelectable(selectable);
        } else {
            if (PermissionUtils.contains(notSelectedPermissions, permission)) {
                actionNode.setSelected(false);
                actionNodeModel.setMarking(notSelectedMarking);
                actionNode.setSelectable(selectable);
            } else {
                // already existing entity permission: shown but not selectable
                actionNode.setSelectable(false);
            }
        }
    }
}
/**
 * Creates a single field-level tree node under the given action node and applies
 * selection state and marking for every permission that belongs to this field.
 * <p>
 * Mirrors {@link #adjustActionNode}: object-level (instance) permissions are appended
 * to the field node model's instance list and force the node into a non-selectable,
 * pre-selected "object" display mode; entity/field-level permissions drive the node's
 * marking and selectability.
 *
 * @param selectedPermissions    permissions newly selected by the user (to be granted)
 * @param notSelectedPermissions permissions deselected by the user (to be revoked)
 * @param marking1               marking applied when a permission is newly selected
 * @param marking2               marking applied when a permission is deselected
 * @param actionNode             parent action node the new field node is attached to
 * @param permissions            all permissions for this field; must be non-empty
 * @param selectable             whether the node may be toggled by the user
 */
private void createFieldNode(Set<Permission> selectedPermissions, Set<Permission> notSelectedPermissions, Marking marking1, Marking marking2, DefaultTreeNode actionNode, List<Permission> permissions, boolean selectable) {
    // there is always only one field node added at first without any marking, so just take first permissions resource
    Permission permission = permissions.get(0);
    TreeNodeModel fieldNodeModel = new TreeNodeModel(((EntityField) permission.getResource()).getField(), TreeNodeModel.ResourceType.FIELD, permission.getResource());
    DefaultTreeNode fieldNode = new DefaultTreeNode(fieldNodeModel, actionNode);
    // field resource: entity field or entity object field -> paint it blue, add tooltip, color might be overwritten
    for (Permission fieldPermission : permissions) {
        boolean isNewPermission = PermissionUtils.contains(selectedPermissions, fieldPermission);
        boolean isRemovedPermission = PermissionUtils.contains(notSelectedPermissions, fieldPermission);
        // BUG FIX: inspect the CURRENT loop permission's resource, not the first one.
        // Previously `permission.getResource()` was tested/used here, so whenever the first
        // permission was object-level an instance entry was added on every iteration (and
        // never when only later permissions were object-level).
        if (fieldPermission.getResource() instanceof EntityObjectField) {
            TreeNodeModel instanceModel = new TreeNodeModel(((EntityField) fieldPermission.getResource()).getField(), TreeNodeModel.ResourceType.FIELD,
                fieldPermission.getResource(), isNewPermission);
            instanceModel.setTooltip(isNewPermission ? "New!" : (isRemovedPermission ? "Removed!" : ""));
            fieldNodeModel.getInstances().add(instanceModel);
            fieldNodeModel.setMarking(Marking.OBJECT);
            fieldNodeModel.setTooltip(Constants.CONTAINS_OBJECTS);
            fieldNode.setType("object");
            // object level permission cannot be optionally granted or revoked.they are just displayed for info and will be
            // granted or revoked implicitly. TODO find a better solution! at this point there could be 3 object level
            // permissions on the same treenode: 1 in added state, 1 in to be removed state and 1 in existing state. this
            // cannot be displayed in one single node.
            fieldNode.setSelectable(false);
            // mark it selected so that it will be processed
            fieldNode.setSelected(true);
        }
        // mark and select permission on field level-> will be propagated upwards at the end
        if (isNewPermission) {
            fieldNodeModel.setMarking(marking1);
            fieldNode.setSelected(true);
            fieldNode.setSelectable(selectable);
        } else {
            if (isRemovedPermission) {
                fieldNode.setSelected(false);
                fieldNodeModel.setMarking(marking2);
                fieldNode.setSelectable(selectable);
            } else {
                // already existing permission for this resource
                // fieldNode.setSelected(true);
                fieldNode.setSelectable(false);
            }
        }
    }
}
/**
 * Adds REMOVED-marked field nodes for fields that become unavailable when an
 * entity-level permission is revoked but separate field-level permissions remain granted.
 * Fields that still have an explicit child permission are skipped.
 *
 * @param notSelectedPermissions permissions deselected by the user (to be revoked)
 * @param entity                 entity name (currently unused)
 * @param entityField            entity field (currently unused)
 * @param permissionsByAction    permissions grouped under one action; the first entry is
 *                               assumed to be the parent (entity-level) permission
 * @param actionNode             parent node the new field nodes are attached to
 */
private void addRevokedFields(Set<Permission> notSelectedPermissions, String entity, EntityField entityField, List<Permission> permissionsByAction, DefaultTreeNode actionNode) {
    if (permissionsByAction.isEmpty()) {
        return;
    }
    // TODO: Here we assume that the first permission in permissionsByAction is also the parent
    Permission parentPermission = permissionsByAction.iterator().next();
    Map<Resource, Collection<Permission>> byParent = EntityPermissionUtils.groupByParents(permissionsByAction);
    Collection<Permission> grantedChildren = byParent.get(parentPermission);
    // Only relevant when the entity permission itself is revoked while explicit
    // field permissions remain granted.
    boolean parentRevoked = parentPermission != null && PermissionUtils.contains(notSelectedPermissions, parentPermission);
    if (!parentRevoked || grantedChildren == null || grantedChildren.isEmpty()) {
        return;
    }
    for (Permission availableChild : permissionHandling.getAvailableChildPermissions(parentPermission)) {
        if (PermissionUtils.contains(grantedChildren, availableChild)) {
            continue; // field keeps its own explicit permission
        }
        EntityField childField = (EntityField) availableChild.getResource();
        TreeNodeModel removedFieldModel = new TreeNodeModel(childField.getField(), TreeNodeModel.ResourceType.FIELD, childField, Marking.REMOVED);
        new DefaultTreeNode(removedFieldModel, actionNode);
    }
}
}
| |
package com.brentvatne.react;
import android.media.MediaPlayer;
import android.os.Handler;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.yqritc.scalablevideoview.ScalableType;
import com.yqritc.scalablevideoview.ScalableVideoView;
/**
 * React Native video view backed by a {@link MediaPlayer} inside a {@link ScalableVideoView}.
 * Emits lifecycle/progress events to JS through {@link RCTEventEmitter} and mirrors the
 * JS-side props (src, resizeMode, repeat, paused, muted, volume, rate) via the
 * {@code set*Modifier} methods.
 */
public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnPreparedListener, MediaPlayer
        .OnErrorListener, MediaPlayer.OnBufferingUpdateListener, MediaPlayer.OnCompletionListener {

    /** Events emitted to JS; {@link #toString()} yields the JS event name. */
    public enum Events {
        EVENT_LOAD_START("onVideoLoadStart"),
        EVENT_LOAD("onVideoLoad"),
        EVENT_ERROR("onVideoError"),
        EVENT_PROGRESS("onVideoProgress"),
        EVENT_SEEK("onVideoSeek"),
        EVENT_END("onVideoEnd");

        private final String mName;

        Events(final String name) {
            mName = name;
        }

        @Override
        public String toString() {
            return mName;
        }
    }

    public static final String EVENT_PROP_FAST_FORWARD = "canPlayFastForward";
    public static final String EVENT_PROP_SLOW_FORWARD = "canPlaySlowForward";
    public static final String EVENT_PROP_SLOW_REVERSE = "canPlaySlowReverse";
    public static final String EVENT_PROP_REVERSE = "canPlayReverse";
    public static final String EVENT_PROP_STEP_FORWARD = "canStepForward";
    public static final String EVENT_PROP_STEP_BACKWARD = "canStepBackward";
    public static final String EVENT_PROP_DURATION = "duration";
    public static final String EVENT_PROP_PLAYABLE_DURATION = "playableDuration";
    public static final String EVENT_PROP_CURRENT_TIME = "currentTime";
    public static final String EVENT_PROP_SEEK_TIME = "seekTime";
    public static final String EVENT_PROP_ERROR = "error";
    public static final String EVENT_PROP_WHAT = "what";
    public static final String EVENT_PROP_EXTRA = "extra";

    private ThemedReactContext mThemedReactContext;
    private RCTEventEmitter mEventEmitter;
    private Handler mProgressUpdateHandler = new Handler();
    private Runnable mProgressUpdateRunnable = null;

    // Mirror of the JS-side props; re-applied when the player becomes prepared.
    private String mSrcUriString = null;
    private String mSrcType = "mp4";
    private boolean mSrcIsNetwork = false;
    private boolean mSrcIsAsset = false;
    private ScalableType mResizeMode = ScalableType.LEFT_TOP;
    private boolean mRepeat = false;
    private boolean mPaused = false;
    private boolean mMuted = false;
    private float mVolume = 1.0f;
    private float mRate = 1.0f;

    private boolean mMediaPlayerValid = false; // True if mMediaPlayer is in prepared, started, or paused state.
    private int mVideoDuration = 0;            // milliseconds, set in onPrepared
    private int mVideoBufferedDuration = 0;    // milliseconds, derived from buffering percent

    public ReactVideoView(ThemedReactContext themedReactContext) {
        super(themedReactContext);
        mThemedReactContext = themedReactContext;
        mEventEmitter = themedReactContext.getJSModule(RCTEventEmitter.class);
        initializeMediaPlayerIfNeeded();
        setSurfaceTextureListener(this);

        // Self-reposting progress ticker (every 250ms); only emits while the player is valid.
        mProgressUpdateRunnable = new Runnable() {
            @Override
            public void run() {
                if (mMediaPlayerValid) {
                    WritableMap event = Arguments.createMap();
                    event.putDouble(EVENT_PROP_CURRENT_TIME, mMediaPlayer.getCurrentPosition() / 1000.0);
                    event.putDouble(EVENT_PROP_PLAYABLE_DURATION, mVideoBufferedDuration / 1000.0); //TODO:mBufferUpdateRunnable
                    mEventEmitter.receiveEvent(getId(), Events.EVENT_PROGRESS.toString(), event);
                }
                mProgressUpdateHandler.postDelayed(mProgressUpdateRunnable, 250);
            }
        };
        mProgressUpdateHandler.post(mProgressUpdateRunnable);
    }

    /** Lazily creates the MediaPlayer and wires all listeners to this view. */
    private void initializeMediaPlayerIfNeeded() {
        if (mMediaPlayer == null) {
            mMediaPlayerValid = false;
            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setScreenOnWhilePlaying(true);
            mMediaPlayer.setOnVideoSizeChangedListener(this);
            mMediaPlayer.setOnErrorListener(this);
            mMediaPlayer.setOnPreparedListener(this);
            mMediaPlayer.setOnBufferingUpdateListener(this);
            mMediaPlayer.setOnCompletionListener(this);
        }
    }

    /**
     * Sets the data source and starts async preparation. Emits EVENT_LOAD_START.
     *
     * @param uriString URI (network/asset) or raw resource name (local)
     * @param type      container type hint, e.g. "mp4"
     * @param isNetwork true if the URI is a network stream
     * @param isAsset   true if the URI points at an app asset
     */
    public void setSrc(final String uriString, final String type, final boolean isNetwork, final boolean isAsset) {

        mSrcUriString = uriString;
        mSrcType = type;
        mSrcIsNetwork = isNetwork;
        mSrcIsAsset = isAsset;

        mMediaPlayerValid = false;
        mVideoDuration = 0;
        mVideoBufferedDuration = 0;

        initializeMediaPlayerIfNeeded();
        mMediaPlayer.reset();

        try {
            if (isNetwork || isAsset) {
                setDataSource(uriString);
            } else {
                setRawData(mThemedReactContext.getResources().getIdentifier(
                        uriString,
                        "raw",
                        mThemedReactContext.getPackageName()
                ));
            }
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        WritableMap src = Arguments.createMap();
        src.putString(ReactVideoViewManager.PROP_SRC_URI, uriString);
        src.putString(ReactVideoViewManager.PROP_SRC_TYPE, type);
        src.putBoolean(ReactVideoViewManager.PROP_SRC_IS_NETWORK, isNetwork);
        WritableMap event = Arguments.createMap();
        event.putMap(ReactVideoViewManager.PROP_SRC, src);
        mEventEmitter.receiveEvent(getId(), Events.EVENT_LOAD_START.toString(), event);

        prepareAsync(this);
    }

    public void setResizeModeModifier(final ScalableType resizeMode) {
        mResizeMode = resizeMode;
        if (mMediaPlayerValid) {
            setScalableType(resizeMode);
            invalidate();
        }
    }

    public void setRepeatModifier(final boolean repeat) {
        mRepeat = repeat;
        if (mMediaPlayerValid) {
            setLooping(repeat);
        }
    }

    public void setPausedModifier(final boolean paused) {
        mPaused = paused;
        if (!mMediaPlayerValid) {
            return;
        }
        if (mPaused) {
            if (mMediaPlayer.isPlaying()) {
                pause();
            }
        } else {
            if (!mMediaPlayer.isPlaying()) {
                start();
            }
        }
    }

    public void setMutedModifier(final boolean muted) {
        mMuted = muted;
        if (!mMediaPlayerValid) {
            return;
        }
        if (mMuted) {
            setVolume(0, 0);
        } else {
            setVolume(mVolume, mVolume);
        }
    }

    public void setVolumeModifier(final float volume) {
        mVolume = volume;
        // Re-route through the mute logic so mute keeps precedence over volume.
        setMutedModifier(mMuted);
    }

    public void setRateModifier(final float rate) {
        mRate = rate;
        if (mMediaPlayerValid) {
            // TODO: Implement this.
            Log.e(ReactVideoViewManager.REACT_CLASS, "Setting playback rate is not yet supported on Android");
        }
    }

    /** Re-applies all cached props; called once the player is prepared. */
    public void applyModifiers() {
        setResizeModeModifier(mResizeMode);
        setRepeatModifier(mRepeat);
        setPausedModifier(mPaused);
        setMutedModifier(mMuted);
        // setRateModifier(mRate);
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        mMediaPlayerValid = true;
        mVideoDuration = mp.getDuration();

        WritableMap event = Arguments.createMap();
        event.putDouble(EVENT_PROP_DURATION, mVideoDuration / 1000.0);
        event.putDouble(EVENT_PROP_CURRENT_TIME, mp.getCurrentPosition() / 1000.0);
        // TODO: Actually check if you can.
        // BUG FIX: EVENT_PROP_FAST_FORWARD was put twice; each capability is now set once.
        event.putBoolean(EVENT_PROP_FAST_FORWARD, true);
        event.putBoolean(EVENT_PROP_SLOW_FORWARD, true);
        event.putBoolean(EVENT_PROP_SLOW_REVERSE, true);
        event.putBoolean(EVENT_PROP_REVERSE, true);
        event.putBoolean(EVENT_PROP_STEP_BACKWARD, true);
        event.putBoolean(EVENT_PROP_STEP_FORWARD, true);
        mEventEmitter.receiveEvent(getId(), Events.EVENT_LOAD.toString(), event);

        applyModifiers();
    }

    @Override
    public boolean onError(MediaPlayer mp, int what, int extra) {
        WritableMap error = Arguments.createMap();
        error.putInt(EVENT_PROP_WHAT, what);
        error.putInt(EVENT_PROP_EXTRA, extra);
        WritableMap event = Arguments.createMap();
        event.putMap(EVENT_PROP_ERROR, error);
        mEventEmitter.receiveEvent(getId(), Events.EVENT_ERROR.toString(), event);
        return true;
    }

    @Override
    public void onBufferingUpdate(MediaPlayer mp, int percent) {
        mVideoBufferedDuration = (int) Math.round((double) (mVideoDuration * percent) / 100.0);
    }

    @Override
    public void seekTo(int msec) {
        if (mMediaPlayerValid) {
            WritableMap event = Arguments.createMap();
            event.putDouble(EVENT_PROP_CURRENT_TIME, getCurrentPosition() / 1000.0);
            event.putDouble(EVENT_PROP_SEEK_TIME, msec / 1000.0);
            mEventEmitter.receiveEvent(getId(), Events.EVENT_SEEK.toString(), event);
            super.seekTo(msec);
        }
    }

    @Override
    public void onCompletion(MediaPlayer mp) {
        mMediaPlayerValid = false;
        mEventEmitter.receiveEvent(getId(), Events.EVENT_END.toString(), null);
    }

    @Override
    protected void onDetachedFromWindow() {
        mMediaPlayerValid = false;
        // BUG FIX: stop the self-reposting progress ticker so the detached view (and its
        // context) are not kept alive by the pending Handler message.
        mProgressUpdateHandler.removeCallbacks(mProgressUpdateRunnable);
        super.onDetachedFromWindow();
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Restart the progress ticker (remove first so it is never scheduled twice).
        mProgressUpdateHandler.removeCallbacks(mProgressUpdateRunnable);
        mProgressUpdateHandler.post(mProgressUpdateRunnable);
        // BUG FIX: on the very first attach no source has been set yet; calling
        // setSrc(null, ...) would fail inside setDataSource.
        if (mSrcUriString != null) {
            setSrc(mSrcUriString, mSrcType, mSrcIsNetwork, mSrcIsAsset);
        }
    }
}
| |
package nl.wouter0100.one2xs;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.support.design.widget.NavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.FileNotFoundException;
import nl.wouter0100.one2xs.adapters.UserSyncAdapter;
import nl.wouter0100.one2xs.fragments.ForumsFragment;
import nl.wouter0100.one2xs.fragments.MessagesFragment;
import nl.wouter0100.one2xs.fragments.SectionsFragment;
import nl.wouter0100.one2xs.fragments.ThreadsFragment;
import nl.wouter0100.one2xs.models.Forum;
/**
 * Main activity: hosts the navigation drawer, swaps content fragments, and shows the
 * signed-in account (at most one is supported) in the drawer header.
 */
public class MainActivity extends AppCompatActivity
        implements NavigationView.OnNavigationItemSelectedListener,
        ForumsFragment.OnForumInteractionListener,
        MessagesFragment.OnMessageInteractionListener,
        FragmentManager.OnBackStackChangedListener {

    private AccountManager mAccountManager;
    private Account mAccount;
    private Context mContext;
    private NavigationView mNavigationView;
    private BroadcastReceiver mUserSyncFinishedReceiver;
    private FragmentManager mFragmentManager;
    private ActionBarDrawerToggle mDrawerToggle;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Get and set application context
        mContext = getApplicationContext();

        // Get and set fragment manager
        mFragmentManager = getSupportFragmentManager();
        mFragmentManager.addOnBackStackChangedListener(this);

        // Get account manager and all our accounts
        mAccountManager = AccountManager.get(mContext);

        // Toolbar stuff
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);

        // Connect drawer
        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        mDrawerToggle = new ActionBarDrawerToggle(this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
        drawer.setDrawerListener(mDrawerToggle);
        // When the indicator shows the back arrow, clicking it pops the back stack.
        mDrawerToggle.setToolbarNavigationClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mFragmentManager.popBackStack();
            }
        });
        mDrawerToggle.syncState();

        // Navigation
        mNavigationView = (NavigationView) findViewById(R.id.nav_view);
        mNavigationView.setNavigationItemSelectedListener(this);

        // First page is the Forum..
        mNavigationView.getMenu().getItem(0).setChecked(true);
        onNavigationItemSelected(mNavigationView.getMenu().getItem(0));

        shouldDisplayHomeUp();
    }

    @Override
    protected void onResume() {
        super.onResume();

        Account[] accounts = mAccountManager.getAccounts();
        View navigationHeader = mNavigationView.getHeaderView(0);

        if (accounts.length >= 1) {
            // Always use first account
            mAccount = accounts[0];

            if (accounts.length >= 2) {
                // Show Toast that only one account is allowed
                Toast.makeText(mContext, R.string.one_account_allowed, Toast.LENGTH_LONG).show();
            }

            // Make some items visible, just to be sure it's there
            mNavigationView.getMenu().getItem(1).setVisible(true);

            setUserDetails();

            // Refresh the header whenever a user sync finishes.
            mUserSyncFinishedReceiver = new BroadcastReceiver() {
                @Override
                public void onReceive(Context context, Intent intent) {
                    setUserDetails();
                }
            };
            IntentFilter intentFilter = new IntentFilter();
            intentFilter.addAction(UserSyncAdapter.FINISHED);
            mContext.registerReceiver(mUserSyncFinishedReceiver, intentFilter);
        } else {
            // No accounts yet

            // Make some items invisible
            mNavigationView.getMenu().getItem(1).setVisible(false);

            // Set on click for when a user clicks the header
            navigationHeader.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    mAccountManager.addAccount(One2xsAuthenticator.ACCOUNT_TYPE, One2xsAuthenticator.AUTHTOKEN_TYPE, null, null, MainActivity.this, null, null);
                }
            });
        }
    }

    @Override
    protected void onPause() {
        super.onPause();

        if (mUserSyncFinishedReceiver != null) {
            mContext.unregisterReceiver(mUserSyncFinishedReceiver);
            // BUG FIX: clear the reference after unregistering. Otherwise a later
            // resume/pause cycle that takes the no-account branch (receiver never
            // re-registered) would unregister the same receiver twice and throw
            // IllegalArgumentException.
            mUserSyncFinishedReceiver = null;
        }
    }

    /** Fills the drawer header with the current account's name, status, and avatar. */
    private void setUserDetails() {
        View navigationHeader = mNavigationView.getHeaderView(0);

        // Get all required views we need to modify
        TextView usernameView = (TextView) navigationHeader.findViewById(R.id.text_username);
        TextView statusView = (TextView) navigationHeader.findViewById(R.id.text_status);
        ImageView avatarView = (ImageView) navigationHeader.findViewById(R.id.image_avatar);

        // Set all views values
        usernameView.setText(mAccount.name);

        String status = mAccountManager.getUserData(mAccount, "status");
        statusView.setText((status == null) ? getText(R.string.still_receiving) : status);

        try {
            Bitmap avatar = BitmapFactory.decodeStream(mContext.openFileInput("avatar.png"));
            avatarView.setImageBitmap(avatar);
        } catch (FileNotFoundException e) {
            // avatar not found; keep the default image
        }
    }

    @Override
    public void onBackPressed() {
        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        if (drawer.isDrawerOpen(GravityCompat.START)) {
            drawer.closeDrawer(GravityCompat.START);
        } else {
            super.onBackPressed();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    @SuppressWarnings("StatementWithEmptyBody")
    @Override
    public boolean onNavigationItemSelected(MenuItem item) {
        Fragment fragment = null;
        try {
            // Handle navigation view item clicks here.
            switch (item.getItemId()) {
                case R.id.nav_forum:
                    fragment = SectionsFragment.newInstance();
                    break;
                case R.id.nav_messages:
                    fragment = MessagesFragment.newInstance();
                    break;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        // BUG FIX: an unknown menu item (or a failed newInstance) left fragment null,
        // and FragmentTransaction.replace(id, null) throws. Bail out gracefully instead.
        if (fragment == null) {
            return false;
        }

        //Clear back stack
        for (int i = 0; i < mFragmentManager.getBackStackEntryCount(); ++i) {
            mFragmentManager.popBackStack();
        }

        // Commit new fragment
        mFragmentManager.beginTransaction().replace(R.id.fragment_content, fragment).commit();

        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        drawer.closeDrawer(GravityCompat.START);
        return true;
    }

    @Override
    public void onBackStackChanged() {
        shouldDisplayHomeUp();
    }

    /** Shows the Up arrow instead of the hamburger while the back stack is non-empty. */
    public void shouldDisplayHomeUp() {
        //Enable Up button only if there are entries in the back stack
        boolean canBack = getSupportFragmentManager().getBackStackEntryCount() > 0;
        mDrawerToggle.setDrawerIndicatorEnabled(!canBack);
        getSupportActionBar().setDisplayHomeAsUpEnabled(canBack);

        // Resync state
        if (!canBack) {
            mDrawerToggle.syncState();
        }
    }

    /**
     * Listener for when a user selects a forum
     *
     * @param forum Forum to load
     */
    @Override
    public void onForumSelected(Forum forum) {
        Fragment fragment = ThreadsFragment.newInstance(forum);
        mFragmentManager.beginTransaction().replace(R.id.fragment_content, fragment).addToBackStack(null).commit();
    }

    /**
     * Listener for when a user clicks a message
     *
     * @param id Id of the message to load
     * @param title Title of the message to load
     */
    @Override
    public void onMessageClicked(int id, String title) {
        // Not implemented yet.
    }
}
| |
package com.ericsson.research.trap.spi.transports;
/*
* ##_BEGIN_LICENSE_##
* Transport Abstraction Package (trap)
* ----------
* Copyright (C) 2014 Ericsson AB
* ----------
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Ericsson AB nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ##_END_LICENSE_##
*/
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashSet;
import com.ericsson.research.trap.TrapException;
import com.ericsson.research.trap.spi.TrapMessage;
import com.ericsson.research.trap.spi.TrapTransportException;
import com.ericsson.research.trap.spi.TrapTransportProtocol;
import com.ericsson.research.trap.spi.TrapTransportState;
import com.ericsson.research.trap.spi.ws.netty.WebServerSocketHandler;
import com.ericsson.research.trap.utils.StringUtil;
import com.ericsson.research.trap.utils.ThreadPool;
public class WebSocketTransport extends AbstractTransport
{
private static final String OPTION_BINARY = "binary";
private WebServerSocketHandler socket;
@SuppressWarnings("unused")
private boolean binary = true;
private long lastSend = 0;
private boolean delayed = false;
private boolean delayQueued = false;
private boolean useDelay = false;
ByteArrayOutputStream outBuf = new ByteArrayOutputStream();
// Create a new SocketTransport for receiving (=server)
public WebSocketTransport(WebServerSocketHandler socket)
{
this.socket = socket;
socket.t = this;
this.transportPriority = 0;
this.state = TrapTransportState.CONNECTED;
}
public String getTransportName()
{
return "websocket";
}
@Override
public String getProtocolName()
{
return TrapTransportProtocol.WEBSOCKET;
}
@Override
public void internalSend(TrapMessage message, boolean expectMore) throws TrapTransportException
{
try
{
WebServerSocketHandler mSock = this.socket;
if (mSock == null)
throw new TrapTransportException(message, this.getState());
synchronized (mSock)
{
byte[] raw = message.serialize();
this.delayed |= this.lastSend == System.currentTimeMillis();
this.delayed &= this.useDelay;
if (this.delayed && !this.delayQueued)
{
this.delayQueued = true;
ThreadPool.executeAfter(new Runnable() {
@Override
public void run()
{
WebSocketTransport.this.flushTransport();
}
}, 1);
}
if (expectMore)
{
if (this.outBuf == null)
this.outBuf = new ByteArrayOutputStream();
this.outBuf.write(raw);
return;
}
this.performSend(raw);
}
}
catch (IOException e)
{
this.logger.debug(e.toString());
this.setState(TrapTransportState.ERROR);
throw new TrapTransportException(message, this.state);
}
catch (TrapTransportException e)
{
this.setState(TrapTransportState.ERROR);
throw e;
}
catch (Throwable t)
{
t.printStackTrace();
}
}
private void performSend(byte[] raw) throws IOException
{
WebServerSocketHandler mSock = this.socket;
if (this.outBuf != null)
{
this.outBuf.write(raw);
raw = this.outBuf.toByteArray();
this.outBuf = null;
}
//char[] encoded = Base64.encode(raw);
mSock.send(raw);
this.lastSend = System.currentTimeMillis();
this.delayed = this.delayQueued = false;
}
@Override
protected synchronized void internalDisconnect()
{
synchronized (this.socket)
{
//this.socket.close();
}
this.setState(TrapTransportState.DISCONNECTING);
}
public void notifyError()
{
System.out.println("Error!");
this.setState(TrapTransportState.ERROR);
}
public void notifyOpen()
{
this.setState(TrapTransportState.CONNECTED);
}
public void notifyClose()
{
this.setState(TrapTransportState.DISCONNECTED);
this.socket = null;
}
public void notifyMessage(String string)
{
//byte[] decoded = Base64.decode(string);
// Disable binary mode, to prevent us from confusing the browser
this.binary = false;
byte[] decoded = StringUtil.toUtfBytes(string);
this.receive(decoded, 0, decoded.length);
}
public void notifyMessage(byte[] data)
{
// Ensure binary mode is activated for correct response
this.binary = true;
this.receive(data, 0, data.length);
}
// TODO: Expose IP information on websocket level...
public void fillAuthenticationKeys(HashSet<String> keys)
{
super.fillAuthenticationKeys(keys);
}
protected void updateConfig()
{
String eString = this.getOption(OPTION_BINARY);
if (eString != null)
{
try
{
this.binary = Boolean.parseBoolean(eString);
}
catch (Exception e)
{
this.logger.warn("Failed to parse transport {} binary flag", this.getTransportName(), e);
}
}
}
public void finalize()
{
}
@Override
protected void internalConnect() throws TrapException
{
}
@Override
public void flushTransport()
{
synchronized (WebSocketTransport.this.socket)
{
try
{
byte[] raw = WebSocketTransport.this.outBuf.toByteArray();
WebSocketTransport.this.outBuf = null;
WebSocketTransport.this.performSend(raw);
}
catch (IOException e)
{
WebSocketTransport.this.logger.debug(e.toString());
WebSocketTransport.this.forceError();
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.warmer;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse;
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse;
import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.warmer.IndexWarmerMissingException;
import org.elasticsearch.search.warmer.IndexWarmersMetaData;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.util.Locale;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
public class SimpleIndicesWarmerTests extends ElasticsearchIntegrationTest {
/**
 * Registers two warmers on a fresh index and verifies that GET warmers resolves
 * index wildcards, warmer-name wildcards, exact names, and type filters correctly.
 */
@Test
public void simpleWarmerTests() {
    createIndex("test");
    ensureGreen();

    // Register two warmers, each restricted to a different type.
    PutWarmerResponse putResponse = client().admin().indices().preparePutWarmer("warmer_1")
            .setSearchRequest(client().prepareSearch("test").setTypes("a1").setQuery(QueryBuilders.termQuery("field", "value1")))
            .execute().actionGet();
    assertThat(putResponse.isAcknowledged(), equalTo(true));
    putResponse = client().admin().indices().preparePutWarmer("warmer_2")
            .setSearchRequest(client().prepareSearch("test").setTypes("a2").setQuery(QueryBuilders.termQuery("field", "value2")))
            .execute().actionGet();
    assertThat(putResponse.isAcknowledged(), equalTo(true));

    client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet();

    // Index wildcard matches both warmers.
    assertWarmersOnTest(client().admin().indices().prepareGetWarmers("tes*")
            .execute().actionGet(), "warmer_1", "warmer_2");
    // Warmer-name wildcard matches both warmers.
    assertWarmersOnTest(client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_*")
            .execute().actionGet(), "warmer_1", "warmer_2");
    // Exact warmer names match exactly one each.
    assertWarmersOnTest(client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_1")
            .execute().actionGet(), "warmer_1");
    assertWarmersOnTest(client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_2")
            .execute().actionGet(), "warmer_2");
    // Type wildcard "a*" still matches warmer_2 (registered for type a2)...
    assertWarmersOnTest(client().admin().indices().prepareGetWarmers("test").addTypes("a*").addWarmers("warmer_2")
            .execute().actionGet(), "warmer_2");
    // ...but the exact type "a1" does not.
    assertWarmersOnTest(client().admin().indices().prepareGetWarmers("test").addTypes("a1").addWarmers("warmer_2")
            .execute().actionGet());
}

/** Asserts the response contains exactly the given warmer names (in order) for index "test". */
private static void assertWarmersOnTest(GetWarmersResponse response, String... expectedNames) {
    if (expectedNames.length == 0) {
        assertThat(response.getWarmers().size(), equalTo(0));
        return;
    }
    assertThat(response.getWarmers().size(), equalTo(1));
    assertThat(response.getWarmers().get("test").size(), equalTo(expectedNames.length));
    for (int i = 0; i < expectedNames.length; i++) {
        assertThat(response.getWarmers().get("test").get(i).name(), equalTo(expectedNames[i]));
    }
}
/**
 * Verifies that a warmer declared in an index template is applied to a newly
 * created index and shows up in the cluster-state warmer metadata.
 */
@Test
public void templateWarmer() {
    // Template matching every index, carrying a single match_all warmer.
    client().admin().indices().preparePutTemplate("template_1")
            .setSource("{\n" +
                    "    \"template\" : \"*\",\n" +
                    "    \"warmers\" : {\n" +
                    "        \"warmer_1\" : {\n" +
                    "            \"types\" : [],\n" +
                    "            \"source\" : {\n" +
                    "                \"query\" : {\n" +
                    "                    \"match_all\" : {}\n" +
                    "                }\n" +
                    "            }\n" +
                    "        }\n" +
                    "    }\n" +
                    "}")
            .get();

    createIndex("test");
    ensureGreen();

    // The template's warmer must appear in the index's cluster-state metadata.
    ClusterState state = client().admin().cluster().prepareState().get().getState();
    IndexWarmersMetaData warmers = state.metaData().index("test").custom(IndexWarmersMetaData.TYPE);
    assertThat(warmers, Matchers.notNullValue());
    assertThat(warmers.entries().size(), equalTo(1));

    client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).get();
    client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).get();
}
@Test
public void createIndexWarmer() {
    // A warmer can also be declared directly in the create-index source
    // (rather than via the put-warmer API or a template).
    assertAcked(prepareCreate("test")
            .setSource("{\n" +
                    " \"warmers\" : {\n" +
                    " \"warmer_1\" : {\n" +
                    " \"types\" : [],\n" +
                    " \"source\" : {\n" +
                    " \"query\" : {\n" +
                    " \"match_all\" : {}\n" +
                    " }\n" +
                    " }\n" +
                    " }\n" +
                    " }\n" +
                    "}"));
    // Verify it is registered in the index metadata in cluster state.
    ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
    IndexWarmersMetaData warmersMetaData = clusterState.metaData().index("test").custom(IndexWarmersMetaData.TYPE);
    assertThat(warmersMetaData, Matchers.notNullValue());
    assertThat(warmersMetaData.entries().size(), equalTo(1));
    // Index with refresh so the warmer actually gets exercised.
    client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet();
}
@Test
public void deleteNonExistentIndexWarmerTest() {
    // Deleting a warmer that was never registered must raise
    // IndexWarmerMissingException naming the missing warmer.
    createIndex("test");
    try {
        client().admin().indices().prepareDeleteWarmer()
                .setIndices("test")
                .setNames("foo")
                .execute().actionGet();
        fail("warmer foo should not exist");
    } catch (IndexWarmerMissingException expected) {
        assertThat(expected.names()[0], equalTo("foo"));
    }
}
@Test
public void deleteIndexWarmerTest() {
    createIndex("test");
    ensureGreen();
    // Register a single warmer and verify it is listed under the "test" index.
    PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer")
            .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery()))
            .get();
    assertThat(putWarmerResponse.isAcknowledged(), equalTo(true));
    GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("test").get();
    assertThat(getWarmersResponse.warmers().size(), equalTo(1));
    // warmers() is keyed by index name; the single entry belongs to "test".
    ObjectObjectCursor<String, ImmutableList<IndexWarmersMetaData.Entry>> entry = getWarmersResponse.warmers().iterator().next();
    assertThat(entry.key, equalTo("test"));
    assertThat(entry.value.size(), equalTo(1));
    assertThat(entry.value.iterator().next().name(), equalTo("custom_warmer"));
    // Delete it and confirm no warmers remain.
    DeleteWarmerResponse deleteWarmerResponse = client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("custom_warmer").get();
    assertThat(deleteWarmerResponse.isAcknowledged(), equalTo(true));
    getWarmersResponse = client().admin().indices().prepareGetWarmers("test").get();
    assertThat(getWarmersResponse.warmers().size(), equalTo(0));
}
@Test // issue 3246
public void ensureThatIndexWarmersCanBeChangedOnRuntime() throws Exception {
    createIndex("test");
    ensureGreen();
    PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer")
            .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery()))
            .execute().actionGet();
    assertThat(putWarmerResponse.isAcknowledged(), equalTo(true));
    // Index with refresh so the warmer runs at least once before it is disabled.
    client().prepareIndex("test", "test", "1").setSource("foo", "bar").setRefresh(true).execute().actionGet();
    logger.info("--> Disabling warmers execution");
    client().admin().indices().prepareUpdateSettings("test").setSettings(ImmutableSettings.builder().put("index.warmer.enabled", false)).execute().actionGet();
    long warmerRunsAfterDisabling = getWarmerRuns();
    assertThat(warmerRunsAfterDisabling, greaterThanOrEqualTo(1L));
    // With index.warmer.enabled=false, further indexing/refresh must not bump the run counter.
    client().prepareIndex("test", "test", "2").setSource("foo2", "bar2").setRefresh(true).execute().actionGet();
    assertThat(getWarmerRuns(), equalTo(warmerRunsAfterDisabling));
}
@Test
public void gettingAllWarmersUsingAllAndWildcardsShouldWork() throws Exception {
    createIndex("test");
    ensureGreen();
    // Register two warmers on the same index.
    PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer")
            .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery()))
            .execute().actionGet();
    assertThat(putWarmerResponse.isAcknowledged(), equalTo(true));
    PutWarmerResponse anotherPutWarmerResponse = client().admin().indices().preparePutWarmer("second_custom_warmer")
            .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery()))
            .execute().actionGet();
    assertThat(anotherPutWarmerResponse.isAcknowledged(), equalTo(true));
    // Every addressing style below resolves to the same single index, so each
    // response map (keyed by index name) has exactly one entry.
    GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("*").addWarmers("*").get();
    assertThat(getWarmersResponse.warmers().size(), is(1));
    getWarmersResponse = client().admin().indices().prepareGetWarmers("_all").addWarmers("_all").get();
    assertThat(getWarmersResponse.warmers().size(), is(1));
    getWarmersResponse = client().admin().indices().prepareGetWarmers("t*").addWarmers("c*").get();
    assertThat(getWarmersResponse.warmers().size(), is(1));
    getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("custom_warmer", "second_custom_warmer").get();
    assertThat(getWarmersResponse.warmers().size(), is(1));
}
/** Returns the total warmer-run counter for the primaries of index "test". */
private long getWarmerRuns() {
    IndicesStatsResponse stats = client().admin().indices().prepareStats("test")
            .clear()
            .setWarmer(true)
            .execute().actionGet();
    return stats.getIndex("test").getPrimaries().warmer.total();
}
/** Sums the in-memory footprint of every segment of every shard of {@code idx}. */
private long getSegmentsMemoryUsage(String idx) {
    IndicesSegmentResponse segmentResponse =
            client().admin().indices().segments(Requests.indicesSegmentsRequest(idx)).actionGet();
    long memoryTotal = 0;
    for (IndexShardSegments shard : segmentResponse.getIndices().get(idx)) {
        for (ShardSegments shardSegments : shard) {
            for (Segment segment : shardSegments) {
                logger.debug("+=" + segment.memoryInBytes + " " + shard.getShardId() + " " + shardSegments.getIndex());
                memoryTotal += segment.memoryInBytes;
            }
        }
    }
    return memoryTotal;
}
/**
 * Strategies for creating a single-shard, no-replica test index with different
 * norms-loading configurations. {@link #isLazy()} reports whether norms are
 * expected to be loaded only on first query for that configuration.
 */
private enum LoadingMethod {
    // Index-level setting: norms loaded lazily (isLazy() default of true applies).
    LAZY {
        @Override
        CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) {
            return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.LAZY_VALUE));
        }
    },
    // Index-level setting: norms loaded eagerly.
    EAGER {
        @Override
        CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) {
            return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.EAGER_VALUE));
        }
        @Override
        boolean isLazy() {
            return false;
        }
    },
    // Index setting stays lazy, but the field's mapping overrides norms loading to eager.
    EAGER_PER_FIELD {
        @Override
        CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) throws Exception {
            return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.LAZY_VALUE)).addMapping(type, JsonXContent.contentBuilder()
                    .startObject()
                    .startObject(type)
                    .startObject("properties")
                    .startObject(fieldName)
                    .field("type", "string")
                    .startObject("norms")
                    .field("loading", Loading.EAGER_VALUE)
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
                    .endObject()
            );
        }
        @Override
        boolean isLazy() {
            return false;
        }
    };
    // Base settings shared by all constants: one shard, zero replicas.
    private static Settings SINGLE_SHARD_NO_REPLICA = ImmutableSettings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
    // Builds (but does not execute) the create-index request for this strategy.
    abstract CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) throws Exception;
    // True when norms should be absent until the first query touches them.
    boolean isLazy() {
        return true;
    }
}
// NOTE: we have to ensure we defeat compression strategies of the default codec...
public void testEagerLoading() throws Exception {
    // For each loading strategy: create the index, index two docs, then verify
    // that lazy configs only pay norms memory after the first query while eager
    // configs show no memory increase (norms were already loaded).
    for (LoadingMethod method : LoadingMethod.values()) {
        logger.debug("METHOD " + method);
        String indexName = method.name().toLowerCase(Locale.ROOT);
        assertAcked(method.createIndex(indexName, "t", "foo"));
        // index a doc with 1 token, and one with 3 tokens so we dont get CONST compressed (otherwise norms take zero memory usage)
        client().prepareIndex(indexName, "t", "1").setSource("foo", "bar").execute().actionGet();
        client().prepareIndex(indexName, "t", "2").setSource("foo", "bar baz foo").setRefresh(true).execute().actionGet();
        ensureGreen(indexName);
        long memoryUsage0 = getSegmentsMemoryUsage(indexName);
        // queries load norms if they were not loaded before
        client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("foo", "bar")).execute().actionGet();
        long memoryUsage1 = getSegmentsMemoryUsage(indexName);
        if (method.isLazy()) {
            assertThat(memoryUsage1, greaterThan(memoryUsage0));
        } else {
            assertThat(memoryUsage1, equalTo(memoryUsage0));
        }
    }
}
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.setup;
import static java.awt.GridBagConstraints.BOTH;
import static java.awt.GridBagConstraints.CENTER;
import static java.awt.GridBagConstraints.HORIZONTAL;
import static java.awt.GridBagConstraints.NONE;
import static java.awt.GridBagConstraints.NORTH;
import static java.awt.GridBagConstraints.SOUTH;
import static java.awt.GridBagConstraints.SOUTHEAST;
import java.awt.Color;
import java.awt.Desktop;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.BorderFactory;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JTable;
import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import javax.swing.table.DefaultTableModel;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.badlogic.gdx.setup.GdxSetupUI.SetupButton;
public class ExternalExtensionsDialog extends JDialog implements TableModelListener {
private JPanel contentPane;
private SetupButton buttonOK;
private SetupButton buttonCancel;
private JPanel topPanel;
private ExtensionTableModel tableModel;
JTable table;
private JPanel bottomPanel;
private JPanel buttonPanel;
private JScrollPane scrollPane;
private JLabel warningNotice;
private JLabel warningNotice2;
private List<Dependency> mainDependenciesSnapshot = new ArrayList<Dependency>();
List<Dependency> mainDependencies;
/**
 * Creates the modal third-party-extensions dialog.
 *
 * @param mainDependencies live list of selected dependencies; checkbox changes in
 *        this dialog are applied directly to it (and rolled back on Cancel)
 */
public ExternalExtensionsDialog (List<Dependency> mainDependencies) {
    this.mainDependencies = mainDependencies;
    contentPane = new JPanel(new GridBagLayout());
    setContentPane(contentPane);
    setModal(true);
    uiLayout();
    uiStyle();
    // BUGFIX: must run AFTER uiLayout() — buttonOK is only instantiated there.
    // The original call before uiLayout() passed null, so no default button was set.
    getRootPane().setDefaultButton(buttonOK);
    buttonOK.addActionListener(new ActionListener() {
        public void actionPerformed (ActionEvent e) {
            onOK();
        }
    });
    buttonCancel.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed (ActionEvent e) {
            onCancel();
        }
    });
    setTitle("Third party external extensions");
    setSize(600, 300);
    setLocationRelativeTo(null);
}
/**
 * Shows the dialog (modally), first snapshotting the current dependency
 * selection so Cancel can restore it.
 */
public void showDialog () {
    takeSnapshot();
    setVisible(true);
}
/**
 * Builds the dialog's three vertical sections: warning heading, extensions
 * table, and the Save/Cancel button row; then populates the table from the
 * bundled extensions.xml.
 */
private void uiLayout () {
    topPanel = new JPanel(new GridBagLayout());
    topPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
    warningNotice = new JLabel("List of third party extensions for LibGDX");
    warningNotice2 = new JLabel("These are not maintained by the LibGDX team, please see the support links for info and help");
    warningNotice.setHorizontalAlignment(JLabel.CENTER);
    warningNotice2.setHorizontalAlignment(JLabel.CENTER);
    topPanel.add(warningNotice, new GridBagConstraints(0, 0, 1, 1, 1, 0, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    topPanel.add(warningNotice2, new GridBagConstraints(0, 1, 1, 1, 1, 0, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    JSeparator separator = new JSeparator();
    separator.setForeground(new Color(85, 85, 85));
    separator.setBackground(new Color(85, 85, 85));
    topPanel.add(separator, new GridBagConstraints(0, 2, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    tableModel = new ExtensionTableModel();
    // Tooltip text is delegated to the model, which answers per-cell.
    table = new JTable(tableModel) {
        @Override
        public String getToolTipText (MouseEvent e) {
            return ((ExtensionTableModel)getModel()).getToolTip(e);
        }
    };
    table.getColumnModel().getColumn(0).setPreferredWidth(10);
    table.getColumnModel().getColumn(1).setPreferredWidth(50);
    table.getColumnModel().getColumn(2).setPreferredWidth(100);
    table.getColumnModel().getColumn(3).setPreferredWidth(20);
    table.getColumnModel().getColumn(4).setPreferredWidth(30);
    table.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
    table.getTableHeader().setReorderingAllowed(false);
    // The dialog itself reacts to checkbox edits — see tableChanged().
    table.getModel().addTableModelListener(this);
    // Clicking the "Support" column (index 5) opens the extension's website.
    table.addMouseListener(new MouseAdapter() {
        public void mouseClicked (MouseEvent e) {
            int row = table.getSelectedRow();
            int column = table.getSelectedColumn();
            if (column == 5) {
                URI uri = ((ExtensionTableModel)table.getModel()).getURI(row, column);
                if (uri != null) {
                    try {
                        Desktop.getDesktop().browse(uri);
                    } catch (IOException e1) {
                        e1.printStackTrace();
                    }
                }
            }
        }
    });
    scrollPane = new JScrollPane(table);
    bottomPanel = new JPanel(new GridBagLayout());
    buttonPanel = new JPanel(new GridBagLayout());
    buttonPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
    buttonOK = new SetupButton("Save");
    buttonCancel = new SetupButton("Cancel");
    buttonPanel.add(buttonOK, new GridBagConstraints(0, 0, 1, 1, 0, 0, CENTER, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    buttonPanel.add(buttonCancel, new GridBagConstraints(1, 0, 1, 1, 0, 0, CENTER, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    bottomPanel.add(buttonPanel, new GridBagConstraints(3, 0, 1, 1, 1, 1, SOUTHEAST, NONE, new Insets(0, 0, 0, 0), 0, 0));
    contentPane.add(topPanel, new GridBagConstraints(0, 0, 1, 1, 1, 0.1, NORTH, BOTH, new Insets(0, 0, 0, 0), 0, 0));
    contentPane.add(scrollPane, new GridBagConstraints(0, 1, 1, 1, 1, 1, NORTH, BOTH, new Insets(0, 0, 0, 0), 0, 0));
    contentPane.add(bottomPanel, new GridBagConstraints(0, 2, 1, 1, 1, 0, SOUTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    // Populate the table; parse failures are only printed, leaving the table empty.
    try {
        initData();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Parses the bundled extensions.xml (a trusted classpath resource, not user
 * input) and registers one unchecked row per {@code <extension>} element in
 * the table model. Extensions whose website URL is malformed are skipped.
 *
 * @throws ParserConfigurationException if no XML parser can be created
 * @throws IOException on read errors
 * @throws SAXException on malformed XML
 */
private void initData () throws ParserConfigurationException, IOException, SAXException {
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = dbFactory.newDocumentBuilder();
    Document doc = builder.parse(ExternalExtensionsDialog.class
            .getResourceAsStream("/com/badlogic/gdx/setup/data/extensions.xml"));
    doc.getDocumentElement().normalize();
    NodeList nList = doc.getElementsByTagName("extension");
    for (int i = 0; i < nList.getLength(); i++) {
        Node nNode = nList.item(i);
        if (nNode.getNodeType() != Node.ELEMENT_NODE) {
            continue;
        }
        Element eElement = (Element)nNode;
        String name = eElement.getElementsByTagName("name").item(0).getTextContent();
        String description = eElement.getElementsByTagName("description").item(0).getTextContent();
        String version = eElement.getElementsByTagName("version").item(0).getTextContent();
        String compatibility = eElement.getElementsByTagName("compatibility").item(0).getTextContent();
        String url = eElement.getElementsByTagName("website").item(0).getTextContent();
        // GWT module names to inherit in the html project (was a dead "= null"
        // initializer immediately overwritten; assign directly instead).
        NodeList inheritsNode = eElement.getElementsByTagName("inherit");
        String[] gwtInherits = new String[inheritsNode.getLength()];
        for (int j = 0; j < inheritsNode.getLength(); j++)
            gwtInherits[j] = inheritsNode.item(j).getTextContent();
        // Per-platform dependency lists (null value = platform unsupported).
        final HashMap<String, List<ExternalExtensionDependency>> dependencies = new HashMap<String, List<ExternalExtensionDependency>>();
        addToDependencyMapFromXML(dependencies, eElement, "core");
        addToDependencyMapFromXML(dependencies, eElement, "desktop");
        addToDependencyMapFromXML(dependencies, eElement, "android");
        addToDependencyMapFromXML(dependencies, eElement, "ios");
        addToDependencyMapFromXML(dependencies, eElement, "html");
        URI uri = null;
        try {
            uri = new URI(url);
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        if (uri != null) {
            final ExternalExtension extension = new ExternalExtension(name, gwtInherits, description, version);
            extension.setDependencies(dependencies);
            tableModel.addExtension(extension, false, name, description, version, compatibility, uri);
        }
    }
}
/** Applies the dark color theme to every component of the dialog. */
private void uiStyle () {
    // Shared palette (Color is immutable, so reusing instances is safe).
    Color background = new Color(36, 36, 36);
    Color tableBackground = new Color(46, 46, 46);
    Color foreground = new Color(255, 255, 255);
    Color warning = new Color(255, 20, 20);
    contentPane.setBackground(background);
    topPanel.setBackground(background);
    topPanel.setForeground(foreground);
    table.setBackground(tableBackground);
    table.setForeground(foreground);
    bottomPanel.setBackground(background);
    bottomPanel.setForeground(foreground);
    buttonPanel.setBackground(background);
    buttonPanel.setForeground(foreground);
    scrollPane.setBorder(BorderFactory.createEmptyBorder());
    scrollPane.setBackground(background);
    scrollPane.getViewport().setBackground(background);
    warningNotice.setForeground(warning);
    warningNotice2.setForeground(warning);
}
/** Save: hides the dialog, keeping the checkbox edits already applied to mainDependencies. */
void onOK () {
    setVisible(false);
}
/** Cancel: hides the dialog and rolls mainDependencies back to the snapshot. */
void onCancel () {
    setVisible(false);
    restore();
}
/**
 * Captures the current contents of {@code mainDependencies} so that
 * {@link #restore()} can roll back any edits made while the dialog is open.
 */
private void takeSnapshot () {
    mainDependenciesSnapshot.clear();
    // Bulk copy instead of the original element-by-element index loop.
    mainDependenciesSnapshot.addAll(mainDependencies);
}
/**
 * Rolls the dialog back to the state captured by {@link #takeSnapshot()}:
 * clears the live dependency list, unticks every row, then re-adds each
 * snapshotted dependency and re-ticks its extension's checkbox (which also
 * fires tableChanged).
 */
private void restore () {
    mainDependencies.clear();
    ExtensionTableModel model = (ExtensionTableModel)table.getModel();
    model.unselectAll();
    for (Dependency dependency : mainDependenciesSnapshot) {
        mainDependencies.add(dependency);
        String extensionName = dependency.getName();
        // Snapshot entries without a matching table row (e.g. built-in
        // dependencies) are restored to the list but have no checkbox to tick.
        if (model.hasExtension(extensionName)) {
            model.setSelected(extensionName, true);
        }
    }
}
/**
 * Reads the {@code <platform>} child of an extension element and records its
 * dependency list in {@code dependencies}. Conventions in extensions.xml:
 * empty element = supported with no extra dependencies; literal text "null" =
 * platform unsupported (stored as a null list); otherwise each nested
 * {@code <dependency>} becomes an entry. Absent platform element = no map entry.
 */
private void addToDependencyMapFromXML (Map<String, List<ExternalExtensionDependency>> dependencies, Element eElement, String platform) {
    // Hoisted: the original evaluated getElementsByTagName(platform).item(0) twice.
    Node platformNode = eElement.getElementsByTagName(platform).item(0);
    if (platformNode == null) {
        return;
    }
    Element project = (Element)platformNode;
    List<ExternalExtensionDependency> deps = new ArrayList<ExternalExtensionDependency>();
    String text = project.getTextContent().trim();
    if (text.equals("")) {
        // No dependencies required
    } else if (text.equals("null")) {
        // Not supported
        deps = null;
    } else {
        NodeList nList = project.getElementsByTagName("dependency");
        for (int i = 0; i < nList.getLength(); i++) {
            Node nNode = nList.item(i);
            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element dependencyNode = (Element)nNode;
                // external="true" marks a Maven-style coordinate rather than a gdx module.
                boolean external = Boolean.parseBoolean(dependencyNode.getAttribute("external"));
                deps.add(new ExternalExtensionDependency(dependencyNode.getTextContent(), external));
            }
        }
    }
    dependencies.put(platform, deps);
}
/**
 * Table model backing the extensions table. Column layout:
 * [0] use-checkbox, [1] name, [2] description, [3] version,
 * [4] compatibility, [5] support link (clickable URI).
 */
class ExtensionTableModel extends DefaultTableModel {
    // Maps row index -> the extension displayed in that row.
    private HashMap<Integer, ExternalExtension> extensions = new HashMap<Integer, ExternalExtension>();
    // Next row index handed out by addExtension().
    private int rowCount = 0;
    public ExtensionTableModel () {
        addColumn("Use");
        addColumn("Extension");
        addColumn("Description");
        addColumn("Version");
        addColumn("Compatibility");
        addColumn("Support");
    }
    /** Returns the extension shown in the given row (null if none). */
    public ExternalExtension getExtension (int row) {
        return extensions.get(row);
    }
    /** Returns the support link for the cell, or null for any column other than 5. */
    public URI getURI (int row, int column) {
        if (column != 5) return null;
        return (URI)getValueAt(row, column);
    }
    @Override
    public Class getColumnClass (int column) {
        // Column 0 renders/edits as a checkbox, column 5 holds the URI;
        // everything else falls back to the default (Object/String) renderer.
        if (column == 0) return Boolean.class;
        if (column == 5) return URI.class;
        return super.getColumnClass(column);
    }
    @Override
    public boolean isCellEditable (int x, int y) {
        // NOTE(review): despite the x/y names, JTable invokes this as
        // (row, column); only the checkbox column (0) is editable.
        return y == 0;
    }
    /** Per-cell tooltip used by the JTable subclass in uiLayout(). */
    public String getToolTip (MouseEvent e) {
        int row = table.rowAtPoint(e.getPoint());
        int column = table.columnAtPoint(e.getPoint());
        if (column == 5) {
            return "Click me!";
        } else if (column != 0) {
            return getValueAt(row, column).toString();
        } else {
            return "Select if you want to use this extension!";
        }
    }
    /** Unticks every row's checkbox (fires tableChanged for each row). */
    public void unselectAll () {
        for (int row : extensions.keySet()) {
            table.setValueAt(false, row, 0);
        }
    }
    /** True if any row displays an extension with the given name. */
    public boolean hasExtension (String extensionName) {
        for (ExternalExtension extension : extensions.values()) {
            if (extension.getName().equals(extensionName)) return true;
        }
        return false;
    }
    /** Sets the checkbox of the named extension's row; no-op if the name is unknown. */
    public void setSelected (String extensionName, boolean selected) {
        int row = -1;
        for (int i : extensions.keySet()) {
            if (extensions.get(i).getName().equals(extensionName)) {
                row = i;
                break;
            }
        }
        if (row != -1) table.setValueAt(selected, row, 0);
    }
    /** Appends a row for the extension and remembers it under the new row index. */
    public void addExtension (ExternalExtension extension, Boolean checkbox, String name, String description, String version,
        String compatibility, URI support) {
        addRow(new Object[] {checkbox, name, description, version, compatibility, support});
        extensions.put(rowCount++, extension);
    }
}
/**
 * Reacts to checkbox edits: ticking a row adds the extension's generated
 * dependency to mainDependencies (if absent); unticking removes it.
 */
@Override
public void tableChanged (TableModelEvent e) {
    // Only the checkbox column drives the dependency list.
    if (e.getColumn() != 0) {
        return;
    }
    ExtensionTableModel model = (ExtensionTableModel)table.getModel();
    int row = e.getFirstRow();
    ExternalExtension extension = model.getExtension(row);
    Dependency dependency = extension.generateDependency();
    boolean checked = (Boolean)model.getValueAt(row, 0);
    if (checked) {
        if (!mainDependencies.contains(dependency)) {
            mainDependencies.add(dependency);
        }
    } else {
        mainDependencies.remove(dependency);
    }
}
}
| |
package com.faber.dao;
//<editor-fold defaultstate="collapsed" desc="IMPORT">
import com.faber.connection.ConnectSQL;
import com.faber.dto.AdvertisementDTO;
import com.faber.utils.GeneratedIDUtils;
import com.faber.utils.LogUtils;
import com.faber.utils.MyConstants;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.exception.ExceptionUtils;
//</editor-fold>
/**
*
* @author Le Dinh Tuan
*/
public class AdvertisementDAO {
LogUtils log = new LogUtils();
SimpleDateFormat date_format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
//<editor-fold defaultstate="collapsed" desc="GET LIST ADVERTISEMENT BY CAMPAIGN ID">
/**
 * Fetches every advertisement of a campaign, newest UPDATED first, each joined
 * with one image path (LEFT JOIN + GROUP BY AD_ID keeps a single row per ad).
 *
 * @param campaignID campaign whose advertisements are listed
 * @return populated DTOs; empty list on error (the error is logged)
 */
public List<AdvertisementDTO> getListAdvertisementByCampaignID(Integer campaignID) {
    List<AdvertisementDTO> list = new ArrayList<>();
    String sql = "SELECT AD.*, AI.IMAGE_PATH \n"
            + "FROM B_ADVERTISEMENT AS AD\n"
            + "LEFT JOIN B_ADVERTISEMENT_IMAGE AS AI\n"
            + "ON AD.AD_ID = AI.ADS_ID\n"
            + "WHERE CAMPAIGN_ID=?\n"
            + "GROUP BY AD.AD_ID\n"
            + "ORDER BY AD.UPDATED DESC";
    // try-with-resources closes rs/ps/con in the right order even on error,
    // replacing the hand-written finally block that swallowed close() failures.
    try (Connection con = (Connection) ConnectSQL.getConnection();
            PreparedStatement ps = con.prepareStatement(sql)) {
        ps.setInt(1, campaignID);
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                AdvertisementDTO advertisementDTO = new AdvertisementDTO();
                advertisementDTO.setAdNo("ADS-" + rs.getString("AD_ID"));
                advertisementDTO.setAdID(rs.getInt("AD_ID"));
                advertisementDTO.setAdName(rs.getString("AD_NAME"));
                advertisementDTO.setAdType(rs.getString("AD_TYPE"));
                advertisementDTO.setCampaignID(rs.getInt("CAMPAIGN_ID"));
                advertisementDTO.setAdDevice(rs.getString("AD_DEVICE"));
                advertisementDTO.setTargetUrl(rs.getString("TARGET_URL"));
                advertisementDTO.setImagePath(rs.getString("IMAGE_PATH"));
                advertisementDTO.setAdTextTitle(rs.getString("AD_TEXT_TITLE"));
                advertisementDTO.setAdMetaDescription(rs.getString("AD_META_DESCRIPTION"));
                advertisementDTO.setIsApproval(rs.getInt("IS_APPROVAL"));
                advertisementDTO.setReasonNotApproval(rs.getString("REASON_NOT_APPROVAL"));
                advertisementDTO.setCreateBy(rs.getString("CREATE_BY"));
                // NOTE(review): date_format is a shared SimpleDateFormat field and is
                // not thread-safe — confirm this DAO is not used concurrently.
                advertisementDTO.setCreated(date_format.format(rs.getTimestamp("CREATED")));
                list.add(advertisementDTO);
            }
        }
    } catch (Exception ex) {
        log.writeLog(MyConstants.ERROR_MYSQL, ExceptionUtils.getStackTrace(ex));
    }
    return list;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="GET LIST ADVERTISEMENT BY ID">
/**
 * Loads the advertisement with the given id (at most one row) together with
 * its full image list.
 *
 * @param adsID primary key of the advertisement
 * @return list with zero or one DTO; empty on error (the error is logged)
 */
public List<AdvertisementDTO> getListAdvertisementByID(Integer adsID) {
    List<AdvertisementDTO> list = new ArrayList<>();
    String sql = "SELECT AD.*\n"
            + "FROM B_ADVERTISEMENT AS AD\n"
            + "WHERE AD_ID=?";
    // try-with-resources replaces the manual finally block that swallowed close() failures.
    try (Connection con = (Connection) ConnectSQL.getConnection();
            PreparedStatement ps = con.prepareStatement(sql)) {
        ps.setInt(1, adsID);
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                AdvertisementImageDAO advertisementImageDAO = new AdvertisementImageDAO();
                AdvertisementDTO advertisementDTO = new AdvertisementDTO();
                advertisementDTO.setAdID(rs.getInt("AD_ID"));
                advertisementDTO.setAdName(rs.getString("AD_NAME"));
                advertisementDTO.setAdType(rs.getString("AD_TYPE"));
                advertisementDTO.setCampaignID(rs.getInt("CAMPAIGN_ID"));
                advertisementDTO.setAdDevice(rs.getString("AD_DEVICE"));
                advertisementDTO.setTargetUrl(rs.getString("TARGET_URL"));
                advertisementDTO.setAdTextTitle(rs.getString("AD_TEXT_TITLE"));
                advertisementDTO.setAdMetaDescription(rs.getString("AD_META_DESCRIPTION"));
                advertisementDTO.setIsApproval(rs.getInt("IS_APPROVAL"));
                // Secondary query per row; acceptable here since AD_ID is unique.
                advertisementDTO.setListImage(advertisementImageDAO.getListImageByAdvertimentID(adsID));
                list.add(advertisementDTO);
            }
        }
    } catch (Exception ex) {
        log.writeLog(MyConstants.ERROR_MYSQL, ExceptionUtils.getStackTrace(ex));
    }
    return list;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="ADD ADVERTISEMENT">
/**
 * Inserts a new advertisement under a freshly generated id. On an id collision
 * ("Duplicate" key error) it retries once per collision with a new id.
 *
 * @return the new ad id as a string on success, otherwise "failed"
 */
public String addAdvertisement(AdvertisementDTO advertisementDTO) {
    String sql = "INSERT INTO B_ADVERTISEMENT(AD_ID, AD_NAME, AD_TYPE, CAMPAIGN_ID, AD_DEVICE, TARGET_URL, "
            + "AD_TEXT_TITLE, AD_META_DESCRIPTION, CREATE_BY, CREATED, UPDATED) "
            + "VALUES (?,?,?,?,?,?,?,?,?,?,?)";
    // try-with-resources replaces the manual finally block that swallowed close() failures.
    try (Connection con = (Connection) ConnectSQL.getConnection();
            PreparedStatement ps = con.prepareStatement(sql)) {
        GeneratedIDUtils generatedIDUtils = new GeneratedIDUtils();
        Integer adsID = generatedIDUtils.generatedID();
        ps.setInt(1, adsID);
        ps.setString(2, advertisementDTO.getAdName());
        ps.setString(3, advertisementDTO.getAdType());
        ps.setInt(4, advertisementDTO.getCampaignID());
        ps.setString(5, advertisementDTO.getAdDevice());
        ps.setString(6, advertisementDTO.getTargetUrl());
        ps.setString(7, advertisementDTO.getAdTextTitle());
        ps.setString(8, advertisementDTO.getAdMetaDescription());
        ps.setString(9, advertisementDTO.getCreateBy());
        // CREATED and UPDATED start out identical for a new row.
        ps.setString(10, advertisementDTO.getCreated());
        ps.setString(11, advertisementDTO.getCreated());
        if (ps.executeUpdate() == 1) {
            return adsID.toString();
        }
        return "failed";
    } catch (Exception ex) {
        log.writeLog(MyConstants.ERROR_MYSQL, ExceptionUtils.getStackTrace(ex));
        // Null-safe: SQLExceptions can carry a null message.
        if (ex.getMessage() != null && ex.getMessage().contains("Duplicate")) {
            // BUGFIX: propagate the retry's result — the original invoked the retry
            // but discarded its return value and answered "failed" regardless.
            return addAdvertisement(advertisementDTO);
        }
        return "failed";
    }
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="UPDATE ADVERTISEMENT">
/**
 * Updates the mutable columns of an advertisement identified by its AD_ID.
 *
 * @return "success" when exactly one row was updated, otherwise "failed"
 */
public String updateAdvertisement(AdvertisementDTO advertisementDTO) {
    String sql = "UPDATE B_ADVERTISEMENT SET AD_NAME=?, AD_TYPE=?, AD_DEVICE=?, TARGET_URL=?, AD_TEXT_TITLE=?, AD_META_DESCRIPTION=? "
            + "WHERE AD_ID=?";
    // try-with-resources replaces the manual finally block that swallowed close() failures.
    try (Connection con = (Connection) ConnectSQL.getConnection();
            PreparedStatement ps = con.prepareStatement(sql)) {
        ps.setString(1, advertisementDTO.getAdName());
        ps.setString(2, advertisementDTO.getAdType());
        ps.setString(3, advertisementDTO.getAdDevice());
        ps.setString(4, advertisementDTO.getTargetUrl());
        ps.setString(5, advertisementDTO.getAdTextTitle());
        ps.setString(6, advertisementDTO.getAdMetaDescription());
        ps.setInt(7, advertisementDTO.getAdID());
        if (ps.executeUpdate() == 1) {
            return "success";
        }
        return "failed";
    } catch (Exception ex) {
        log.writeLog(MyConstants.ERROR_MYSQL, ExceptionUtils.getStackTrace(ex));
        return "failed";
    }
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="DELETE ADVERTISEMENT">
/**
 * Deletes an advertisement via the PROC_DELETE_ADVERTISEMENT stored procedure.
 *
 * @param adsID primary key of the advertisement to delete
 * @return "success" when the procedure call completed, otherwise "failed"
 */
public String deleteAdvertisement(Integer adsID) {
    String storeProc = "{CALL PROC_DELETE_ADVERTISEMENT(?)}";
    // try-with-resources replaces the manual finally block that swallowed close() failures.
    try (Connection con = (Connection) ConnectSQL.getConnection();
            CallableStatement callableStatement = con.prepareCall(storeProc)) {
        callableStatement.setInt(1, adsID);
        // BUGFIX: use execute() instead of executeQuery(): a delete procedure
        // produces no result set, and executeQuery() throws SQLException on
        // drivers (e.g. MySQL Connector/J) when no ResultSet is returned.
        callableStatement.execute();
        return "success";
    } catch (Exception ex) {
        log.writeLog(MyConstants.ERROR_MYSQL, ExceptionUtils.getStackTrace(ex));
        return "failed";
    }
}
//</editor-fold>
}
| |
package com.lambdaworks.redis;
import static com.google.common.base.Preconditions.checkState;
import static com.lambdaworks.redis.ConnectionEventTrigger.local;
import static com.lambdaworks.redis.ConnectionEventTrigger.remote;
import static com.lambdaworks.redis.PlainChannelInitializer.INITIALIZING_CMD_BUILDER;
import static com.lambdaworks.redis.PlainChannelInitializer.pingBeforeActivate;
import static com.lambdaworks.redis.PlainChannelInitializer.removeIfExists;
import java.util.List;
import java.util.concurrent.Future;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLParameters;
import com.google.common.util.concurrent.SettableFuture;
import com.lambdaworks.redis.event.EventBus;
import com.lambdaworks.redis.event.connection.ConnectedEvent;
import com.lambdaworks.redis.event.connection.ConnectionActivatedEvent;
import com.lambdaworks.redis.event.connection.DisconnectedEvent;
import com.lambdaworks.redis.protocol.AsyncCommand;
import io.netty.channel.Channel;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.ssl.*;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
/**
* Connection builder for SSL connections. This class is part of the internal API.
*
* @author Mark Paluch
*/
public class SslConnectionBuilder extends ConnectionBuilder {
private RedisURI redisURI;
/**
 * Static factory for a new, unconfigured {@link SslConnectionBuilder};
 * callers must supply the URI via {@link #ssl(RedisURI)} before building.
 */
public static SslConnectionBuilder sslConnectionBuilder() {
    return new SslConnectionBuilder();
}
/**
 * Sets the {@link RedisURI} whose SSL settings (host/port for the engine,
 * peer verification, StartTLS) are used when building the initializer.
 *
 * @return {@code this} for fluent chaining
 */
public SslConnectionBuilder ssl(RedisURI redisURI) {
    this.redisURI = redisURI;
    return this;
}
@Override
protected List<ChannelHandler> buildHandlers() {
    // Fail fast if ssl(...) was never called or the URI is not an SSL URI,
    // before delegating to the non-SSL handler assembly.
    checkState(redisURI != null, "redisURI must not be null");
    checkState(redisURI.isSsl(), "redisURI is not configured for SSL (ssl is false)");
    return super.buildHandlers();
}
@Override
public RedisChannelInitializer build() {
    final List<ChannelHandler> channelHandlers = buildHandlers();
    // Wraps the regular handler chain in an initializer that installs the SSL
    // machinery and publishes connection events on the client's event bus.
    return new SslChannelInitializer(clientOptions().isPingBeforeActivateConnection(), channelHandlers, redisURI,
            clientResources().eventBus());
}
/**
* @author Mark Paluch
*/
static class SslChannelInitializer extends io.netty.channel.ChannelInitializer<Channel> implements RedisChannelInitializer {
private final boolean pingBeforeActivate;
private final List<ChannelHandler> handlers;
private final RedisURI redisURI;
private final EventBus eventBus;
private SettableFuture<Boolean> initializedFuture = SettableFuture.create();
/**
 * @param pingBeforeActivate whether a PING must succeed before the connection is activated
 * @param handlers channel handlers to install on the pipeline
 * @param redisURI source of the SSL settings (peer verification, StartTLS, host/port)
 * @param eventBus bus that receives connection lifecycle events
 */
public SslChannelInitializer(boolean pingBeforeActivate, List<ChannelHandler> handlers, RedisURI redisURI,
        EventBus eventBus) {
    this.pingBeforeActivate = pingBeforeActivate;
    this.handlers = handlers;
    this.redisURI = redisURI;
    this.eventBus = eventBus;
}
@Override
protected void initChannel(Channel channel) throws Exception {
SSLParameters sslParams = new SSLParameters();
SslContextBuilder sslContextBuilder = SslContextBuilder.forClient().sslProvider(SslProvider.JDK);
if (redisURI.isVerifyPeer()) {
sslParams.setEndpointIdentificationAlgorithm("HTTPS");
} else {
sslContextBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE);
}
SslContext sslContext = sslContextBuilder.build();
SSLEngine sslEngine = sslContext.newEngine(channel.alloc(), redisURI.getHost(), redisURI.getPort());
sslEngine.setSSLParameters(sslParams);
removeIfExists(channel.pipeline(), SslHandler.class);
if (channel.pipeline().get("first") == null) {
channel.pipeline().addFirst("first", new ChannelDuplexHandler() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
eventBus.publish(new ConnectedEvent(local(ctx), remote(ctx)));
super.channelActive(ctx);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
eventBus.publish(new DisconnectedEvent(local(ctx), remote(ctx)));
super.channelInactive(ctx);
}
});
}
SslHandler sslHandler = new SslHandler(sslEngine, redisURI.isStartTls());
channel.pipeline().addLast(sslHandler);
if (channel.pipeline().get("channelActivator") == null) {
channel.pipeline().addLast("channelActivator", new RedisChannelInitializerImpl() {
private AsyncCommand<?, ?, ?> pingCommand;
@Override
public Future<Boolean> channelInitialized() {
return initializedFuture;
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
initializedFuture = SettableFuture.create();
pingCommand = null;
super.channelInactive(ctx);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
if (initializedFuture.isDone()) {
super.channelActive(ctx);
}
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof SslHandshakeCompletionEvent && !initializedFuture.isDone()) {
SslHandshakeCompletionEvent event = (SslHandshakeCompletionEvent) evt;
if (event.isSuccess()) {
if (pingBeforeActivate) {
if (redisURI.getPassword() != null && redisURI.getPassword().length != 0) {
pingCommand = new AsyncCommand<>(INITIALIZING_CMD_BUILDER.auth(new String(redisURI.getPassword())));
} else {
pingCommand = new AsyncCommand<>(INITIALIZING_CMD_BUILDER.ping());
}
pingBeforeActivate(pingCommand, initializedFuture, ctx, handlers);
} else {
ctx.fireChannelActive();
}
} else {
initializedFuture.setException(event.cause());
}
}
if (evt instanceof ConnectionEvents.Close) {
if (ctx.channel().isOpen()) {
ctx.channel().close();
}
}
if (evt instanceof ConnectionEvents.Activated) {
if (!initializedFuture.isDone()) {
initializedFuture.set(true);
eventBus.publish(new ConnectionActivatedEvent(local(ctx), remote(ctx)));
}
}
super.userEventTriggered(ctx, evt);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if (cause instanceof SSLHandshakeException || cause.getCause() instanceof SSLException) {
initializedFuture.setException(cause);
}
super.exceptionCaught(ctx, cause);
}
});
}
for (ChannelHandler handler : handlers) {
removeIfExists(channel.pipeline(), handler.getClass());
channel.pipeline().addLast(handler);
}
}
@Override
public Future<Boolean> channelInitialized() {
return initializedFuture;
}
}
}
| |
package labs.akhdani;
import com.fasterxml.jackson.databind.ObjectMapper;
import labs.akhdani.alt.*;
import spark.Route;
import java.io.*;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import static spark.Spark.*;
import static spark.Spark.externalStaticFileLocation;
/**
 * Minimal web framework built on Spark: dispatches {@code /<class>/<method>} URLs to
 * reflection-invoked controller classes under {@code <anchor-package>.route}, serves static
 * files, and wraps every result in a uniform JSON envelope {@code {s, m, d[, t, u]}}.
 */
public class Alt {

    private static final String TAG = Alt.class.getName();

    // Runtime environments: non-production adds timing ("t") and memory ("u") fields to
    // responses and exposes real exception messages; production hides them.
    public static final int ENV_DEVELOPMENT = 0;
    public static final int ENV_TESTING = 1;
    public static final int ENV_PRODUCTION = 2;

    // Status codes carried in the envelope's "s" field.
    public static final int STATUS_OK = 200;
    public static final int STATUS_UNAUTHORIZED = 401;
    public static final int STATUS_FORBIDDEN = 403;
    public static final int STATUS_NOTFOUND = 404;
    public static final int STATUS_ERROR = 500;

    public int environment = Alt.ENV_PRODUCTION;
    public String defaultRoute = "home";            // controller name used for requests to "/"
    public int port = 9090;
    public String staticFolder = "static";          // URL prefix (and Spark static location) for assets
    public int maxPostSize = 25 * 1024 * 1024;
    // When true the HTTP status line is always 200; the real status stays in the body's "s" field.
    public boolean surpressResponseCode = true;

    // Per-request timing/memory probes, written by the routing lambda and read by response().
    public long timestart = 0;
    public long timestop = 0;
    public long memorystart = 0;
    public long memorystop = 0;

    // Named instance registry; the no-name overloads use "default".
    public static Map<String, Alt> instances = new HashMap<>();

    private Class<?> clazz;     // anchor class; its package + ".route." roots controller lookup
    private Alt instance;
    private Runtime runtime = Runtime.getRuntime();
    private ObjectMapper mapper = new ObjectMapper();

    /** Returns the "default" instance, or {@code null} if none has been created yet. */
    public static Alt instance() {
        return instances.get("default");
    }

    /** Creates or returns the "default" instance anchored at {@code clazz}, configured from AltConfig. */
    public static Alt instance(Class<?> clazz){
        return instance("default", clazz);
    }

    /** Creates or returns the "default" instance anchored at {@code clazz} with an explicit config map. */
    public static Alt instance(Class<?> clazz, Map<String, Object> config){
        return instance("default", clazz, config);
    }

    /**
     * Creates or returns the named instance, building its configuration from the
     * {@code app.*} keys of {@link AltConfig}. Missing keys keep the field defaults.
     */
    public static Alt instance(String name, Class<?> clazz){
        Map<String, Object> config = new HashMap<>();
        if(AltConfig.get("app.port") != null)
            config.put("port", Integer.valueOf(AltConfig.get("app.port")));
        if(AltConfig.get("app.environment") != null)
            config.put("environment", AltConfig.get("app.environment").equalsIgnoreCase("development") ? ENV_DEVELOPMENT : (AltConfig.get("app.environment").equalsIgnoreCase("testing") ? ENV_TESTING : ENV_PRODUCTION));
        if(AltConfig.get("app.staticFolder") != null)
            config.put("staticFolder", AltConfig.get("app.staticFolder"));
        if(AltConfig.get("app.defaultRoute") != null)
            config.put("defaultRoute", AltConfig.get("app.defaultRoute"));
        if(AltConfig.get("app.maxPostSize") != null)
            // Fixed copy-paste bug: this previously re-put "defaultRoute", silently
            // discarding the configured app.maxPostSize value.
            config.put("maxPostSize", Integer.valueOf(AltConfig.get("app.maxPostSize")));
        if(AltConfig.get("app.surpressResponseCode") != null)
            config.put("surpressResponseCode", AltConfig.get("app.surpressResponseCode").equalsIgnoreCase("true"));
        return instance(name, clazz, config);
    }

    /**
     * Creates the named instance on first use (applying {@code config}) and returns it.
     * Subsequent calls with the same name return the cached instance and ignore {@code config}.
     */
    public static Alt instance(String name, Class<?> clazz, Map<String, Object> config){
        if(instances.get(name) == null){
            Alt alt = new Alt(clazz);
            if(config.get("port") != null)
                alt.setPort((Integer) config.get("port"));
            if(config.get("staticFolder") != null)
                alt.setStaticFolder((String) config.get("staticFolder"));
            if(config.get("environment") != null)
                alt.setEnvironment((Integer) config.get("environment"));
            if(config.get("defaultRoute") != null)
                alt.setDefaultRoute((String) config.get("defaultRoute"));
            if(config.get("maxPostSize") != null)
                alt.setMaxPostSize((Integer) config.get("maxPostSize"));
            if(config.get("surpressResponseCode") != null)
                alt.setSurpressResponseCode((Boolean) config.get("surpressResponseCode"));
            instances.put(name, alt);
        }
        return instances.get(name);
    }

    public Alt(Class<?> clazz){
        this.clazz = clazz;
        this.instance = this;
    }

    private void setEnvironment(int environment){
        this.environment = environment;
    }

    private void setDefaultRoute(String defaultRoute){
        this.defaultRoute = defaultRoute;
    }

    private void setPort(int port){
        this.port = port;
        port(port);  // also configures Spark's listen port
    }

    private void setStaticFolder(String staticFolder){
        this.staticFolder = staticFolder;
        staticFileLocation(staticFolder);  // also registers Spark's static file location
    }

    private void setMaxPostSize(int maxPostSize){
        this.maxPostSize = maxPostSize;
    }

    private void setSurpressResponseCode(boolean surpressResponseCode){
        this.surpressResponseCode = surpressResponseCode;
    }

    /**
     * Registers the catch-all dispatcher for {@code url}: resolves the request path to a
     * controller class/method under {@code <anchor-package>.route}, serves static files for
     * paths under {@link #staticFolder}, and wraps everything in the response envelope.
     */
    public void route(String url){
        url = url.equalsIgnoreCase("") ? "/" + url : url;
        Route route = (req, res)->{
            AltHttpRequest altHttpRequest = new AltHttpRequest(req, instance);
            AltHttpResponse altHttpResponse = new AltHttpResponse(res, instance);
            // CORS preflight: answer OPTIONS immediately with an empty OK envelope.
            if(req.requestMethod().equalsIgnoreCase("options"))
                return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_OK, "", null);
            instance.timestart = System.nanoTime();
            instance.memorystart = runtime.totalMemory() - runtime.freeMemory();
            String[] splat = altHttpRequest.splat();
            // Requests without a path (or with an empty first segment) go to the default route.
            if(splat.length == 0 || splat[0].isEmpty()) {
                splat = new String[1];
                splat[0] = this.defaultRoute;
            }
            // Strip a single trailing "/" from the matched path.
            String uri = splat[0].charAt(splat[0].length()-1) == '/' ? splat[0].substring(0, splat[0].length()-1) : splat[0];
            // Browsers request favicon.ico automatically; skip it.
            if(uri.equalsIgnoreCase("favicon.ico"))
                return "";
            String[] uris = uri.split("/");
            // Static file serving.
            // NOTE(review): an empty staticFolder makes EVERY request take this branch and
            // shadows the webservice dispatch below — confirm that is intended.
            if(this.staticFolder.equalsIgnoreCase("") || uris[0].equalsIgnoreCase(this.staticFolder)){
                try{
                    InputStream inputStream;
                    try {
                        // Prefer a file on disk; fall back to a classpath resource.
                        inputStream = new FileInputStream(uri);
                    }catch(Exception e) {
                        ClassLoader classloader = Thread.currentThread().getContextClassLoader();
                        inputStream = classloader.getResourceAsStream(uri);
                    }
                    // getResourceAsStream returns null for a missing resource; report 404
                    // explicitly instead of letting the read below throw NPE.
                    if(inputStream == null)
                        return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_NOTFOUND, "File tidak ditemukan", null);
                    res.type(Files.probeContentType(Paths.get(uri)));
                    res.status(200);
                    byte[] buf = new byte[1024];
                    OutputStream os = res.raw().getOutputStream();
                    int count;
                    while ((count = inputStream.read(buf)) >= 0) {
                        os.write(buf, 0, count);
                    }
                    inputStream.close();
                    os.close();
                    return "";
                }catch (Exception e){
                    e.printStackTrace();
                    return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_NOTFOUND, "File tidak ditemukan", null);
                }
            }
            // Webservice dispatch: last segment is the method, the one before it (or the only
            // one) is the controller class; earlier segments become sub-packages.
            int indexOf = uris.length > 1 ? uris.length-2 : uris.length-1;
            String className = uris[indexOf].substring(0, 1).toUpperCase() + uris[indexOf].substring(1);
            String methodName = uris.length > 1 ? uris[uris.length-1] : "index";
            String packageLocation = ".route.";
            for(int i=0; i<indexOf; i++){
                packageLocation += uris[i] + ".";
            }
            try{
                // Ensure a session exists before the controller runs.
                altHttpRequest.session(true);
                Class<?> clazz = Class.forName(this.clazz.getPackage().getName() + packageLocation + className);
                Object obj = clazz.newInstance();
                Object result = clazz.getMethod(methodName, AltHttpRequest.class, AltHttpResponse.class).invoke(obj, altHttpRequest, altHttpResponse);
                return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_OK, "", result);
            }catch(Exception e){
                if(e.getClass().equals(ClassNotFoundException.class) || e.getClass().equals(NoSuchMethodException.class)){
                    // No matching controller/method for this URL.
                    return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_NOTFOUND, "URL Not Found", null);
                }else if(e.getClass().equals(InvocationTargetException.class)){
                    if(e.getCause() instanceof AltException){
                        // Controller signalled a deliberate error with its own status code.
                        return instance.response(altHttpRequest, altHttpResponse, ((AltException) e.getCause()).getCode(), e.getCause().getMessage(), null);
                    }else{
                        if(this.instance.environment == Alt.ENV_DEVELOPMENT)
                            e.getCause().printStackTrace();
                        return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_ERROR, instance.environment == Alt.ENV_PRODUCTION ? "Internal Server Error" : e.getCause().getMessage(), null);
                    }
                }else{
                    return instance.response(altHttpRequest, altHttpResponse, Alt.STATUS_ERROR, instance.environment == Alt.ENV_PRODUCTION ? "Internal Server Error" : e.getMessage(), null);
                }
            }
        };
        this.route(url, route);
    }

    /** Binds {@code route} to {@code url} for all supported HTTP verbs. */
    public void route(String url, Route route){
        options(url, route);
        get(url, route);
        post(url, route);
        put(url, route);
        delete(url, route);
    }

    /**
     * Serializes the envelope {@code {s: status, m: message, d: res}} (plus "t"/"u"
    * timing and memory fields outside production), applies CORS headers and the
     * status-suppression policy, and optionally encrypts the payload.
     */
    public String response(AltHttpRequest altHttpRequest, AltHttpResponse altHttpResponse, int status, String message, Object res){
        instance.timestop = System.nanoTime();
        instance.memorystop = runtime.totalMemory() - runtime.freeMemory();
        HashMap<String, Object> output = new HashMap<>();
        output.put("s", status);
        output.put("m", message);
        output.put("d", res);
        if(this.environment != Alt.ENV_PRODUCTION) {
            output.put("t", (double) (instance.timestop - instance.timestart) / 1000000000);
            output.put("u", (double) instance.memorystop / (1024 * 1024));
        }
        // CORS headers for browser clients.
        altHttpResponse.header("Access-Control-Allow-Origin", "*");
        altHttpResponse.header("Access-Control-Allow-Headers", "Origin, Authorization, X-Requested-With, Content-Type, Accept");
        altHttpResponse.header("Access-Control-Request-Method", "*");
        // Optionally force HTTP 200 regardless of the logical status.
        output.put("s", this.surpressResponseCode ? 200 : (Integer) output.get("s"));
        altHttpResponse.status((Integer) output.get("s"));
        try{
            Object data;
            // In production, successful responses return only the data and failures only the
            // message; other environments return the full envelope.
            if((Integer) output.get("s") == Alt.STATUS_OK && instance.environment == Alt.ENV_PRODUCTION) {
                data = output.get("d");
            }else if((Integer) output.get("s") != Alt.STATUS_OK && instance.environment == Alt.ENV_PRODUCTION){
                data = output.getOrDefault("m", "Error pada server!");
            }else{
                data = output;
            }
            AltSecure secure = new AltSecure();
            String result;
            if(data.getClass().equals(String.class) || data.getClass().equals(Integer.class) || data.getClass().equals(Float.class) || data.getClass().equals(Double.class)){
                result = String.valueOf(data);
            }else {
                result = mapper.writeValueAsString(data);
            }
            // Encrypt only for secure requests in production with both key and IV configured.
            return altHttpRequest.secure && Alt.instance().environment == Alt.ENV_PRODUCTION && AltConfig.get("app.secure.key") != null && !AltConfig.get("app.secure.key").equalsIgnoreCase("") && AltConfig.get("app.secure.iv") != null && !AltConfig.get("app.secure.iv").equalsIgnoreCase("") ? secure.encrypt(result) : result;
        }catch (Exception e){
            return "";
        }
    }

    /** Registers the dispatcher on "", "/" and "/*" so every request is handled. */
    public void start(){
        route("");
        route("/");
        route("/*");
    }

    /** Configures the framework logger's level and tag. */
    public void log(int level, String tag){
        AltLog.level = level;
        AltLog.tag = tag;
    }

    /** Encodes the given user data as a JWT; returns "" on failure. */
    public static String generate_token(Map<String, String> userdata){
        try {
            return AltJwt.encode(userdata);
        } catch (AltException e) {
            return "";
        }
    }

    /** Removes the token stored in the session. */
    public static void clear_token(AltHttpRequest altHttpRequest){
        altHttpRequest.session().removeAttribute("token");
    }

    /** Stores the token in the session. */
    public static void set_token(String token, AltHttpRequest altHttpRequest){
        altHttpRequest.session().attribute("token", token);
    }

    /**
     * Resolves the request token, in priority order: the "token" request parameter,
     * the "Authorization: <scheme> <token>" header, then the session attribute.
     * Returns {@code null} if none is present.
     */
    public static String get_token(AltHttpRequest altHttpRequest){
        String token = altHttpRequest.data.get("token");
        if(token == null){
            String tmp = altHttpRequest.headers("Authorization");
            if(tmp != null && !tmp.equals("")){
                String[] tmp2 = tmp.split(" ");
                if(tmp2.length == 2){
                    token = tmp2[1];
                }
            }
        }
        if(token == null){
            token = altHttpRequest.session().attribute("token");
        }
        return token;
    }

    /** Decodes the token, throwing {@link AltException} when invalid. */
    public static Map<String, String> verify_token(String token) throws AltException {
        return AltJwt.decode(token);
    }

    /** Decodes the user data from the request's token. */
    public static Map<String, String> get_userdata(AltHttpRequest altHttpRequest){
        String token = Alt.get_token(altHttpRequest);
        return Alt.get_userdata(token);
    }

    /** Decodes the user data from a token; on failure returns empty userid/username entries. */
    public static Map<String, String> get_userdata(String token){
        try {
            return AltJwt.decode(token);
        } catch (AltException e) {
            return new HashMap<String, String>(){{
                put("userid", "");
                put("username", "");
            }};
        }
    }

    /** A request counts as logged in when its token decodes to a non-empty userid. */
    public static boolean islogin(AltHttpRequest altHttpRequest){
        Map<String, String> userdata = Alt.get_userdata(altHttpRequest);
        return userdata.get("userid") != null && !userdata.get("userid").equals("");
    }

    /** Bitmask check of the user's "userlevel" claim against {@code permission}. */
    public static boolean check(int permission, AltHttpRequest altHttpRequest){
        Map<String, String> userdata = Alt.get_userdata(altHttpRequest);
        int level = Integer.valueOf(userdata.get("userlevel"));
        return (level & permission) > 0;
    }

    /**
     * Enforces {@code permission} for the current request: throws 401 when not logged in
     * (or no userlevel claim) and 403 when the permission bit is missing.
     */
    public static void set_permission(int permission, AltHttpRequest altHttpRequest) throws AltException {
        Map<String, String> userdata = Alt.get_userdata(altHttpRequest);
        String level = userdata.get("userlevel");
        if(level == null || (permission == 0 && !Alt.islogin(altHttpRequest))){
            throw new AltException("Anda belum login atau session anda sudah habis!", Alt.STATUS_UNAUTHORIZED);
        }else if(!Alt.check(permission, altHttpRequest)){
            throw new AltException("Anda tidak berhak untuk mengakses!", Alt.STATUS_FORBIDDEN);
        }
    }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.actions;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifactType;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.ArtifactOwner;
import com.google.devtools.build.lib.actions.BaseSpawn;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.Root;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.SpawnActionContext;
import com.google.devtools.build.lib.actions.cache.Md5Digest;
import com.google.devtools.build.lib.actions.cache.Metadata;
import com.google.devtools.build.lib.actions.cache.MetadataHandler;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.util.ActionTester;
import com.google.devtools.build.lib.analysis.util.ActionTester.ActionCombinationFactory;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.exec.util.TestExecutorBuilder;
import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests {@link PopulateTreeArtifactAction}. */
@RunWith(JUnit4.class)
public class PopulateTreeArtifactActionTest extends BuildViewTestCase {

    /**
     * MetadataHandler stub that records expanded tree-file outputs into a caller-supplied
     * list; every other operation is unsupported (the tests never exercise them).
     */
    private static class TestMetadataHandler implements MetadataHandler {
        private final List<Artifact> storingExpandedTreeFileArtifacts;

        TestMetadataHandler(List<Artifact> storingExpandedTreeFileArtifacts) {
            this.storingExpandedTreeFileArtifacts = storingExpandedTreeFileArtifacts;
        }

        @Override
        public void addExpandedTreeOutput(TreeFileArtifact output) {
            storingExpandedTreeFileArtifacts.add(output);
        }

        @Override
        public Iterable<TreeFileArtifact> getExpandedOutputs(Artifact artifact) {
            throw new UnsupportedOperationException(artifact.prettyPrint());
        }

        @Override
        public Metadata getMetadataMaybe(Artifact artifact) {
            throw new UnsupportedOperationException(artifact.prettyPrint());
        }

        @Override
        public Metadata getMetadata(Artifact artifact) {
            throw new UnsupportedOperationException(artifact.prettyPrint());
        }

        @Override
        public void setDigestForVirtualArtifact(Artifact artifact, Md5Digest md5Digest) {
            throw new UnsupportedOperationException(artifact.prettyPrint() + ": " + md5Digest);
        }

        @Override
        public void injectDigest(ActionInput output, FileStatus statNoFollow, byte[] digest) {
            throw new UnsupportedOperationException(output.toString());
        }

        @Override
        public void markOmitted(ActionInput output) {
            throw new UnsupportedOperationException(output.toString());
        }

        @Override
        public boolean isRegularFile(Artifact artifact) {
            throw new UnsupportedOperationException(artifact.prettyPrint());
        }

        @Override
        public boolean artifactOmitted(Artifact artifact) {
            throw new UnsupportedOperationException(artifact.prettyPrint());
        }

        @Override
        public boolean isInjected(Artifact file) throws IOException {
            throw new UnsupportedOperationException(file.prettyPrint());
        }

        @Override
        public void discardOutputMetadata() {
            throw new UnsupportedOperationException();
        }
    };

    // Derived root under which tree artifacts for the tests are created.
    private Root root;

    @Before
    public void setRootDir() throws Exception {
        root = Root.asDerivedRoot(scratch.dir("/exec/root"));
    }

    // The action's only output is the tree artifact being populated.
    @Test
    public void testActionOutputs() throws Exception {
        Action action = createPopulateTreeArtifactAction();
        assertThat(Artifact.toExecPaths(action.getOutputs())).containsExactly("test/archive_member");
    }

    // Inputs are the archive, its manifest, and the unzip tool.
    @Test
    public void testActionInputs() throws Exception {
        Action action = createPopulateTreeArtifactAction();
        assertThat(Artifact.toExecPaths(action.getInputs())).containsExactly(
            "myArchive.zip",
            "archiveManifest.txt",
            "unzipBinary");
    }

    // The spawn's outputs are the individual tree-file artifacts listed in the manifest.
    @Test
    public void testSpawnOutputs() throws Exception {
        PopulateTreeArtifactAction action = createPopulateTreeArtifactAction();
        Spawn spawn = action.createSpawn();
        Iterable<Artifact> outputs = actionInputsToArtifacts(spawn.getOutputFiles());
        assertThat(Artifact.toExecPaths(outputs)).containsExactly(
            "test/archive_member/archive_members/1.class",
            "test/archive_member/archive_members/2.class",
            "test/archive_member/archive_members/txt/text.txt");
    }

    @Test
    public void testSpawnInputs() throws Exception {
        PopulateTreeArtifactAction action = createPopulateTreeArtifactAction();
        Spawn spawn = action.createSpawn();
        Iterable<Artifact> inputs = actionInputsToArtifacts(spawn.getInputFiles());
        assertThat(Artifact.toExecPaths(inputs)).containsExactly(
            "myArchive.zip",
            "archiveManifest.txt",
            "unzipBinary");
    }

    // The spawn invokes the unzip tool with extract ("x"), destination, and manifest args in order.
    @Test
    public void testSpawnArguments() throws Exception {
        PopulateTreeArtifactAction action = createPopulateTreeArtifactAction();
        BaseSpawn spawn = (BaseSpawn) action.createSpawn();
        assertThat(spawn.getArguments()).containsExactly(
            "unzipBinary",
            "x",
            "myArchive.zip",
            "-d",
            "test/archive_member",
            "@archiveManifest.txt").inOrder();
    }

    // Executing the action reports each manifest entry as an expanded tree output.
    @Test
    public void testTreeArtifactPopulated() throws Exception {
        ArrayList<Artifact> treefileArtifacts = new ArrayList<Artifact>();
        PopulateTreeArtifactAction action = createPopulateTreeArtifactAction();
        ActionExecutionContext executionContext = actionExecutionContext(treefileArtifacts);
        action.execute(executionContext);
        assertThat(Artifact.toExecPaths(treefileArtifacts)).containsExactly(
            "test/archive_member/archive_members/1.class",
            "test/archive_member/archive_members/2.class",
            "test/archive_member/archive_members/txt/text.txt");
    }

    // A manifest entry escaping the tree artifact root ("../") must fail execution.
    @Test
    public void testInvalidManifestEntryPaths() throws Exception {
        Action action = createPopulateTreeArtifactAction();
        scratch.overwriteFile(
            "archiveManifest.txt",
            "archive_members/1.class",
            "../invalid_relative_path/myfile.class");

        ActionExecutionContext executionContext = actionExecutionContext(new ArrayList<Artifact>());
        try {
            action.execute(executionContext);
            fail("Invalid manifest entry paths, expected exception");
        } catch (ActionExecutionException e) {
            // Expect ActionExecutionException
        }
    }

    // One manifest entry being a path prefix of another must fail execution.
    @Test
    public void testTreeFileArtifactPathPrefixConflicts() throws Exception {
        Action action = createPopulateTreeArtifactAction();
        scratch.overwriteFile(
            "archiveManifest.txt",
            "archive_members/conflict",
            "archive_members/conflict/1.class");

        ActionExecutionContext executionContext = actionExecutionContext(new ArrayList<Artifact>());
        try {
            action.execute(executionContext);
            fail("Artifact path prefix conflicts, expected exception");
        } catch (ActionExecutionException e) {
            // Expect ActionExecutionException
        }
    }

    // An empty manifest produces no expanded outputs but still executes cleanly.
    @Test
    public void testEmptyTreeArtifactInputAndOutput() throws Exception {
        Action action = createPopulateTreeArtifactAction();
        scratch.overwriteFile("archiveManifest.txt", "");

        ArrayList<Artifact> treeFileArtifacts = new ArrayList<Artifact>();
        ActionExecutionContext executionContext = actionExecutionContext(treeFileArtifacts);
        action.execute(executionContext);

        assertThat(treeFileArtifacts).isEmpty();
    }

    @Test
    public void testOutputTreeFileArtifactDirsCreated() throws Exception {
        Action action = createPopulateTreeArtifactAction();
        scratch.overwriteFile(
            "archiveManifest.txt",
            "archive_members/dirA/memberA",
            "archive_members/dirB/memberB");

        ArrayList<Artifact> treeFileArtifacts = new ArrayList<Artifact>();
        ActionExecutionContext executionContext = actionExecutionContext(treeFileArtifacts);
        action.execute(executionContext);

        // We check whether the parent directory structures of output TreeFileArtifacts exist even
        // though the spawn is not executed (the SpawnActionContext is mocked out).
        assertThat(treeFileArtifacts).hasSize(2);
        for (Artifact treeFileArtifact : treeFileArtifacts) {
            assertThat(treeFileArtifact.getPath().getParentDirectory().exists()).isTrue();
            assertThat(treeFileArtifact.getPath().exists()).isFalse();
        }
    }

    // Exercises key uniqueness across all 16 combinations of the four action parameters:
    // each varying input must change the action key.
    @Test
    public void testComputeKey() throws Exception {
        final Artifact archiveA = getSourceArtifact("myArchiveA.zip");
        final Artifact archiveB = getSourceArtifact("myArchiveB.zip");
        final Artifact treeArtifactToPopulateA = createTreeArtifact("testA/archive_member");
        final Artifact treeArtifactToPopulateB = createTreeArtifact("testB/archive_member");
        final Artifact archiveManifestA = getSourceArtifact("archiveManifestA.txt");
        final Artifact archiveManifestB = getSourceArtifact("archiveManifestB.txt");
        final FilesToRunProvider zipperA = FilesToRunProvider.fromSingleExecutableArtifact(
            getSourceArtifact("unzipBinaryA"));
        final FilesToRunProvider zipperB = FilesToRunProvider.fromSingleExecutableArtifact(
            getSourceArtifact("unzipBinaryB"));

        ActionTester.runTest(16, new ActionCombinationFactory() {
            @Override
            public Action generate(int i) {
                // Each bit of i selects the A or B variant of one parameter.
                Artifact archive = (i & 1) == 0 ? archiveA : archiveB;
                Artifact treeArtifactToPopulate = (i & 2) == 0
                    ? treeArtifactToPopulateA : treeArtifactToPopulateB;
                Artifact archiveManifest = (i & 4) == 0 ? archiveManifestA : archiveManifestB;
                FilesToRunProvider zipper = (i & 8) == 0 ? zipperA : zipperB;

                return new PopulateTreeArtifactAction(
                    ActionsTestUtil.NULL_ACTION_OWNER,
                    archive,
                    archiveManifest,
                    treeArtifactToPopulate,
                    zipper);
            }
        });
    }

    /** Builds the canonical action under test with a three-entry manifest on scratch. */
    private PopulateTreeArtifactAction createPopulateTreeArtifactAction() throws Exception {
        Artifact archive = getSourceArtifact("myArchive.zip");
        Artifact treeArtifactToPopulate = createTreeArtifact("test/archive_member");
        Artifact archiveManifest = getSourceArtifact("archiveManifest.txt");
        FilesToRunProvider unzip = FilesToRunProvider.fromSingleExecutableArtifact(
            getSourceArtifact("unzipBinary"));

        scratch.file(
            "archiveManifest.txt",
            "archive_members/1.class",
            "archive_members/2.class",
            "archive_members/txt/text.txt");

        return new PopulateTreeArtifactAction(
            ActionsTestUtil.NULL_ACTION_OWNER,
            archive,
            archiveManifest,
            treeArtifactToPopulate,
            unzip);
    }

    /**
     * Builds an execution context whose SpawnActionContext is a mock (no real unzip runs)
     * and whose MetadataHandler records expanded outputs into the given list.
     */
    private ActionExecutionContext actionExecutionContext(
        List<Artifact> storingExpandedTreeFileArtifacts) throws Exception {
        Executor executor = new TestExecutorBuilder(directories, null)
            .setExecution(PopulateTreeArtifactAction.MNEMONIC, mock(SpawnActionContext.class))
            .build();

        return new ActionExecutionContext(
            executor,
            null,
            new TestMetadataHandler(storingExpandedTreeFileArtifacts),
            null,
            ImmutableMap.<String, String>of(),
            null);
    }

    /** Creates a TREE special artifact rooted at {@link #root}. */
    private Artifact createTreeArtifact(String rootRelativePath) {
        PathFragment relpath = new PathFragment(rootRelativePath);
        return new SpecialArtifact(
            root.getPath().getRelative(relpath),
            root,
            root.getExecPath().getRelative(relpath),
            ArtifactOwner.NULL_OWNER,
            SpecialArtifactType.TREE);
    }

    /** Downcasts each ActionInput to Artifact (valid for all spawn files in these tests). */
    private Iterable<Artifact> actionInputsToArtifacts(Iterable<? extends ActionInput> files) {
        ImmutableList.Builder<Artifact> builder = ImmutableList.<Artifact>builder();
        for (ActionInput file : files) {
            builder.add((Artifact) file);
        }
        return builder.build();
    }
}
| |
package com.muqdd.iuob2.app;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by Ali Yusuf on 4/8/2017.
* iUOB-2
*/
@SuppressWarnings({"WeakerAccess","unused"})
public class Constants {
private static Map<String, String> debCodeMap;
public final static String SB_SECTIONS_LIST = "SB_SECTIONS_LIST";
public final static String SB_COURSES_LIST = "SB_COURSES_LIST";
public final static List<String> collegesNameList = Arrays.asList("Arts", "Science", "Information Technology",
"Business Administration", "Applied Studies", "Physical Education and Physiotherapy",
"Engineering", "Bahrain Teachers College", "Health Sciences", "Law");
// {"College of Arts", "College of Business Administration",
// "College of Engineering", "College of Science", "College of Education", "French Studies Center",
// "College of Information Technology", "College of Applied Studies", "College of Law",
// "Bahrain Teachers College", "Physical Education and Physiotherapy", "College of Health Sciences",
// "English Language Center", "Sharia'a", "Science Research", "College of Physical Education",
// "Confucius Institute", "Reserved", "Language Centers", "Confucius Institute", "Languages Institute",
// "Unit for Teaching Excellence and Leadership"};
public final static String[] coursesNameList = {"ACC","ACA","ACCA","ACCM","AH","ALH","AMST","ARAB","ARABA",
"ARABM","ARCG","ART","BAA","BIOLS","BIONU","BIS","CEA","CEG","CENG","CGS","CHE","CHEMY",
"CHENG","CHL","COM","CSA","CSC","DH","ECON","ECONA","ECONM","EDAR","EDEG","EDPS","EDTC",
"EDU","EEDA","EEG","EENG","ELE","ENG","ENGG","ENGL",/*"ENGL.",*/"ENGLA","ENGLM","ENGLR","ENGLU",
"EPD","ESD",/*"ESP.",*/"ETDA","EVALU","FA","FIN","FINA","FINM","FOUN","FREN","GEOG","GERM",
"HIST","HISTO","HRLC","IEN","INTD","ISLM","IT","ITBIS","ITCE","ITCS","ITIS","JAPN","LAW","ITSE","ITNE",
/*"LAW.",*/"LFS","MATHA","MATHS",/*"MATHS.",*/"MCM","MEDA","MEG","MENG","MGT","MGTA","MISA",
"MKT","MKTA","MLS","MLT","MPHYS","NUR","OMA","PHA","PHAM","PHED","PHEDE","PHTY","PHYCS",
"PHYCSA","PICDA","PICENG","PSYC","PSYCH","QM","RAD","SBF","SBS","SOCIO","STAT","STATA",
"TC1AR","TC1ART","TC1EN","TC1IS","TC1MA","TC1MAT","TC1SC","TC1SCT","TC2AR","TC2ART",
"TC2EN","TC2ENT","TC2IS","TC2IST","TC2MA","TC2MAT","TC2SC","TC2SCT","TCDE","TCDEE",
"TCDEGS","TCDEIT","TCDEM","TCEL","TCFN","TCPB","TOUR","TRAN"};
// Catalogue of individual course codes (subject-code prefix + course number), grouped
// alphabetically by prefix. Kept as a flat array; membership checks presumably scan it.
// NOTE(review): the "ENGLR001".."ENGLR006" entries at the ENGLA/ESD boundary break the
// otherwise alphabetical ordering — confirm whether order matters to any consumer.
public final static String[] coursesList = {"ACC112","ACC113","ACC211","ACC221","ACC231","ACC310",
"ACC311","ACC325","ACC332","ACC341","ACC410","ACC411","ACC416","ACC451","ACC470","ACC491",
"ACC610","ACCA121","ACCA221","AMST202","AMST212","AMST213","AMST214","AMST411","AMST413",
"ARAB100","ARAB110","ARAB111","ARAB119","ARAB141","ARAB181","ARAB210","ARAB212","ARAB213",
"ARAB216","ARAB227","ARAB237","ARAB242","ARAB313","ARAB315","ARAB325","ARAB328","ARAB333",
"ARAB334","ARAB337","ARAB338","ARAB339","ARAB341","ARAB343","ARAB353","ARAB414","ARAB416",
"ARAB424","ARAB426","ARAB437","ARAB439","ARAB441","ARAB443","ARAB445","ARAB446","ARAB447",
"ARAB448","ARAB454","ARAB488","ARABA111","ARABM535","ARABM548","ARABM552","ARABM558",
"ARABM560","ARCG120","ARCG121","ARCG220","ARCG221","ARCG222","ARCG223","ARCG224","ARCG320",
"ARCG321","ARCG322","ARCG323","ARCG420","ARCG421","ARCG422","ARCG520","ARCG521","ARCG522",
"ARCG550","ARCG553","ARCG555","ARCG557","ARCG558","ARCG559","ARCG561","ART112","ART133",
"ART141","ART221","BAA110","BAA120","BAA121","BAA122","BAA230","BAA231","BAA250","BAA251",
"BAA260","BIOLS102","BIOLS103","BIOLS171","BIOLS175","BIOLS222","BIOLS232","BIOLS315",
"BIOLS320","BIOLS352","BIOLS370","BIOLS371","BIOLS383","BIOLS409","BIOLS424","BIOLS427",
"BIOLS442","BIOLS451","BIOLS454","BIOLS456","BIOLS463","BIOLS464","BIOLS465","BIOLS481",
"BIS202","BIS315","CEA112","CEA122","CEA123","CEA124","CEA233","CEA242","CEG211","CEG225",
"CEG315","CEG325","CENG131","CENG160","CENG200","CENG201","CENG202","CENG211","CENG212",
"CENG231","CENG242","CENG290","CENG301","CENG302","CENG311","CENG312","CENG314","CENG321",
"CENG322","CENG331","CENG341","CENG400","CENG406","CENG411","CENG415","CENG417","CENG421",
"CENG426","CENG431","CENG435","CENG436","CENG442","CENG451","CENG491","CHEMY101","CHEMY102",
"CHEMY106","CHEMY211","CHEMY220","CHEMY221","CHEMY223","CHEMY224","CHEMY310","CHEMY312",
"CHEMY313","CHEMY322","CHEMY331","CHEMY341","CHEMY348","CHEMY351","CHEMY411","CHEMY421",
"CHEMY435","CHEMYA101","CHENG111","CHENG211","CHENG212","CHENG213","CHENG214","CHENG242",
"CHENG290","CHENG301","CHENG312","CHENG313","CHENG314","CHENG315","CHENG316","CHENG323",
"CHENG324","CHENG325","CHENG400","CHENG415","CHENG421","CHENG422","CHENG423","CHENG425",
"CHENG443","CHENG445","CHENG460","CHENG491","CHL101","COM524","COM525","COM526","COM540",
"COM564","COM565","COM566","COM570","COM590","CSA101","CSA106","CSA111","CSA112","CSA113",
"CSA114","CSA121","CSA126","CSA131","CSA136","CSA211","CSA212","CSA213","CSA214","CSA217",
"CSA218","CSA219","CSA223","CSA231","CSA236","CSA241","CSA242","CSA266","CSA271","CSA276","CSC103",
"ECON131","ECON140","ECON141","ECON248","ECON340","ECON341","ECON440","ECON441","ECON640",
"ECONA121","EDAR126","EDEG211","EDPS241","EDPS285","EDTC100","EEDA101","EEDA102","EEDA109",
"EEDA202","EEDA211","EEDA212","EEDA213","EEDA214","EEDA231","EEDA241","EEDA242","EEDA243",
"EEDA244","EEDA280","EEG271","EEG510","EEG554","EEG577","EEG580","EENG100","EENG109",
"EENG200","EENG204","EENG205","EENG209","EENG242","EENG251","EENG259","EENG261","EENG262",
"EENG269","EENG271","EENG290","EENG301","EENG302","EENG311","EENG333","EENG334","EENG341",
"EENG342","EENG343","EENG349","EENG352","EENG353","EENG355","EENG361","EENG364","EENG371",
"EENG372","EENG373","EENG381","EENG400","EENG412","EENG413","EENG414","EENG415","EENG416",
"EENG417","EENG433","EENG438","EENG444","EENG447","EENG448","EENG449","EENG451","EENG461",
"EENG462","EENG463","EENG470","EENG472","EENG473","EENG479","EENG483","EENG485","EENG491",
"ENGL101","ENGL102","ENGL111","ENGL112","ENGL114","ENGL125","ENGL126","ENGL130","ENGL135",
"ENGL145","ENGL146","ENGL154","ENGL155","ENGL191","ENGL192","ENGL205","ENGL215","ENGL219",
"ENGL235","ENGL250","ENGL305","ENGL308","ENGL309","ENGL313","ENGL314","ENGL315","ENGL331",
"ENGL340","ENGL341","ENGL342","ENGL345","ENGL346","ENGL350","ENGL405","ENGL419","ENGL440",
"ENGL444","ENGL446","ENGL450","ENGL540","ENGL541","ENGL542","ENGL543","ENGL544","ENGL545",
"ENGLA111","ENGLA112","ENGLA120","ENGLA210","ENGLM401","ENGLM402","ENGLU203","ESD524",
"ENGLR001", "ENGLR002", "ENGLR003", "ENGLR004", "ENGLR005", "ENGLR006",
"ESD538","ETDA161","EVALU558","EVALU559","FIN220","FIN221","FIN222","FIN320","FIN323",
"FIN331","FIN411","FIN424","FIN425","FIN426","FIN428","FIN435","FIN620","FIN623","FIN625",
"FIN629","FIN698","FINA200","FOUN321","FREN141","FREN142","FREN231","FREN232","FREN310",
"FREN312","FREN313","FREN411","GEOG102","GERM101","HIST122","HISTO191","HISTO212",
"HISTO225", "HISTO230","HISTO281","HISTO301","HISTO302","HISTO305","HISTO306","HISTO309",
"HISTO311","HISTO407","HISTO408","HISTO409","HISTO413","HISTO414","HISTO415","HISTO418",
"HISTO419","HISTO429","IEN509","IEN511","IEN536","INTD120","INTD121","INTD220","INTD221",
"INTD222","INTD223","INTD224","INTD320","INTD321","INTD322","INTD323","INTD324","INTD420",
"INTD421","INTD422","INTD424","INTD425","ISLM101","ISLM114","ISLM136","ISLM141","ISLM217",
"ISLM231","ISLM240","ISLM241","ISLM243","ISLM252","ISLM281","ISLM305","ISLM316","ISLM317",
"ISLM321","ISLM323","ISLM326","ISLM327","ISLM333","ISLM339","ISLM344","ISLM346","ISLM401",
"ISLM414","ISLM415","ISLM421","ISLM436","ISLM445","ISLM446","ISLM448","ISLM453","ISLM463",
"ISLM474","ISLM475","ISLM488","ITBIS105","ITBIS211","ITBIS251","ITBIS311","ITBIS322",
"ITBIS324","ITBIS341","ITBIS351","ITBIS373","ITBIS385","ITBIS393","ITBIS395","ITBIS396",
"ITBIS420","ITBIS431","ITBIS435","ITBIS438","ITBIS445","ITBIS450","ITBIS465","ITBIS472",
"ITBIS492","ITBIS494","ITBIS499","ITCE202","ITCE250","ITCE251","ITCE260","ITCE263",
"ITCE272","ITCE300","ITCE311","ITCE313","ITCE314","ITCE315","ITCE320","ITCE321","ITCE341",
"ITCE344","ITCE351","ITCE352","ITCE362","ITCE363","ITCE380","ITCE414","ITCE416","ITCE431",
"ITCE436","ITCE444","ITCE455","ITCE470","ITCE471","ITCE472","ITCE474","ITCE498","ITCS102",
"ITCS103","ITCS104","ITCS111","ITCS112","ITCS113","ITCS114","ITCS215","ITCS216","ITCS241",
"ITCS242","ITCS251","ITCS252","ITCS253","ITCS311","ITCS312","ITCS314","ITCS315","ITCS322",
"ITCS323","ITCS332","ITCS341","ITCS345","ITCS346","ITCS351","ITCS373","ITCS385","ITCS390",
"ITCS393","ITCS395","ITCS399","ITCS412","ITCS420","ITCS439","ITCS447","ITCS452","ITCS473",
"ITCS479","ITCS490","ITCS495","ITIS101","ITIS102","ITIS211","ITIS216","ITIS253","ITIS311",
"ITIS312","ITIS313","ITIS314","ITIS331","ITIS341","ITIS342","ITIS343","ITIS351","ITIS411",
"ITIS412","ITIS413","ITIS441","ITIS442","ITIS443","ITIS444","ITIS445","ITIS453","ITIS461",
"ITIS462","ITIS464","ITIS475","ITIS476","ITIS482","ITIS499", "ITSE201", "ITNE110", "ITNE231",
"JAPN101","LAW101","LAW102",
"LAW104","LAW106","LAW107","LAW109","LAW110","LAW210","LAW211","LAW214","LAW215","LAW221",
"LAW222","LAW224","LAW225","LAW227","LAW238","LAW302","LAW307","LAW312","LAW315","LAW317",
"LAW318","LAW322","LAW324","LAW325","LAW327","LAW328","LAW402","LAW403","LAW407","LAW409",
"LAW412","LAW414","LAW415","LAW416","LAW417","LAW418","LAW419","LAW422","LAW423","LAW425",
"LAW427","LAW429","LAW499","LAW511","LAW513","LAW516","LAW522","LAW524","LAW529","MATHA111",
"MATHS101","MATHS102","MATHS103","MATHS104","MATHS108","MATHS121","MATHS122","MATHS203",
"MATHS204","MATHS205","MATHS211","MATHS253","MATHS303","MATHS304","MATHS305","MATHS307",
"MATHS311","MATHS312","MATHS331","MATHS341","MATHS342","MATHS381","MATHS385","MATHS395",
"MATHS401","MATHS402","MATHS415","MATHS417","MATHS452","MATHS461","MATHS500","MATHS562",
"MATHS582","MCM110","MCM120","MCM130","MCM140","MCM150","MCM201","MCM202","MCM210","MCM220",
"MCM230","MCM250","MCM260","MCM301","MCM302","MCM310","MCM311","MCM312","MCM313","MCM314",
"MCM315","MCM316","MCM318","MCM320","MCM321","MCM322","MCM323","MCM324","MCM325","MCM330",
"MCM331","MCM332","MCM333","MCM334","MCM336","MCM340","MCM341","MCM342","MCM345","MCM350",
"MCM351","MCM352","MCM353","MCM354","MCM355","MCM356","MCM360","MCM361","MCM362","MCM401",
"MCM410","MCM411","MCM412","MCM413","MCM414","MCM415","MCM420","MCM421","MCM422","MCM423",
"MCM425","MCM430","MCM431","MCM432","MCM433","MCM440","MCM441","MCM442","MCM443","MCM450",
"MCM451","MCM452","MCM453","MCM454","MCM460","MCM461","MEDA111","MEDA112","MEDA121",
"MEDA161","MEDA162","MEDA213","MEDA214","MEDA215","MEDA222","MEDA223","MEDA231","MEDA232",
"MEDA233","MEDA271","MEG435","MEG501","MEG514","MENG110","MENG160","MENG163","MENG201",
"MENG210","MENG230","MENG231","MENG235","MENG242","MENG263","MENG274","MENG290","MENG300",
"MENG310","MENG334","MENG335","MENG371","MENG373","MENG375","MENG380","MENG381","MENG384",
"MENG400","MENG420","MENG423","MENG430","MENG440","MENG442","MENG473","MENG475","MENG485",
"MENG490","MENG491","MGT131","MGT230","MGT233","MGT236","MGT239","MGT340","MGT341","MGT429",
"MGT430","MGT433","MGT434","MGT437","MGT439","MGT446","MGT447","MGT460","MGT501","MGT630",
"MGT632","MGT633","MGT635","MGT638","MGT639","MGT698","MGTA121","MGTA140","MGTA160",
"MGTA222","MGTA231","MGTA240","MGTA242","MGTA247","MGTA251","MGTA260","MGTA262","MGTA264",
"MGTA290","MGTA299","MISA121","MISA123","MISA138","MISA210","MISA233","MISA240","MISA244",
"MISA260","MKT261","MKT263","MKT264","MKT268","MKT361","MKT362","MKT364","MKT367","MKT460",
"MKT461","MKT462","MKT463","MKT464","MKT465","MKT660","MKTA221","MPHYS325","MPHYS372",
"MPHYS374","MPHYS476","MPHYS477","OMA121","OMA140","OMA160","OMA222","OMA231","OMA240",
"OMA242","OMA247","OMA260","OMA262","PHED555","PHED572","PHED577","PHED583","PHED584",
"PHED704","PHED707","PHED709","PHED712","PHED724","PHEDE101","PHEDE102","PHEDE103",
"PHEDE104","PHEDE106","PHEDE114","PHEDE115","PHEDE116","PHEDE117","PHEDE118","PHEDE200",
"PHEDE201","PHEDE202","PHEDE203","PHEDE204","PHEDE205","PHEDE206","PHEDE207","PHEDE208",
"PHEDE209","PHEDE210","PHEDE211","PHEDE213","PHEDE216","PHEDE217","PHEDE218","PHEDE301",
"PHEDE302","PHEDE304","PHEDE305","PHEDE307","PHEDE308","PHEDE309","PHEDE310","PHEDE312",
"PHEDE313","PHEDE314","PHEDE315","PHEDE317","PHEDE401","PHEDE402","PHEDE404","PHEDE411",
"PHEDE418","PHEDE419","PHTY180","PHTY286","PHTY291","PHTY294","PHTY298","PHTY382",
"PHTY384","PHTY387","PHTY388","PHTY394","PHTY396","PHTY494","PHTY499","PHYCS101",
"PHYCS102","PHYCS104","PHYCS106","PHYCS111","PHYCS181","PHYCS209","PHYCS210","PHYCS221",
"PHYCS222","PHYCS241","PHYCS324","PHYCS333","PHYCS334","PHYCS351","PHYCS353","PHYCS425",
"PHYCS428","PHYCS432","PHYCS471","PHYCS484","PHYCS526","PHYCS541","PHYCS551","PHYCS553",
"PHYCS554","PHYCS558","PHYCSA101","PICDA111","PICDA121","PICDA141","PICDA212","PICDA222",
"PICDA223","PICDA224","PICDA225","PICDA226","PICDA231","PICDA232","PICDA233","PICENG111",
"PICENG212","PICENG213","PICENG214","PICENG215","PICENG216","PICENG226","PICENG242",
"PICENG290","PICENG321","PICENG322","PICENG325","PICENG326","PICENG331","PICENG332",
"PICENG400","PICENG411","PICENG422","PICENG425","PICENG433","PICENG434","PICENG464",
"PICENG491","PSYC103","PSYC120","PSYC211","PSYC221","PSYC224","PSYC251","PSYC290",
"PSYC323","PSYC324","PSYC325","PSYC347","PSYC417","PSYCH561","PSYCH562","PSYCH563",
"PSYCH570","PSYCH571","PSYCH591","QM250","QM350","QM353","QM650","SBF150","SBF250",
"SBF260","SBF270","SOCIO161","SOCIO181","SOCIO191","SOCIO221","SOCIO222","SOCIO223",
"SOCIO224","SOCIO225","SOCIO226","SOCIO281","SOCIO321","SOCIO324","SOCIO326","SOCIO328",
"SOCIO329","SOCIO332","SOCIO333","SOCIO337","SOCIO341","SOCIO382","SOCIO420","SOCIO422",
"SOCIO424","SOCIO425","SOCIO426","SOCIO427","SOCIO428","STAT105","STAT271","STAT272",
"STAT273","STAT371","STAT372","STAT373","STAT374","STAT378","STAT381","STAT385","STAT391",
"STAT392","STAT393","STAT394","STAT473","STAT474","STAT476","STAT479","STATA231","TOUR101",
"TOUR102","TOUR211","TOUR220","TOUR231","TOUR315","TOUR355","TOUR363","TOUR380","TOUR418",
"TOUR421","TOUR463","TOUR498","TRAN208","TRAN303","TRAN304","TRAN305","TRAN401","TRAN403",
"TRAN404","HRLC107"
};
/**
 * Returns the mapping from subject-code prefix (e.g. "ACC", "ITCS") to its internal
 * department code (e.g. "031", "222"), building it lazily on first use.
 * <p>
 * The method is {@code synchronized} and populates a local map that is published only
 * once complete: the previous unsynchronized lazy init allowed a concurrent caller to
 * observe a partially filled {@code HashMap}.
 * <p>
 * NOTE(review): coursesNameList contains prefixes with no entry here ("ACA", "ITSE",
 * "ITNE", "ENGLR") — getDebCode returns {@code null} for them; confirm intended.
 *
 * @return the prefix-to-department-code map; never {@code null}
 */
public static synchronized Map<String, String> getDebCodeMap() {
    if (debCodeMap == null) {
        Map<String, String> m = new HashMap<>();
        m.put("ACC","031");
        m.put("ACCA","A17");
        m.put("ACCM","131");
        m.put("AH","S20");
        m.put("ALH","S19");
        m.put("AMST","108");
        m.put("ARAB","047");
        m.put("ARABA","A24");
        m.put("ARABM","147");
        m.put("ARCG","292");
        m.put("ART","079");
        m.put("BAA","A40");
        m.put("BIOLS","087");
        m.put("BIONU","187");
        m.put("BIS","058");
        m.put("CEA","A25");
        m.put("CEG","112");
        m.put("CENG","325");
        m.put("CGS","S25");
        m.put("CHE","013");
        m.put("CHEMY","086");
        m.put("CHENG","353");
        m.put("CHL","114");
        m.put("COM","128");
        m.put("CSA","A13");
        m.put("CSC","081");
        m.put("DH","S23");
        m.put("ECON","034");
        m.put("ECONA","A18");
        m.put("ECONM","134");
        m.put("EDAR","366");
        m.put("EDEG","166");
        m.put("EDPS","177");
        m.put("EDTC","266");
        m.put("EDU","S30");
        m.put("EEDA","A29");
        m.put("EEG","110");
        m.put("EENG","345");
        m.put("ELE","S12");
        m.put("ENG","S28");
        m.put("ENGG","377");
        m.put("ENGL","049");
        m.put("ENGL.","D11");
        m.put("ENGLA","A11");
        m.put("ENGLM","149");
        m.put("ENGLU","009");
        m.put("EPD","S26");
        m.put("ESD","444");
        m.put("ESP.","D16");
        m.put("ETDA","A33");
        m.put("EVALU","195");
        m.put("FA","179");
        m.put("FIN","032");
        m.put("FINA","A21");
        m.put("FINM","132");
        m.put("FOUN","466");
        m.put("FREN","078");
        m.put("GEOG","076");
        m.put("GERM","107");
        m.put("HIST","075");
        m.put("HISTO","175");
        m.put("HRLC","771");
        m.put("IEN","002");
        m.put("INTD","191");
        m.put("ISLM","074");
        m.put("IT","558");
        m.put("ITBIS","158");
        m.put("ITCE","333");
        m.put("ITCS","222");
        m.put("ITIS","458");
        m.put("JAPN","100");
        m.put("LAW","080");
        m.put("LAW.","808");
        m.put("LFS","S24");
        m.put("MATHA","A12");
        m.put("MATHS","083");
        m.put("MATHS.","D15");
        m.put("MCM","228");
        m.put("MEDA","A30");
        m.put("MEG","111");
        m.put("MENG","314");
        m.put("MGT","030");
        m.put("MGTA","A15");
        m.put("MISA","A16");
        m.put("MKT","033");
        m.put("MKTA","A20");
        m.put("MLS","S32");
        m.put("MLT","S21");
        m.put("MPHYS","285");
        m.put("NUR","S11");
        m.put("OMA","A60");
        m.put("PHA","S18");
        m.put("PHAM","S31");
        m.put("PHED","001");
        m.put("PHEDE","200");
        m.put("PHTY","220");
        m.put("PHYCS","085");
        m.put("PHYCSA","A31");
        m.put("PICDA","A34");
        m.put("PICENG","355");
        m.put("PSYC","077");
        m.put("PSYCH","277");
        m.put("QM","035");
        m.put("RAD","S22");
        m.put("SBF","777");
        m.put("SBS","S27");
        m.put("SOCIO","173");
        m.put("STAT","096");
        m.put("STATA","A19");
        m.put("TC1AR","E27");
        m.put("TC1ART","E40");
        m.put("TC1EN","E28");
        m.put("TC1IS","E44");
        m.put("TC1MA","E25");
        m.put("TC1MAT","E39");
        m.put("TC1SC","E26");
        m.put("TC1SCT","E24");
        m.put("TC2AR","E35");
        m.put("TC2ART","E36");
        m.put("TC2EN","E33");
        m.put("TC2ENT","E34");
        m.put("TC2IS","E37");
        m.put("TC2IST","E38");
        m.put("TC2MA","E32");
        m.put("TC2MAT","E42");
        m.put("TC2SC","E31");
        m.put("TC2SCT","E41");
        m.put("TCDE","E12");
        m.put("TCDEE","E18");
        m.put("TCDEGS","E14");
        m.put("TCDEIT","E23");
        m.put("TCDEM","E13");
        m.put("TCEL","E45");
        m.put("TCFN","E55");
        m.put("TCPB","E11");
        m.put("TOUR","027");
        m.put("TRAN","082");
        // Publish only the fully populated map.
        debCodeMap = m;
    }
    return debCodeMap;
}
/**
 * Convenience lookup of a single department code.
 *
 * @param k subject-code prefix, e.g. "ACC"
 * @return the department code, or {@code null} if the prefix is unknown
 */
public static String getDebCode(String k) {
    Map<String, String> codes = getDebCodeMap();
    return codes.get(k);
}
}
| |
/*
* Copyright (C) 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fathom;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import ch.qos.logback.core.joran.spi.JoranException;
import ch.qos.logback.core.util.StatusPrinter;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import fathom.conf.Settings;
import fathom.exception.FathomException;
import fathom.utils.Util;
import org.apache.commons.daemon.Daemon;
import org.apache.commons.daemon.DaemonContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
/**
 * Boot starts/stops Fathom and can be optionally used as a Commons-Daemon Service.
 * <p>
 * http://commons.apache.org/proper/commons-daemon
 *
 * @author James Moger
 */
public class Boot implements Daemon {

    /** System property consulted by Logback for an external configuration file. */
    public final static String LOGBACK_CONFIGURATION_FILE_PROPERTY = "logback.configurationFile";

    private static final Logger log = LoggerFactory.getLogger(Boot.class);

    // Monitor shared by the Windows service entry points: start(String[]) waits on it,
    // stop(String[]) notifies it. Final so the lock identity can never change.
    private static final Object SERVICE = new Object();

    // Instance created by the Windows service start(String[]) entry point.
    private static Boot boot;

    private final Settings settings;

    private Server server;

    /** Creates a Boot with default settings. */
    public Boot() {
        this(new Settings());
    }

    /** Creates a Boot whose settings are parsed from command-line arguments. */
    public Boot(String[] args) {
        settings = new Settings(args);
        init();
    }

    /** Creates a Boot for an explicit runtime mode; a null mode means default settings. */
    public Boot(Constants.Mode mode) {
        this(mode == null ? new Settings() : new Settings(mode));
    }

    /** Creates a Boot with the supplied settings. */
    public Boot(Settings settings) {
        this.settings = settings;
        init();
    }

    /**
     * Called by prunsrv (Windows) to start the service.
     * Blocks until the server stops running.
     *
     * @param args service arguments (unused)
     */
    public static void start(String[] args) {
        log.debug("windowsStart called");
        boot = new Boot();
        boot.start();

        // Block until interrupted by stop() or the undertow dies.
        while (boot.getServer().isRunning()) {
            synchronized (SERVICE) {
                try {
                    SERVICE.wait(30000); // wait 30 seconds and check if stopped
                } catch (InterruptedException ignored) {
                    // Deliberately keep waiting: shutdown is signaled via stop(String[]),
                    // which stops the server (ending this loop) and notifies SERVICE.
                }
            }
        }
    }

    /**
     * Called by prunsrv (Windows) to stop the service.
     *
     * @param args service arguments (unused)
     */
    public static void stop(String[] args) {
        log.debug("windowsStop called");
        // Guard against a stop request arriving before start(String[]) created the instance.
        if (boot != null) {
            boot.stop();
        }
        synchronized (SERVICE) {
            // notify the thread synchronized on SERVICE in start()
            SERVICE.notify();
        }
    }

    /**
     * The Java entry point.
     *
     * @param args command-line arguments forwarded to Settings
     */
    public static void main(String... args) {
        try {
            final Boot boot = new Boot(args);
            boot.addShutdownHook().start();
        } catch (Exception e) {
            // getRootCause returns a Throwable; the old cast to Exception could throw
            // ClassCastException (e.g. for an Error root cause) and mask the real failure.
            Throwables.getRootCause(e).printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Setup the Boot instance.
     */
    protected void init() {
        System.setProperty("java.awt.headless", "true");
        setupLogback();
    }

    public Settings getSettings() {
        return settings;
    }

    /** Lazily creates the server and binds it to this Boot's settings. */
    public Server getServer() {
        if (server == null) {
            server = new Server();
            server.setSettings(settings);
        }
        return server;
    }

    /**
     * Add a JVM shutdown hook that stops Fathom cleanly.
     *
     * @return this, for call chaining
     */
    public Boot addShutdownHook() {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                Boot.this.stop();
            }
        });
        return this;
    }

    /**
     * Called by prunsrv (Windows) or jsvc (UNIX) before start().
     *
     * @param context daemon context carrying the process arguments
     * @throws Exception if the arguments cannot be applied
     */
    @Override
    public void init(DaemonContext context) throws Exception {
        log.debug("Fathom Daemon initialized");
        settings.applyArgs(context.getArguments());
    }

    /**
     * Starts Fathom synchronously, logging environment details and the
     * addresses the server is listening on.
     */
    @Override
    public synchronized void start() {
        Preconditions.checkNotNull(getServer());

        String osName = System.getProperty("os.name");
        String osVersion = System.getProperty("os.version");
        log.info("Bootstrapping {} ({})", settings.getApplicationName(), settings.getApplicationVersion());
        Util.logSetting(log, "Fathom", Constants.getVersion());
        Util.logSetting(log, "Mode", settings.getMode().toString());
        Util.logSetting(log, "Operating System", String.format("%s (%s)", osName, osVersion));
        Util.logSetting(log, "Available processors", Runtime.getRuntime().availableProcessors());
        Util.logSetting(log, "Available heap", (Runtime.getRuntime().maxMemory() / (1024 * 1024)) + " MB");

        // Report the JVM's timezone abbreviation and UTC offset.
        SimpleDateFormat df = new SimpleDateFormat("z Z");
        df.setTimeZone(TimeZone.getDefault());
        String offset = df.format(new Date());
        Util.logSetting(log, "JVM timezone", String.format("%s (%s)", TimeZone.getDefault().getID(), offset));
        Util.logSetting(log, "JVM locale", Locale.getDefault());

        long startTime = System.nanoTime();
        getServer().start();

        String contextPath = settings.getContextPath();
        if (settings.getHttpsPort() > 0) {
            log.info("https://{}:{}{}", settings.getHttpsListenAddress(), settings.getHttpsPort(), contextPath);
        }
        if (settings.getHttpPort() > 0) {
            log.info("http://{}:{}{}", settings.getHttpListenAddress(), settings.getHttpPort(), contextPath);
        }
        if (settings.getAjpPort() > 0) {
            log.info("ajp://{}:{}{}", settings.getAjpListenAddress(), settings.getAjpPort(), contextPath);
        }

        long delta = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
        String duration;
        if (delta < 1000L) {
            duration = String.format("%s ms", delta);
        } else {
            duration = String.format("%.1f seconds", (delta / 1000f));
        }
        log.info("Fathom bootstrapped {} mode in {}", settings.getMode().toString(), duration);
        log.info("READY.");
    }

    /**
     * Stops Fathom synchronously.
     */
    @Override
    public synchronized void stop() {
        Preconditions.checkNotNull(getServer());
        if (getServer().isRunning()) {
            try {
                log.info("Stopping...");
                getServer().stop();
                log.info("STOPPED.");
            } catch (Exception e) {
                Throwable t = Throwables.getRootCause(e);
                log.error("Fathom failed on shutdown!", t);
            }
        }
    }

    /**
     * Called by prunsrv (Windows) or jsrv (UNIX) after stop().
     */
    @Override
    public void destroy() {
        log.debug("Destroyed Fathom");
    }

    /**
     * Setup Logback logging by optionally reloading the configuration file.
     */
    protected void setupLogback() {
        // Check for Logback config file System Property
        // http://logback.qos.ch/manual/configuration.html
        // -Dlogback.configurationFile=logback_prod.xml
        if (System.getProperty(LOGBACK_CONFIGURATION_FILE_PROPERTY) != null) {
            // Logback already configured
            return;
        }

        // Check for a logback configuration file declared in Fathom settings
        URL configFileUrl = settings.getFileUrl(LOGBACK_CONFIGURATION_FILE_PROPERTY, "classpath:conf/logback.xml");
        if (configFileUrl == null) {
            throw new FathomException("Failed to find Logback config file '{}'",
                    settings.getString(LOGBACK_CONFIGURATION_FILE_PROPERTY, "classpath:conf/logback.xml"));
        }

        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        try (InputStream is = configFileUrl.openStream()) {
            JoranConfigurator configurator = new JoranConfigurator();
            configurator.setContext(context);
            context.reset();
            configurator.doConfigure(is);
            log.info("Configured Logback from '{}'", configFileUrl);
        } catch (IOException | JoranException je) {
            // Deliberately not rethrown: StatusPrinter below reports any configuration errors.
        }
        StatusPrinter.printInCaseOfErrorsOrWarnings(context);
    }
}
| |
package com.firebase.drawing;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PorterDuff;
import android.graphics.RectF;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Toast;
import com.firebase.client.ChildEventListener;
import com.firebase.client.DataSnapshot;
import com.firebase.client.Firebase;
import com.firebase.client.FirebaseError;
import com.firebase.client.Logger;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class DrawingView extends View {
public static final int PIXEL_SIZE = 8;
private Paint mPaint;
private int mLastX;
private int mLastY;
private Canvas mBuffer;
private Bitmap mBitmap;
private Paint mBitmapPaint;
private Firebase mFirebaseRef;
private ChildEventListener mListener;
private int mCurrentColor = 0xFFFF0000;
private Path mPath;
private Set<String> mOutstandingSegments;
private Segment mCurrentSegment;
private float mScale = 1.0f;
private int mCanvasWidth;
private int mCanvasHeight;
public DrawingView(Context context, Firebase ref) {
this(context, ref, 1.0f);
}
public DrawingView(Context context, Firebase ref, int width, int height) {
this(context, ref);
this.setBackgroundColor(Color.DKGRAY);
mCanvasWidth = width;
mCanvasHeight = height;
}
public DrawingView(Context context, Firebase ref, float scale) {
super(context);
mOutstandingSegments = new HashSet<String>();
mPath = new Path();
this.mFirebaseRef = ref;
this.mScale = scale;
mListener = ref.addChildEventListener(new ChildEventListener() {
/**
* @param dataSnapshot The data we need to construct a new Segment
* @param previousChildName Supplied for ordering, but we don't really care about ordering in this app
*/
@Override
public void onChildAdded(DataSnapshot dataSnapshot, String previousChildName) {
String name = dataSnapshot.getKey();
// To prevent lag, we draw our own segments as they are created. As a result, we need to check to make
// sure this event is a segment drawn by another user before we draw it
if (!mOutstandingSegments.contains(name)) {
// Deserialize the data into our Segment class
Segment segment = dataSnapshot.getValue(Segment.class);
drawSegment(segment, paintFromColor(segment.getColor()));
// Tell the view to redraw itself
invalidate();
}
}
@Override
public void onChildChanged(DataSnapshot dataSnapshot, String s) {
// No-op
}
@Override
public void onChildRemoved(DataSnapshot dataSnapshot) {
// No-op
}
@Override
public void onChildMoved(DataSnapshot dataSnapshot, String s) {
// No-op
}
@Override
public void onCancelled(FirebaseError firebaseError) {
// No-op
}
});
mPaint = new Paint();
mPaint.setAntiAlias(true);
mPaint.setDither(true);
mPaint.setColor(0xFFFF0000);
mPaint.setStyle(Paint.Style.STROKE);
mBitmapPaint = new Paint(Paint.DITHER_FLAG);
}
public void cleanup() {
mFirebaseRef.removeEventListener(mListener);
}
public void setColor(int color) {
mCurrentColor = color;
mPaint.setColor(color);
}
public void clear() {
mBitmap = Bitmap.createBitmap(mBitmap.getWidth(), mBitmap.getHeight(), Bitmap.Config.ARGB_8888);
mBuffer = new Canvas(mBitmap);
mCurrentSegment = null;
mOutstandingSegments.clear();
invalidate();
}
@Override
protected void onSizeChanged(int w, int h, int oldW, int oldH) {
super.onSizeChanged(w, h, oldW, oldH);
mScale = Math.min(1.0f * w / mCanvasWidth, 1.0f * h / mCanvasHeight);
mBitmap = Bitmap.createBitmap(Math.round(mCanvasWidth * mScale), Math.round(mCanvasHeight * mScale), Bitmap.Config.ARGB_8888);
mBuffer = new Canvas(mBitmap);
Log.i("AndroidDrawing", "onSizeChanged: created bitmap/buffer of "+mBitmap.getWidth()+"x"+mBitmap.getHeight());
}
@Override
protected void onDraw(Canvas canvas) {
canvas.drawColor(Color.DKGRAY);
canvas.drawRect(0, 0, mBitmap.getWidth(), mBitmap.getHeight(), paintFromColor(Color.WHITE, Paint.Style.FILL_AND_STROKE));
canvas.drawBitmap(mBitmap, 0, 0, mBitmapPaint);
canvas.drawPath(mPath, mPaint);
}
public static Paint paintFromColor(int color) {
return paintFromColor(color, Paint.Style.STROKE);
}
public static Paint paintFromColor(int color, Paint.Style style) {
Paint p = new Paint();
p.setAntiAlias(true);
p.setDither(true);
p.setColor(color);
p.setStyle(style);
return p;
}
public static Path getPathForPoints(List<Point> points, double scale) {
Path path = new Path();
scale = scale * PIXEL_SIZE;
Point current = points.get(0);
path.moveTo(Math.round(scale * current.x), Math.round(scale * current.y));
Point next = null;
for (int i = 1; i < points.size(); ++i) {
next = points.get(i);
path.quadTo(
Math.round(scale * current.x), Math.round(scale * current.y),
Math.round(scale * (next.x + current.x) / 2), Math.round(scale * (next.y + current.y) / 2)
);
current = next;
}
if (next != null) {
path.lineTo(Math.round(scale * next.x), Math.round(scale * next.y));
}
return path;
}
private void drawSegment(Segment segment, Paint paint) {
if (mBuffer != null) {
mBuffer.drawPath(getPathForPoints(segment.getPoints(), mScale), paint);
}
}
private void onTouchStart(float x, float y) {
mPath.reset();
mPath.moveTo(x, y);
mCurrentSegment = new Segment(mCurrentColor);
mLastX = (int) x / PIXEL_SIZE;
mLastY = (int) y / PIXEL_SIZE;
mCurrentSegment.addPoint(mLastX, mLastY);
}
private void onTouchMove(float x, float y) {
int x1 = (int) x / PIXEL_SIZE;
int y1 = (int) y / PIXEL_SIZE;
float dx = Math.abs(x1 - mLastX);
float dy = Math.abs(y1 - mLastY);
if (dx >= 1 || dy >= 1) {
mPath.quadTo(mLastX * PIXEL_SIZE, mLastY * PIXEL_SIZE, ((x1 + mLastX) * PIXEL_SIZE) / 2, ((y1 + mLastY) * PIXEL_SIZE) / 2);
mLastX = x1;
mLastY = y1;
mCurrentSegment.addPoint(mLastX, mLastY);
}
}
private void onTouchEnd() {
mPath.lineTo(mLastX * PIXEL_SIZE, mLastY * PIXEL_SIZE);
mBuffer.drawPath(mPath, mPaint);
mPath.reset();
Firebase segmentRef = mFirebaseRef.push();
final String segmentName = segmentRef.getKey();
mOutstandingSegments.add(segmentName);
// create a scaled version of the segment, so that it matches the size of the board
Segment segment = new Segment(mCurrentSegment.getColor());
for (Point point: mCurrentSegment.getPoints()) {
segment.addPoint((int)Math.round(point.x / mScale), (int)Math.round(point.y / mScale));
}
// Save our segment into Firebase. This will let other clients see the data and add it to their own canvases.
// Also make a note of the outstanding segment name so we don't do a duplicate draw in our onChildAdded callback.
// We can remove the name from mOutstandingSegments once the completion listener is triggered, since we will have
// received the child added event by then.
segmentRef.setValue(segment, new Firebase.CompletionListener() {
@Override
public void onComplete(FirebaseError error, Firebase firebaseRef) {
if (error != null) {
Log.e("AndroidDrawing", error.toString());
throw error.toException();
}
mOutstandingSegments.remove(segmentName);
}
});
}
@Override
public boolean onTouchEvent(MotionEvent event) {
float x = event.getX();
float y = event.getY();
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
onTouchStart(x, y);
invalidate();
break;
case MotionEvent.ACTION_MOVE:
onTouchMove(x, y);
invalidate();
break;
case MotionEvent.ACTION_UP:
onTouchEnd();
invalidate();
break;
}
return true;
}
}
| |
package com.red_folder.phonegap.plugin.gpsservice.models;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import com.red_folder.phonegap.plugin.gpsservice.logic.TransportStrategy;
public class LocationModel implements TransportStrategy.ITransportData {
/*
************************************************************************************************
* Static values
************************************************************************************************
*/
// Log tag — NOTE(review): unused in the visible portion of this class; confirm before removing.
private static String TAG = LocationModel.class.getSimpleName();

/*
************************************************************************************************
* Keys
************************************************************************************************
*/
// Map keys used when the model is serialized to/from a Map (presumably for the
// TransportStrategy.ITransportData contract — confirm against the interface).
public final static String MAP_KEY_HEADING = "heading";
public final static String MAP_KEY_ALTITUDE = "altitude";
public final static String MAP_KEY_ACCURACY = "accuracy";
public final static String MAP_KEY_LONGITUDE = "longitude";
public final static String MAP_KEY_LATITUDE = "latitude";
public final static String MAP_KEY_SPEED = "speed";
public final static String MAP_KEY_ALTITUDE_ACCURACY = "altitudeAccuracy";
public final static String MAP_KEY_TIMESTAMP = "timestamp";

/*
************************************************************************************************
* Internal Data
************************************************************************************************
*/
// Location data and the lock to ensure only single access
private final Object mLocationDataLock = new Object();
// All fields below are guarded by mLocationDataLock; defaults are zero / null
// until a fix is applied.
private double mLongitude = 0;
private double mLatitude = 0;
private float mAccuracy = 0;
private double mAltitude = 0;
private float mAltitudeAccuracy = 0;
private float mHeading = 0;
private float mSpeed = 0;
private long mTimestamp = 0;
// Wall-clock time of the last update — NOTE(review): set elsewhere in the class; confirm.
private Date mUpdated = null;
*/
public LocationModel() {
}
public LocationModel(double pLongitude,
double pLatitude,
float pAccuracy,
double pAltitude,
float pAltitudeAccuracy,
float pHeading,
float pSpeed,
long pTimestamp) {
this.setLocation(pLongitude, pLatitude, pAccuracy, pAltitude, pAltitudeAccuracy, pHeading, pSpeed, pTimestamp);
}
/*
************************************************************************************************
* Fields
************************************************************************************************
*/
public double getLongitude() {
double result;
synchronized (this.mLocationDataLock) {
result = this.mLongitude;
}
return result;
}
public double getLatitude() {
double result;
synchronized (this.mLocationDataLock) {
result = this.mLatitude;
}
return result;
}
public float getAccuracy() {
float result;
synchronized (this.mLocationDataLock) {
result = this.mAccuracy;
}
return result;
}
public double getAltitude() {
double result;
synchronized (this.mLocationDataLock) {
result = this.mAltitude;
}
return result;
}
public float getAltitudeAccuracy() {
float result;
synchronized (this.mLocationDataLock) {
result = this.mAltitudeAccuracy;
}
return result;
}
public float getHeading() {
float result;
synchronized (this.mLocationDataLock) {
result = this.mHeading;
}
return result;
}
public float getSpeed() {
float result;
synchronized (this.mLocationDataLock) {
result = this.mSpeed;
}
return result;
}
public long getTimestamp() {
long result;
synchronized (this.mLocationDataLock) {
result = this.mTimestamp;
}
return result;
}
/*
************************************************************************************************
* Public
************************************************************************************************
*/
public void setLocation(double pLongitude,
double pLatitude,
float pAccuracy,
double pAltitude,
float pAltitudeAccuracy,
float pHeading,
float pSpeed,
long pTimestamp) {
synchronized (this.mLocationDataLock) {
this.mLongitude = pLongitude;
this.mLatitude = pLatitude;
this.mAccuracy = pAccuracy;
this.mAltitude = pAltitude;
this.mAltitudeAccuracy = pAltitudeAccuracy;
this.mHeading = pHeading;
this.mSpeed = pSpeed;
this.mTimestamp = pTimestamp;
this.mUpdated = Calendar.getInstance().getTime();
this.mLocationDataLock.notifyAll();
}
}
public boolean isUndefined() {
boolean result = false;
synchronized (mLocationDataLock) {
if (this.mUpdated == null)
result = true;
}
return result;
}
public void clear() {
synchronized (this.mLocationDataLock) {
this.mLongitude = 0;
this.mLatitude = 0;
this.mAccuracy = 0;
this.mAltitude = 0;
this.mAltitudeAccuracy = 0;
this.mHeading = 0;
this.mSpeed = 0;
this.mTimestamp = 0;
this.mUpdated = null;
}
}
/*
************************************************************************************************
* Private methods
************************************************************************************************
*/
/*
************************************************************************************************
* Implemented method
************************************************************************************************
*/
@Override
public String getQueryString(Map<String, String> map) {
String query = new String();
if (map.containsKey(MAP_KEY_LONGITUDE)) {
query += map.get(MAP_KEY_LONGITUDE) + "=" + String.valueOf(this.getLongitude());
query += "&";
}
if (map.containsKey(MAP_KEY_LATITUDE)) {
query += map.get(MAP_KEY_LATITUDE) + "=" + String.valueOf(this.getLatitude());
query += "&";
}
if (map.containsKey(MAP_KEY_ACCURACY)) {
query += map.get(MAP_KEY_ACCURACY) + "=" + String.valueOf(this.getAccuracy());
query += "&";
}
if (map.containsKey(MAP_KEY_ALTITUDE)) {
query += map.get(MAP_KEY_ALTITUDE) + "=" + String.valueOf(this.getAltitude());
query += "&";
}
if (map.containsKey(MAP_KEY_ALTITUDE_ACCURACY)) {
query += map.get(MAP_KEY_ALTITUDE_ACCURACY) + "=" + String.valueOf(this.getAltitudeAccuracy());
query += "&";
}
if (map.containsKey(MAP_KEY_HEADING)) {
query += map.get(MAP_KEY_HEADING) + "=" + String.valueOf(this.getHeading());
query += "&";
}
if (map.containsKey(MAP_KEY_SPEED)) {
query += map.get(MAP_KEY_SPEED) + "=" + String.valueOf(this.getSpeed());
query += "&";
}
if (map.containsKey(MAP_KEY_TIMESTAMP)) {
query += map.get(MAP_KEY_TIMESTAMP) + "=" + String.valueOf(this.getTimestamp());
}
return query;
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.thrift;
import static com.facebook.buck.jvm.java.JavaCompilationConstants.DEFAULT_JAVAC_OPTIONS;
import static org.hamcrest.junit.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.jvm.java.DefaultJavaLibrary;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CommandTool;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeExportDependenciesRule;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.StringArg;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Paths;
/**
 * Unit tests for {@code ThriftJavaEnhancer}: flavor/language identification, implicit deps
 * derived from the "thrift" section of the buck config, and the source-zip / java-library
 * build rules created for thrift sources.
 */
public class ThriftJavaEnhancerTest {
// Flavored target the enhancer is asked to create rules for.
private static final BuildTarget TARGET = BuildTargetFactory.newInstance("//:test#java");
// Target configured below as the thrift runtime java library.
private static final BuildTarget JAVA_LIB_TARGET =
BuildTargetFactory.newInstance("//java:library");
// Buck config wiring "thrift.java_library" to JAVA_LIB_TARGET, exercised by getImplicitDeps().
private static final BuckConfig BUCK_CONFIG = FakeBuckConfig.builder()
.setSections(
ImmutableMap.of(
"thrift", ImmutableMap.of("java_library", JAVA_LIB_TARGET.toString())))
.build();
private static final ThriftBuckConfig THRIFT_BUCK_CONFIG = new ThriftBuckConfig(BUCK_CONFIG);
// Enhancer under test, shared by every test case (it is stateless).
private static final ThriftJavaEnhancer ENHANCER = new ThriftJavaEnhancer(
THRIFT_BUCK_CONFIG,
DEFAULT_JAVAC_OPTIONS);
// Builds a no-op rule with the given declared deps, used as a stand-in dependency.
private static FakeBuildRule createFakeBuildRule(
String target,
SourcePathResolver resolver,
BuildRule... deps) {
return new FakeBuildRule(
new FakeBuildRuleParamsBuilder(BuildTargetFactory.newInstance(target))
.setDeclaredDeps(ImmutableSortedSet.copyOf(deps))
.build(), resolver);
}
// Builds a minimal ThriftCompiler rule whose inputs are dummies; only its identity matters here.
private static ThriftCompiler createFakeThriftCompiler(
String target,
SourcePathResolver resolver) {
return new ThriftCompiler(
new FakeBuildRuleParamsBuilder(BuildTargetFactory.newInstance(target)).build(),
resolver,
new CommandTool.Builder()
.addArg(new StringArg("compiler"))
.build(),
ImmutableList.of(),
Paths.get("output"),
new FakeSourcePath("source"),
"language",
ImmutableSet.of(),
ImmutableList.of(),
ImmutableSet.of(),
ImmutableMap.of(),
ImmutableSortedSet.of());
}
// The enhancer must identify itself as handling the "java" thrift language.
@Test
public void getLanguage() {
assertEquals(
"java",
ENHANCER.getLanguage());
}
// ... and the corresponding "java" build-target flavor.
@Test
public void getFlavor() {
assertEquals(
ImmutableFlavor.of("java"),
ENHANCER.getFlavor());
}
// getOptions should pass through whatever javaOptions the constructor arg carries.
@Test
public void getOptions() {
ThriftConstructorArg arg = new ThriftConstructorArg();
ImmutableSet<String> options;
// Test empty options.
options = ImmutableSet.of();
arg.javaOptions = options;
assertEquals(
options,
ENHANCER.getOptions(TARGET, arg));
// Test set options.
options = ImmutableSet.of("test", "option");
arg.javaOptions = options;
assertEquals(
options,
ENHANCER.getOptions(TARGET, arg));
}
@Test
public void getImplicitDeps() {
ThriftConstructorArg arg = new ThriftConstructorArg();
// Verify that setting "thrift:java_library" in the buck config propagates that
// dep via the getImplicitDeps method.
assertEquals(
ImmutableSet.of(JAVA_LIB_TARGET),
ENHANCER.getImplicitDepsForTargetFromConstructorArg(TARGET, arg));
}
// End-to-end: two thrift sources should each yield a SrcZip rule depending on its compile
// rule, and the top-level java library should depend on both zips plus the passed-in deps.
@Test
public void createBuildRule() throws NoSuchBuildTargetException {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
BuildRuleParams flavoredParams = new FakeBuildRuleParamsBuilder(TARGET).build();
// Add a dummy dependency to the constructor arg to make sure it gets through.
ThriftConstructorArg arg = new ThriftConstructorArg();
// Setup up some thrift inputs to pass to the createBuildRule method.
ImmutableMap<String, ThriftSource> sources = ImmutableMap.of(
"test1.thrift", new ThriftSource(
createFakeThriftCompiler("//:thrift_source1", pathResolver),
ImmutableList.of(),
Paths.get("output1")),
"test2.thrift", new ThriftSource(
createFakeThriftCompiler("//:thrift_source2", pathResolver),
ImmutableList.of(),
Paths.get("output2")));
// Create a dummy implicit dep to pass in.
ImmutableSortedSet<BuildRule> deps = ImmutableSortedSet.of(
createFakeBuildRule("//:dep", pathResolver));
// Run the enhancer to create the language specific build rule.
DefaultJavaLibrary library = (DefaultJavaLibrary) ENHANCER
.createBuildRule(TargetGraph.EMPTY, flavoredParams, resolver, arg, sources, deps);
// Verify that the first thrift source created a source zip rule with correct deps.
BuildRule srcZip1 = resolver.getRule(
ENHANCER.getSourceZipBuildTarget(TARGET.getUnflavoredBuildTarget(), "test1.thrift"));
assertNotNull(srcZip1);
assertTrue(srcZip1 instanceof SrcZip);
assertEquals(
ImmutableSortedSet.<BuildRule>of(sources.get("test1.thrift").getCompileRule()),
srcZip1.getDeps());
// Verify that the second thrift source created a source zip rule with correct deps.
BuildRule srcZip2 = resolver.getRule(
ENHANCER.getSourceZipBuildTarget(TARGET.getUnflavoredBuildTarget(), "test2.thrift"));
assertNotNull(srcZip2);
assertTrue(srcZip2 instanceof SrcZip);
assertEquals(
ImmutableSortedSet.<BuildRule>of(sources.get("test2.thrift").getCompileRule()),
srcZip2.getDeps());
// Verify that the top-level default java lib has correct deps.
assertEquals(
ImmutableSortedSet.<BuildRule>naturalOrder()
.addAll(deps)
.add(srcZip1)
.add(srcZip2)
.build(),
library.getDeps());
}
// Deps exported by a dependency (FakeExportDependenciesRule) must surface as direct deps
// of the generated java library.
@Test
public void exportedDeps() throws NoSuchBuildTargetException {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
BuildRuleParams flavoredParams =
new FakeBuildRuleParamsBuilder(TARGET).build();
// Add a dummy dependency to the constructor arg to make sure it gets through.
ThriftConstructorArg arg = new ThriftConstructorArg();
// Setup up some thrift inputs to pass to the createBuildRule method.
ImmutableMap<String, ThriftSource> sources = ImmutableMap.of(
"test.thrift", new ThriftSource(
createFakeThriftCompiler("//:thrift_source", pathResolver),
ImmutableList.of(),
Paths.get("output")));
// Create a dep chain with an exported dep.
FakeBuildRule exportedRule =
resolver.addToIndex(new FakeBuildRule("//:exported_rule", pathResolver));
FakeExportDependenciesRule exportingRule =
resolver.addToIndex(
new FakeExportDependenciesRule("//:exporting_rule", pathResolver, exportedRule));
// Run the enhancer to create the language specific build rule.
DefaultJavaLibrary library = (DefaultJavaLibrary) ENHANCER
.createBuildRule(
TargetGraph.EMPTY,
flavoredParams,
resolver,
arg,
sources,
ImmutableSortedSet.of(exportingRule));
assertThat(library.getDeps(), Matchers.<BuildRule>hasItem(exportedRule));
}
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.security.impl;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.GeneralSecurityException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import io.undertow.security.api.AuthenticationMechanism;
import io.undertow.security.api.GSSAPIServerSubjectFactory;
import io.undertow.security.api.SecurityContext;
import io.undertow.security.idm.Account;
import io.undertow.security.idm.GSSContextCredential;
import io.undertow.security.idm.IdentityManager;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.ServerConnection;
import io.undertow.server.handlers.proxy.ExclusivityChecker;
import io.undertow.util.AttachmentKey;
import io.undertow.util.FlexBase64;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.Oid;
import static io.undertow.util.Headers.AUTHORIZATION;
import static io.undertow.util.Headers.HOST;
import static io.undertow.util.Headers.NEGOTIATE;
import static io.undertow.util.Headers.WWW_AUTHENTICATE;
import static io.undertow.util.StatusCodes.UNAUTHORIZED;
/**
* {@link io.undertow.security.api.AuthenticationMechanism} for GSSAPI / SPNEGO based authentication.
* <p>
* GSSAPI authentication is associated with the HTTP connection; as long as the connection is re-used, the
* authentication state can be re-used as well.
* <p>
* TODO - May consider an option to allow it to also be associated with the underlying session but that has its own risks so
* would need to come with a warning.
*
* @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
*/
public class GSSAPIAuthenticationMechanism implements AuthenticationMechanism {
public static final ExclusivityChecker EXCLUSIVITY_CHECKER = new ExclusivityChecker() {
@Override
public boolean isExclusivityRequired(HttpServerExchange exchange) {
List<String> authHeaders = exchange.getRequestHeaders().get(AUTHORIZATION);
if (authHeaders != null) {
for (String current : authHeaders) {
if (current.startsWith(NEGOTIATE_PREFIX)) {
return true;
}
}
}
return false;
}
};
private static final String NEGOTIATION_PLAIN = NEGOTIATE.toString();
private static final String NEGOTIATE_PREFIX = NEGOTIATE + " ";
private static final Oid[] DEFAULT_MECHANISMS;
static {
try {
Oid spnego = new Oid("1.3.6.1.5.5.2");
Oid kerberos = new Oid("1.2.840.113554.1.2.2");
DEFAULT_MECHANISMS = new Oid[] { spnego, kerberos };
} catch (GSSException e) {
throw new RuntimeException(e);
}
}
private final String name = "SPNEGO";
private final IdentityManager identityManager;
private final GSSAPIServerSubjectFactory subjectFactory;
private final Oid[] mechanisms;
public GSSAPIAuthenticationMechanism(final GSSAPIServerSubjectFactory subjectFactory, IdentityManager identityManager, Oid ...supportedMechanisms) {
this.subjectFactory = subjectFactory;
this.identityManager = identityManager;
this.mechanisms = supportedMechanisms;
}
public GSSAPIAuthenticationMechanism(final GSSAPIServerSubjectFactory subjectFactory, Oid ...supportedMechanisms) {
this(subjectFactory, null, supportedMechanisms);
}
public GSSAPIAuthenticationMechanism(final GSSAPIServerSubjectFactory subjectFactory) {
this(subjectFactory, DEFAULT_MECHANISMS);
}
@SuppressWarnings("deprecation")
private IdentityManager getIdentityManager(SecurityContext securityContext) {
return identityManager != null ? identityManager : securityContext.getIdentityManager();
}
@Override
public AuthenticationMechanismOutcome authenticate(final HttpServerExchange exchange,
final SecurityContext securityContext) {
ServerConnection connection = exchange.getConnection();
NegotiationContext negContext = connection.getAttachment(NegotiationContext.ATTACHMENT_KEY);
if (negContext != null) {
exchange.putAttachment(NegotiationContext.ATTACHMENT_KEY, negContext);
if (negContext.isEstablished()) {
IdentityManager identityManager = getIdentityManager(securityContext);
final Account account = identityManager.verify(new GSSContextCredential(negContext.getGssContext()));
if (account != null) {
securityContext.authenticationComplete(account, name, false);
return AuthenticationMechanismOutcome.AUTHENTICATED;
} else {
return AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
}
}
}
List<String> authHeaders = exchange.getRequestHeaders().get(AUTHORIZATION);
if (authHeaders != null) {
for (String current : authHeaders) {
if (current.startsWith(NEGOTIATE_PREFIX)) {
String base64Challenge = current.substring(NEGOTIATE_PREFIX.length());
try {
ByteBuffer challenge = FlexBase64.decode(base64Challenge);
return runGSSAPI(exchange, challenge, securityContext);
} catch (IOException e) {
}
// By this point we had a header we should have been able to verify but for some reason
// it was not correctly structured.
return AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
}
}
}
// No suitable header was found so authentication was not even attempted.
return AuthenticationMechanismOutcome.NOT_ATTEMPTED;
}
public ChallengeResult sendChallenge(final HttpServerExchange exchange, final SecurityContext securityContext) {
NegotiationContext negContext = exchange.getAttachment(NegotiationContext.ATTACHMENT_KEY);
String header = NEGOTIATION_PLAIN;
if (negContext != null) {
byte[] responseChallenge = negContext.useResponseToken();
exchange.putAttachment(NegotiationContext.ATTACHMENT_KEY, null);
if (responseChallenge != null) {
header = NEGOTIATE_PREFIX + FlexBase64.encodeString(responseChallenge, false);
}
} else {
Subject server = null;
try {
server = subjectFactory.getSubjectForHost(getHostName(exchange));
} catch (GeneralSecurityException e) {
// Deliberately ignore - no Subject so don't offer GSSAPI is our main concern here.
}
if (server == null) {
return new ChallengeResult(false);
}
}
exchange.getResponseHeaders().add(WWW_AUTHENTICATE, header);
return new ChallengeResult(true, UNAUTHORIZED);
}
public AuthenticationMechanismOutcome runGSSAPI(final HttpServerExchange exchange,
final ByteBuffer challenge, final SecurityContext securityContext) {
try {
Subject server = subjectFactory.getSubjectForHost(getHostName(exchange));
// The AcceptSecurityContext takes over responsibility for setting the result.
return Subject.doAs(server, new AcceptSecurityContext(exchange, challenge, securityContext));
} catch (GeneralSecurityException e) {
e.printStackTrace();
return AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
} catch (PrivilegedActionException e) {
e.printStackTrace();
return AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
}
}
private String getHostName(final HttpServerExchange exchange) {
String hostName = exchange.getRequestHeaders().getFirst(HOST);
if (hostName != null) {
if (hostName.startsWith("[") && hostName.contains("]")) {
hostName = hostName.substring(0, hostName.indexOf(']') + 1);
} else if (hostName.contains(":")) {
hostName = hostName.substring(0, hostName.indexOf(":"));
}
return hostName;
}
return null;
}
private class AcceptSecurityContext implements PrivilegedExceptionAction<AuthenticationMechanismOutcome> {
private final HttpServerExchange exchange;
private final ByteBuffer challenge;
private final SecurityContext securityContext;
private AcceptSecurityContext(final HttpServerExchange exchange,
final ByteBuffer challenge, final SecurityContext securityContext) {
this.exchange = exchange;
this.challenge = challenge;
this.securityContext = securityContext;
}
public AuthenticationMechanismOutcome run() throws GSSException {
NegotiationContext negContext = exchange.getAttachment(NegotiationContext.ATTACHMENT_KEY);
if (negContext == null) {
negContext = new NegotiationContext();
exchange.putAttachment(NegotiationContext.ATTACHMENT_KEY, negContext);
// Also cache it on the connection for future calls.
exchange.getConnection().putAttachment(NegotiationContext.ATTACHMENT_KEY, negContext);
}
GSSContext gssContext = negContext.getGssContext();
if (gssContext == null) {
GSSManager manager = GSSManager.getInstance();
GSSCredential credential = manager.createCredential(null, GSSCredential.INDEFINITE_LIFETIME, mechanisms, GSSCredential.ACCEPT_ONLY);
gssContext = manager.createContext(credential);
negContext.setGssContext(gssContext);
}
byte[] respToken = gssContext.acceptSecContext(challenge.array(), challenge.arrayOffset(), challenge.limit());
negContext.setResponseToken(respToken);
if (negContext.isEstablished()) {
if (respToken != null) {
// There will be no further challenge but we do have a token so set it here.
exchange.getResponseHeaders().add(WWW_AUTHENTICATE,
NEGOTIATE_PREFIX + FlexBase64.encodeString(respToken, false));
}
IdentityManager identityManager = securityContext.getIdentityManager();
final Account account = identityManager.verify(new GSSContextCredential(negContext.getGssContext()));
if (account != null) {
securityContext.authenticationComplete(account, name, false);
return AuthenticationMechanismOutcome.AUTHENTICATED;
} else {
return AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
}
} else {
// This isn't a failure but as the context is not established another round trip with the client is needed.
return AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
}
}
}
private static class NegotiationContext {
static final AttachmentKey<NegotiationContext> ATTACHMENT_KEY = AttachmentKey.create(NegotiationContext.class);
private GSSContext gssContext;
private byte[] responseToken;
private Principal principal;
GSSContext getGssContext() {
return gssContext;
}
void setGssContext(GSSContext gssContext) {
this.gssContext = gssContext;
}
byte[] useResponseToken() {
// The token only needs to be returned once so clear it once used.
try {
return responseToken;
} finally {
responseToken = null;
}
}
void setResponseToken(byte[] responseToken) {
this.responseToken = responseToken;
}
boolean isEstablished() {
return gssContext != null ? gssContext.isEstablished() : false;
}
Principal getPrincipal() {
if (!isEstablished()) {
throw new IllegalStateException("No established GSSContext to use for the Principal.");
}
if (principal == null) {
try {
principal = new KerberosPrincipal(gssContext.getSrcName().toString());
} catch (GSSException e) {
throw new IllegalStateException("Unable to create Principal", e);
}
}
return principal;
}
}
}
| |
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.zuul.context;
import com.netflix.client.http.HttpResponse;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
* Extended RequestContext adding Netflix library specific concepts and data
*
* @author Mikey Cohen
* Date: 12/23/11
* Time: 1:14 PM
*/
public class NFRequestContext extends RequestContext {
private static final String EVENT_PROPS_KEY = "eventProperties";
static {
RequestContext.setContextClass(NFRequestContext.class);
}
/**
* creates a new NFRequestContext
*/
public NFRequestContext() {
super();
put(EVENT_PROPS_KEY, new HashMap<String, Object>());
}
/**
* returns a NFRequestContext from the threadLocal
*
* @return
*/
public static NFRequestContext getCurrentContext() {
return (NFRequestContext) RequestContext.threadLocal.get();
}
/**
* returns the routeVIP; that is the Eureka "vip" of registered instances
*
* @return
*/
public String getRouteVIP() {
return (String) get("routeVIP");
}
/**
* sets routeVIP; that is the Eureka "vip" of registered instances
*
* @return
*/
public void setRouteVIP(String sVip) {
set("routeVIP", sVip);
}
/**
* @return true if a routeHost or routeVip has been defined
*/
public boolean hasRouteVIPOrHost() {
return (getRouteVIP() != null) || (getRouteHost() != null);
}
/**
* unsets the requestContextVariables
*/
public void unset() {
if (getZuulResponse() != null) {
getZuulResponse().close(); //check this?
}
super.unset();
}
/**
* sets the requestEntity; the inputStream of the Request
*
* @param entity
*/
public void setRequestEntity(InputStream entity) {
set("requestEntity", entity);
}
/**
* @return the requestEntity; the inputStream of the request
*/
public InputStream getRequestEntity() {
return (InputStream) get("requestEntity");
}
/**
* Sets the HttpResponse response that comes back from a Ribbon client.
*
* @param response
*/
public void setZuulResponse(HttpResponse response) {
set("zuulResponse", response);
}
/**
* gets the "zuulResponse"
*
* @return returns the HttpResponse from a Ribbon call to an origin
*/
public HttpResponse getZuulResponse() {
return (HttpResponse) get("zuulResponse");
}
/**
* returns the "route". This is a Zuul defined bucket for collecting request metrics. By default the route is the
* first segment of the uri eg /get/my/stuff : route is "get"
*
* @return
*/
public String getRoute() {
return (String) get("route");
}
public void setEventProperty(String key, Object value) {
getEventProperties().put(key, value);
}
public Map<String, Object> getEventProperties() {
return (Map<String, Object>) this.get(EVENT_PROPS_KEY);
}
@RunWith(MockitoJUnitRunner.class)
public static class UnitTest {
@Mock
private HttpResponse clientResponse;
@Before
public void before() {
RequestContext.getCurrentContext().unset();
RequestContext.setContextClass(NFRequestContext.class);
MockitoAnnotations.initMocks(this);
}
@Test
public void testGetContext() {
RequestContext.setContextClass(NFRequestContext.class);
NFRequestContext context = NFRequestContext.getCurrentContext();
assertNotNull(context);
Assert.assertEquals(context.getClass(), NFRequestContext.class);
RequestContext context1 = RequestContext.getCurrentContext();
assertNotNull(context1);
Assert.assertEquals(context1.getClass(), NFRequestContext.class);
}
@Test
public void testSetContextVariable() {
NFRequestContext context = NFRequestContext.getCurrentContext();
assertNotNull(context);
context.set("test", "moo");
Assert.assertEquals(context.get("test"), "moo");
}
@Test
public void testNFRequestContext() {
NFRequestContext context = NFRequestContext.getCurrentContext();
context.setZuulResponse(clientResponse);
assertEquals(context.getZuulResponse(), clientResponse);
context.setRouteVIP("vip");
assertEquals("vip", context.getRouteVIP());
}
}
}
| |
package jsky.image.gui;
import java.awt.Color;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.print.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.print.PrintService;
import javax.print.PrintServiceLookup;
import javax.print.attribute.HashPrintRequestAttributeSet;
import javax.swing.JComponent;
import jsky.coords.WorldCoordinateConverter;
import jsky.coords.WorldCoords;
import jsky.util.I18N;
import jsky.util.Preferences;
import jsky.util.gui.SwingWorker;
import jsky.util.gui.DialogUtil;
import jsky.util.gui.PrintPreview;
import jsky.util.gui.ProgressPanel;
/**
* Displays a print dialog box for printing the current image display
* and handles the details of printing the image and graphics.
*/
public class ImagePrintDialog implements Printable, ActionListener {
// Used to access internationalized strings (see i18n/gui*.properties)
private static final I18N _I18N = I18N.getInstance(ImagePrintDialog.class);
// Base name of file used to store printer settings (under ~/.jsky)
private static final String _ATTR_FILE = "ImagePrintDialog.printerAttr";
// The target image display
private MainImageDisplay _imageDisplay;
// Used to display a print dialog
private PrinterJob _printerJob;
// Saves user's printer settings
private HashPrintRequestAttributeSet _printerAttr;
// Panel used to display print progress
private ProgressPanel _progressPanel;
// Font used for printing text (headers and footers)
private static final Font PRINTING_FONT = Font.decode("SansSerif-8");
// Presumably set at the start of a print job and cleared after the first page is
// rendered — TODO confirm against the Printable.print() implementation (not in view).
private boolean _newPrint;
// Offsets used to position the printed image on the page; written by the rendering
// code, which is outside this chunk — NOTE(review): verify semantics there.
private double _printOffsetX;
private double _printOffsetY;
// Formats the date/time stamp for printed headers/footers. NOTE: SimpleDateFormat is
// not thread-safe; assumed to be used by one printing thread at a time.
private final SimpleDateFormat _dateFormatter = new SimpleDateFormat("MM/dd/yy HH:mm:ss");
/**
* Initialize with the target image display object.
*/
public ImagePrintDialog(MainImageDisplay imageDisplay) {
    // Remember the display whose contents will be printed.
    _imageDisplay = imageDisplay;
}
/**
* Display a preview of the image to be printed in a popup window.
*/
public void preview() {
    SwingWorker previewWorker = new SwingWorker() {

        public Object construct() {
            // Build the preview off the event dispatch thread; any failure is
            // returned as a value and reported from finished().
            try {
                String title = _imageDisplay.getObjectName();
                if (title == null) {
                    title = _imageDisplay.getFilename();
                }
                if (title == null) {
                    title = _I18N.getString("printPreview");
                }
                startPrint(_I18N.getString("preparingImage"));
                return new PrintPreview(ImagePrintDialog.this, ImagePrintDialog.this, title);
            } catch (Exception e) {
                return e;
            }
        }

        public void finished() {
            _progressPanel.stop();
            _progressPanel.setTitle(_I18N.getString("printingImage"));
            Object result = getValue();
            if (result instanceof Exception) {
                DialogUtil.error((Exception) result);
            } else if (result instanceof PrintPreview) {
                ((PrintPreview) result).setVisible(true);
            }
        }
    };
    previewWorker.start();
}
/**
* Called for the Print button in the preview window
*/
public void actionPerformed(ActionEvent ae) {
try {
print();
} catch (Exception e) {
DialogUtil.error(e);
}
}
/**
* Prints the contents of the current image display image area.
* Prompts user with standard print dialog boxes first.
*/
public void print() throws PrinterException {
// Get a PrinterJob
if (_printerJob == null)
_printerJob = PrinterJob.getPrinterJob();
_printerJob.setJobName(_I18N.getString("imageDisplay"));
_printerJob.setPrintable(this);
// restore the user's previous printer selection
String prefKey = getClass().getName() + ".printer";
String printer = Preferences.get(prefKey);
if (printer != null) {
PrintService[] ar = PrintServiceLookup.lookupPrintServices(null, null);
for (PrintService anAr : ar) {
if (printer.equals(anAr.getName())) {
_printerJob.setPrintService(anAr);
break;
}
}
}
try {
// restore any the printer attributes from a previous session, if needed
_restorePrinterAttr();
if (_printerJob.printDialog(_printerAttr)) {
// remember the printer name
PrintService ps = _printerJob.getPrintService();
if (ps == null)
return;
Preferences.set(prefKey, ps.getName());
// save the printer attributes for future sessions
_savePrinterAttr();
// print the table (this will call the print method below)
new PrintWorker().start();
}
} catch (Exception e) {
DialogUtil.error(e);
}
}
// save the printer attributes for future sessions
private void _savePrinterAttr() {
if (_printerAttr != null) {
try {
Preferences.getPreferences().serialize(_ATTR_FILE, _printerAttr);
} catch (Exception e) {
e.printStackTrace();
}
}
}
// Restore any printer attributes from the previous session
private void _restorePrinterAttr() {
if (_printerAttr == null) {
try {
_printerAttr = (HashPrintRequestAttributeSet) Preferences.getPreferences().deserialize(_ATTR_FILE);
} catch (Exception e) {
_printerAttr = new HashPrintRequestAttributeSet();
}
}
}
/**
* For the Printable interface: Render the image contents onto a
* printable graphics context. Provides the ability to print the
* image canvas contents.
*/
public int print(Graphics g, PageFormat pf, int pageIndex) throws PrinterException {
Graphics2D g2d = (Graphics2D) g;
JComponent canvas = _imageDisplay.getCanvas();
int canvasWidth = canvas.getWidth();
int canvasHeight = canvas.getHeight();
if (pageIndex > 0)
return Printable.NO_SUCH_PAGE;
boolean progress = true;
if (_newPrint) {
// Remember the original clip offset
_newPrint = false;
progress = false; // No progress event first time because of irregular clip bounds
Rectangle r = g2d.getClipBounds();
if (r != null) {
_printOffsetX = r.x;
_printOffsetY = r.y;
}
}
// Compute the scale
double scale = Math.min((pf.getImageableWidth() - 20) / (double) canvasWidth,
(pf.getImageableHeight() - 20) / (double) canvasHeight);
// Draw the footer text
// Just draws name of first image.
// Probably should rethink how this works for multiple images.
// Determine default file name
String footer = _imageDisplay.getObjectName();
if (footer == null)
footer = _imageDisplay.getFilename();
if (footer == null) {
if (_imageDisplay.isWCS()) {
WorldCoordinateConverter wcc = _imageDisplay.getWCS();
WorldCoords center = new WorldCoords(wcc.getWCSCenter(), wcc.getEquinox());
footer = center.toString();
} else {
footer = _I18N.getString("blankImage");
}
}
FontMetrics metrics = canvas.getFontMetrics(PRINTING_FONT);
int width = metrics.stringWidth(footer) + 6;
int height = metrics.getHeight() + 4;
g2d.setColor(Color.black);
g2d.setFont(PRINTING_FONT);
g2d.drawString(footer,
(float) _printOffsetX,
(float) (((canvasHeight + height) * scale) + pf.getImageableY()));
footer = _dateFormatter.format(new Date());
width = metrics.stringWidth(footer) + 6;
g2d.drawString(footer,
(float) (_printOffsetX + ((canvasWidth - width) * scale) - 15),
(float) (((canvasHeight + height) * scale) + pf.getImageableY()));
// Translate and scale the graphics to fit on the page
g2d.translate(_printOffsetX, _printOffsetY);
g2d.scale(scale, scale);
// Clip the canvas drawing so that none of the Viewable objects are drawn
// outside of the image area.
int y = 0;
int x = 0;
int h = canvasHeight;
int w = canvasWidth;
if (g2d.getClipBounds() != null) {
x = g2d.getClipBounds().x;
y = g2d.getClipBounds().y;
w = g2d.getClipBounds().width;
h = g2d.getClipBounds().height;
if (x + w > canvasWidth) {
w = canvasWidth;
}
if (y + h > canvasHeight) {
h = Math.max(0, canvasHeight - y);
}
}
g2d.setClip(x, y, w, h);
// Paint canvas objects onto the image.
_imageDisplay.paintImageAndGraphics(g2d);
if (progress) {
int percent = (int) Math.min(100, Math.floor(((double) (y + h) / (double) canvasHeight) * 100.0));
_progressPanel.setProgress(percent);
}
return Printable.PAGE_EXISTS;
}
/**
* Initialize printing. This method must be called at the beginning of any
* print operation because the print() method will be called multiple times.
*
* @param msg the message for the progress dialog
*/
public void startPrint(String msg) {
_newPrint = true;
_printOffsetX = 0.0;
_printOffsetY = 0.0;
if (_progressPanel == null)
_progressPanel = ProgressPanel.makeProgressPanel(msg);
else
_progressPanel.setTitle(msg);
_progressPanel.start();
}
/**
* Performs all the print calculations in a separate thread.
* A progress bar is shown to the user while the printing occurs.
*/
protected class PrintWorker extends SwingWorker {
public PrintWorker() {
startPrint(_I18N.getString("printing"));
}
public Object construct() {
try {
_progressPanel.setProgress(5);
_printerJob.print(_printerAttr);
} catch (Exception ex) {
return ex;
}
return null;
}
public void finished() {
_progressPanel.stop();
Object o = getValue();
if (o instanceof Exception) {
DialogUtil.error((Exception) o);
}
}
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
import java.util.Arrays;
/**
 * OpenFlow 1.5 "recirculate" port-description property.
 *
 * <p>Wire format (see readFrom/write below): 16-bit type (always 0x4),
 * 16-bit length, followed by the port numbers as raw bytes filling the
 * remainder of the property. Instances are immutable.
 *
 * <p>This class is generated by LoxiGen ("Do not modify" per the file
 * header); comments here document the generated behavior only.
 */
class OFPortDescPropRecirculateVer15 implements OFPortDescPropRecirculate {
    private static final Logger logger = LoggerFactory.getLogger(OFPortDescPropRecirculateVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    // minimum wire size: type (2 bytes) + length (2 bytes), no payload
    final static int MINIMUM_LENGTH = 4;
    // maximum OF message length: 16 bit, unsigned
    final static int MAXIMUM_LENGTH = 0xFFFF;

    private final static byte[] DEFAULT_PORT_NOS = new byte[0];

    // OF message fields
    private final byte[] portNos;
    //

    // Immutable default instance (empty port number list)
    final static OFPortDescPropRecirculateVer15 DEFAULT = new OFPortDescPropRecirculateVer15(
        DEFAULT_PORT_NOS
    );

    // package private constructor - used by readers, builders, and factory
    OFPortDescPropRecirculateVer15(byte[] portNos) {
        if(portNos == null) {
            throw new NullPointerException("OFPortDescPropRecirculateVer15: property portNos cannot be null");
        }
        this.portNos = portNos;
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        // fixed property type discriminator on the wire
        return 0x4;
    }

    @Override
    public byte[] getPortNos() {
        // NOTE(review): returns the internal array without copying; callers
        // must not mutate it (generated code convention)
        return portNos;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    // Returns a builder pre-populated with this instance's field values
    public OFPortDescPropRecirculate.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to a parent message's values for unset fields
    static class BuilderWithParent implements OFPortDescPropRecirculate.Builder {
        final OFPortDescPropRecirculateVer15 parentMessage;

        // OF message fields
        private boolean portNosSet;
        private byte[] portNos;

        BuilderWithParent(OFPortDescPropRecirculateVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x4;
        }

        @Override
        public byte[] getPortNos() {
            return portNos;
        }

        @Override
        public OFPortDescPropRecirculate.Builder setPortNos(byte[] portNos) {
            this.portNos = portNos;
            this.portNosSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFPortDescPropRecirculate build() {
            // fields not explicitly set inherit the parent message's values
            byte[] portNos = this.portNosSet ? this.portNos : parentMessage.portNos;
            if(portNos == null)
                throw new NullPointerException("Property portNos must not be null");
            //
            return new OFPortDescPropRecirculateVer15(
                    portNos
                );
        }
    }

    // Builder that falls back to defaults for unset fields
    static class Builder implements OFPortDescPropRecirculate.Builder {
        // OF message fields
        private boolean portNosSet;
        private byte[] portNos;

        @Override
        public int getType() {
            return 0x4;
        }

        @Override
        public byte[] getPortNos() {
            return portNos;
        }

        @Override
        public OFPortDescPropRecirculate.Builder setPortNos(byte[] portNos) {
            this.portNos = portNos;
            this.portNosSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        //
        @Override
        public OFPortDescPropRecirculate build() {
            // fields not explicitly set use the default values
            byte[] portNos = this.portNosSet ? this.portNos : DEFAULT_PORT_NOS;
            if(portNos == null)
                throw new NullPointerException("Property portNos must not be null");
            return new OFPortDescPropRecirculateVer15(
                    portNos
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes the property from its wire representation
    static class Reader implements OFMessageReader<OFPortDescPropRecirculate> {
        @Override
        public OFPortDescPropRecirculate readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x4
            short type = bb.readShort();
            if(type != (short) 0x4)
                throw new OFParseError("Wrong type: Expected=0x4(0x4), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet: rewind and signal the
                // caller to retry when more bytes arrive
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            // the rest of the property (length minus header bytes already read)
            // is the raw port number payload
            byte[] portNos = ChannelUtils.readBytes(bb, length - (bb.readerIndex() - start));
            OFPortDescPropRecirculateVer15 portDescPropRecirculateVer15 = new OFPortDescPropRecirculateVer15(
                    portNos
                );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", portDescPropRecirculateVer15);
            return portDescPropRecirculateVer15;
        }
    }

    // Feeds this message's identity into a Guava hasher
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFPortDescPropRecirculateVer15Funnel FUNNEL = new OFPortDescPropRecirculateVer15Funnel();

    static class OFPortDescPropRecirculateVer15Funnel implements Funnel<OFPortDescPropRecirculateVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFPortDescPropRecirculateVer15 message, PrimitiveSink sink) {
            // fixed value property type = 0x4
            sink.putShort((short) 0x4);
            // FIXME: skip funnel of length
            sink.putBytes(message.portNos);
        }
    }

    // Serializes this property to its wire representation
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFPortDescPropRecirculateVer15> {
        @Override
        public void write(ByteBuf bb, OFPortDescPropRecirculateVer15 message) {
            int startIndex = bb.writerIndex();
            // fixed value property type = 0x4
            bb.writeShort((short) 0x4);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeBytes(message.portNos);

            // update length field with the actual number of bytes written
            int length = bb.writerIndex() - startIndex;
            if (length > MAXIMUM_LENGTH) {
                throw new IllegalArgumentException("OFPortDescPropRecirculateVer15: message length (" + length + ") exceeds maximum (0xFFFF)");
            }
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFPortDescPropRecirculateVer15(");
        b.append("portNos=").append(Arrays.toString(portNos));
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFPortDescPropRecirculateVer15 other = (OFPortDescPropRecirculateVer15) obj;
        // content equality on the payload bytes
        if (!Arrays.equals(portNos, other.portNos))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.hashCode(portNos);
        return result;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.scan.result.iterator;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.DataRefNode;
import org.apache.carbondata.core.datastore.FileReader;
import org.apache.carbondata.core.datastore.block.AbstractIndex;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode;
import org.apache.carbondata.core.mutate.DeleteDeltaVo;
import org.apache.carbondata.core.reader.CarbonDeleteFilesDataReader;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
import org.apache.carbondata.core.scan.executor.infos.DeleteDeltaInfo;
import org.apache.carbondata.core.scan.model.QueryModel;
import org.apache.carbondata.core.scan.processor.DataBlockIterator;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;
import org.apache.carbondata.core.stats.QueryStatistic;
import org.apache.carbondata.core.stats.QueryStatisticsConstants;
import org.apache.carbondata.core.stats.QueryStatisticsModel;
import org.apache.carbondata.core.stats.QueryStatisticsRecorder;
import org.apache.carbondata.core.util.CarbonProperties;
/**
 * In case of detail query we cannot keep all the records in memory, so this
 * iterator executes one block at a time: each call to next will execute the
 * next block and return its result.
 */
public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterator<E> {

  /**
   * LOGGER.
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(AbstractDetailQueryResultIterator.class.getName());

  /**
   * Per delete-delta-info lock objects so that, under concurrent queries, only
   * one task reads the delete delta files while the other tasks wait.
   */
  private static final Map<DeleteDeltaInfo, Object> deleteDeltaToLockObjectMap =
      new ConcurrentHashMap<>();

  protected ExecutorService execService;

  /**
   * execution info of the blocks still to be scanned
   */
  protected List<BlockExecutionInfo> blockExecutionInfos;

  /**
   * file reader which will be used to execute the query
   */
  protected FileReader fileReader;

  DataBlockIterator dataBlockIterator;

  /**
   * QueryStatisticsRecorder
   */
  protected QueryStatisticsRecorder recorder;

  /**
   * number of rows fetched per batch
   */
  protected int batchSize;

  /**
   * queryStatisticsModel to store query statistics object
   */
  private QueryStatisticsModel queryStatisticsModel;

  AbstractDetailQueryResultIterator(List<BlockExecutionInfo> infos, QueryModel queryModel,
      ExecutorService execService) {
    // Batch size comes from the configured property; a missing or malformed
    // value falls back to the default.
    String batchSizeString =
        CarbonProperties.getInstance().getProperty(CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE);
    if (null != batchSizeString) {
      try {
        batchSize = Integer.parseInt(batchSizeString);
      } catch (NumberFormatException ne) {
        LOGGER.error("Invalid inmemory records size. Using default value");
        batchSize = CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE_DEFAULT;
      }
    } else {
      batchSize = CarbonCommonConstants.DETAIL_QUERY_BATCH_SIZE_DEFAULT;
    }
    this.recorder = queryModel.getStatisticsRecorder();
    this.blockExecutionInfos = infos;
    this.fileReader = FileFactory.getFileHolder(
        FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getTablePath()));
    this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
    this.execService = execService;
    initialiseInfos();
    initQueryStatisticsModel();
  }

  /**
   * Resolves, for every block execution info, the deleted-rows map (when
   * delete delta files exist) and the first data ref node to scan.
   */
  private void initialiseInfos() {
    for (BlockExecutionInfo blockInfo : blockExecutionInfos) {
      // if delete delta file is present, read it and attach the deleted rows
      // to the block execution info
      if (null != blockInfo.getDeleteDeltaFilePath()
          && 0 != blockInfo.getDeleteDeltaFilePath().length) {
        DeleteDeltaInfo deleteDeltaInfo = new DeleteDeltaInfo(blockInfo.getDeleteDeltaFilePath());
        // read and get the delete detail block details
        Map<String, DeleteDeltaVo> deletedRowsMap =
            getDeleteDeltaDetails(blockInfo.getDataBlock(), deleteDeltaInfo);
        // set the deleted rows to block execution info
        blockInfo.setDeletedRecordsMap(deletedRowsMap);
      }
      DataRefNode dataRefNode = blockInfo.getDataBlock().getDataRefNode();
      assert (dataRefNode instanceof BlockletDataRefNode);
      BlockletDataRefNode node = (BlockletDataRefNode) dataRefNode;
      blockInfo.setFirstDataBlock(node);
      blockInfo.setNumberOfBlockToScan(node.numberOfNodes());
    }
  }

  /**
   * Below method will be used to get the delete delta rows for a block.
   * Uses a shared per-delta lock plus double-checked timestamp comparisons so
   * that concurrent queries read each set of delete delta files only once.
   *
   * @param dataBlock data block
   * @param deleteDeltaInfo delete delta info
   * @return blockid+pageid to deleted row mapping
   */
  private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock,
      DeleteDeltaInfo deleteDeltaInfo) {
    // if the data block's delete delta timestamp is already at least as new as
    // the delete delta files' timestamp, the cached deleted rows are current
    if (dataBlock.getDeleteDeltaTimestamp() >= deleteDeltaInfo
        .getLatestDeleteDeltaFileTimestamp()) {
      return dataBlock.getDeletedRowsMap();
    }
    // get the lock object so in case of concurrent query only one task will
    // read the delete delta files, other tasks will wait
    Object lockObject = deleteDeltaToLockObjectMap.get(deleteDeltaInfo);
    // if lock object is null then add a lock object
    if (null == lockObject) {
      synchronized (deleteDeltaToLockObjectMap) {
        // double checking
        lockObject = deleteDeltaToLockObjectMap.get(deleteDeltaInfo);
        if (null == lockObject) {
          lockObject = new Object();
          deleteDeltaToLockObjectMap.put(deleteDeltaInfo, lockObject);
        }
      }
    }
    // double checking whether the deleted rows are already present
    if (dataBlock.getDeleteDeltaTimestamp() < deleteDeltaInfo.getLatestDeleteDeltaFileTimestamp()) {
      // if not then acquire the lock
      synchronized (lockObject) {
        // check the timestamp again
        if (dataBlock.getDeleteDeltaTimestamp() < deleteDeltaInfo
            .getLatestDeleteDeltaFileTimestamp()) {
          // read the delete delta files
          CarbonDeleteFilesDataReader carbonDeleteDeltaFileReader =
              new CarbonDeleteFilesDataReader();
          Map<String, DeleteDeltaVo> deletedRowsMap = carbonDeleteDeltaFileReader
              .getDeletedRowsDataVo(deleteDeltaInfo.getDeleteDeltaFile());
          setDeletedDeltaVoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
          // remove the lock
          deleteDeltaToLockObjectMap.remove(deleteDeltaInfo);
          return deletedRowsMap;
        } else {
          return dataBlock.getDeletedRowsMap();
        }
      }
    } else {
      return dataBlock.getDeletedRowsMap();
    }
  }

  /**
   * Below method will be used to set deleted records map to data block
   * based on latest delta file timestamp.
   *
   * @param deleteDeltaInfo delete delta info holding the latest timestamp
   * @param deletedRecordsMap blockid+pageid to deleted row mapping
   * @param dataBlock data block to update
   */
  private void setDeletedDeltaVoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
      Map<String, DeleteDeltaVo> deletedRecordsMap, AbstractIndex dataBlock) {
    // check if timestamp of data block is less than the latest delete delta timestamp
    // then update the delete delta details and timestamp in data block
    if (dataBlock.getDeleteDeltaTimestamp() < deleteDeltaInfo.getLatestDeleteDeltaFileTimestamp()) {
      synchronized (dataBlock) {
        if (dataBlock.getDeleteDeltaTimestamp() < deleteDeltaInfo
            .getLatestDeleteDeltaFileTimestamp()) {
          dataBlock.setDeletedRowsMap(deletedRecordsMap);
          dataBlock.setDeleteDeltaTimestamp(deleteDeltaInfo.getLatestDeleteDeltaFileTimestamp());
        }
      }
    }
  }

  @Override
  public boolean hasNext() {
    // more rows exist if the current block iterator still has data, or there
    // are unscanned blocks remaining
    return (dataBlockIterator != null && dataBlockIterator.hasNext())
        || blockExecutionInfos.size() > 0;
  }

  /**
   * Advances to the next non-empty data block iterator, skipping blocks that
   * yield no rows.
   */
  void updateDataBlockIterator() {
    if (dataBlockIterator == null || !dataBlockIterator.hasNext()) {
      dataBlockIterator = getDataBlockIterator();
      while (dataBlockIterator != null && !dataBlockIterator.hasNext()) {
        dataBlockIterator = getDataBlockIterator();
      }
    }
  }

  /**
   * Creates a DataBlockIterator for the next pending block, or returns null
   * when all blocks have been consumed.
   */
  private DataBlockIterator getDataBlockIterator() {
    if (blockExecutionInfos.size() > 0) {
      // release file handles held for the previous block before starting the
      // next one — NOTE(review): assumed intent; confirm FileReader.finish()
      // semantics allow reuse afterwards
      try {
        fileReader.finish();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      BlockExecutionInfo executionInfo = blockExecutionInfos.get(0);
      blockExecutionInfos.remove(executionInfo);
      return new DataBlockIterator(executionInfo, fileReader, batchSize, queryStatisticsModel,
          execService);
    }
    return null;
  }

  /**
   * Initializes the query statistics model and registers one statistic per
   * metric tracked during the scan.
   */
  private void initQueryStatisticsModel() {
    this.queryStatisticsModel = new QueryStatisticsModel();
    this.queryStatisticsModel.setRecorder(recorder);
    registerStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
    registerStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
    registerStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED);
    registerStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED);
    registerStatistic(QueryStatisticsConstants.PAGE_SCANNED);
    registerStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
    registerStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME);
    // dimension filling time
    registerStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME);
    // measure filling time
    registerStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME);
    // page Io Time
    registerStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
    // result preparation time
    registerStatistic(QueryStatisticsConstants.RESULT_PREP_TIME);
  }

  /**
   * Creates a QueryStatistic for the given type and registers it in both the
   * statistics model map and the recorder.
   */
  private void registerStatistic(String statisticType) {
    QueryStatistic statistic = new QueryStatistic();
    queryStatisticsModel.getStatisticsTypeAndObjMap().put(statisticType, statistic);
    queryStatisticsModel.getRecorder().recordStatistics(statistic);
  }

  /**
   * Vector-read entry point; only supported by VectorDetailQueryResultIterator.
   */
  public void processNextBatch(CarbonColumnarBatch columnarBatch) {
    throw new UnsupportedOperationException("Please use VectorDetailQueryResultIterator");
  }

  @Override public void close() {
    if (null != dataBlockIterator) {
      dataBlockIterator.close();
    }
    try {
      fileReader.finish();
    } catch (IOException e) {
      // best-effort cleanup: log and continue
      LOGGER.error(e);
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregatorBuilder> {
public static final String NAME = InternalTDigestPercentiles.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
private double[] percents = PercentilesParser.DEFAULT_PERCENTS;
private PercentilesMethod method = PercentilesMethod.TDIGEST;
private int numberOfSignificantValueDigits = 3;
private double compression = 100.0;
private boolean keyed = true;
public PercentilesAggregatorBuilder(String name) {
super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}
/**
* Read from a stream.
*/
public PercentilesAggregatorBuilder(StreamInput in) throws IOException {
super(in, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
percents = in.readDoubleArray();
keyed = in.readBoolean();
numberOfSignificantValueDigits = in.readVInt();
compression = in.readDouble();
method = PercentilesMethod.readFromStream(in);
}
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeDoubleArray(percents);
out.writeBoolean(keyed);
out.writeVInt(numberOfSignificantValueDigits);
out.writeDouble(compression);
method.writeTo(out);
}
/**
* Set the values to compute percentiles from.
*/
public PercentilesAggregatorBuilder percentiles(double... percents) {
if (percents == null) {
throw new IllegalArgumentException("[percents] must not be null: [" + name + "]");
}
double[] sortedPercents = Arrays.copyOf(percents, percents.length);
Arrays.sort(sortedPercents);
this.percents = sortedPercents;
return this;
}
/**
* Get the values to compute percentiles from.
*/
public double[] percentiles() {
return percents;
}
/**
* Set whether the XContent response should be keyed
*/
public PercentilesAggregatorBuilder keyed(boolean keyed) {
this.keyed = keyed;
return this;
}
/**
* Get whether the XContent response should be keyed
*/
public boolean keyed() {
return keyed;
}
/**
* Expert: set the number of significant digits in the values. Only relevant
* when using {@link PercentilesMethod#HDR}.
*/
public PercentilesAggregatorBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
if (numberOfSignificantValueDigits < 0 || numberOfSignificantValueDigits > 5) {
throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]");
}
this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
return this;
}
/**
* Expert: get the number of significant digits in the values. Only relevant
* when using {@link PercentilesMethod#HDR}.
*/
public int numberOfSignificantValueDigits() {
return numberOfSignificantValueDigits;
}
/**
* Expert: set the compression. Higher values improve accuracy but also
* memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
*/
public PercentilesAggregatorBuilder compression(double compression) {
if (compression < 0.0) {
throw new IllegalArgumentException(
"[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]");
}
this.compression = compression;
return this;
}
/**
* Expert: get the compression. Higher values improve accuracy but also
* memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
*/
public double compression() {
return compression;
}
public PercentilesAggregatorBuilder method(PercentilesMethod method) {
if (method == null) {
throw new IllegalArgumentException("[method] must not be null: [" + name + "]");
}
this.method = method;
return this;
}
public PercentilesMethod method() {
return method;
}
@Override
protected ValuesSourceAggregatorFactory<Numeric, ?> innerBuild(AggregationContext context, ValuesSourceConfig<Numeric> config,
AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
switch (method) {
case TDIGEST:
return new TDigestPercentilesAggregatorFactory(name, type, config, percents, compression, keyed, context, parent,
subFactoriesBuilder, metaData);
case HDR:
return new HDRPercentilesAggregatorFactory(name, type, config, percents, numberOfSignificantValueDigits, keyed, context, parent,
subFactoriesBuilder, metaData);
default:
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
}
}
/**
 * Serializes the percents, the keyed flag, and a nested object named after
 * the method carrying that method's single tuning parameter.
 */
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
    builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
    builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
    // The method-specific settings live in a sub-object keyed by the method name.
    builder.startObject(method.getName());
    if (method != PercentilesMethod.TDIGEST) {
        // HDR exposes the number of significant value digits.
        builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
    } else {
        // t-digest exposes the compression factor.
        builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
    }
    builder.endObject();
    return builder;
}
/**
 * Compares the percentiles-specific state: the method, the setting that
 * belongs to that method (significant digits for HDR, compression for
 * t-digest), the percents array, and the keyed flag.
 *
 * @param obj the other builder; the caller guarantees it is a
 *            {@code PercentilesAggregatorBuilder}
 * @return {@code true} if both builders are configured identically
 */
@Override
protected boolean innerEquals(Object obj) {
    PercentilesAggregatorBuilder other = (PercentilesAggregatorBuilder) obj;
    // Methods must match before the method-specific setting is comparable.
    if (!Objects.equals(method, other.method)) {
        return false;
    }
    boolean equalSettings;
    switch (method) {
    case HDR:
        equalSettings = Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits);
        break;
    case TDIGEST:
        equalSettings = Objects.equals(compression, other.compression);
        break;
    default:
        throw new IllegalStateException("Illegal method [" + method.getName() + "]");
    }
    // NOTE: the method was already compared by the guard above, so it is
    // intentionally not re-checked here (the original re-compared it redundantly).
    return equalSettings
        && Objects.deepEquals(percents, other.percents)
        && Objects.equals(keyed, other.keyed);
}
/**
 * Hashes the same state as {@code innerEquals}: percents, keyed flag, the
 * active method's setting, and the method itself.
 */
@Override
protected int innerHashCode() {
    if (method == PercentilesMethod.HDR) {
        return Objects.hash(Arrays.hashCode(percents), keyed, numberOfSignificantValueDigits, method);
    }
    if (method == PercentilesMethod.TDIGEST) {
        return Objects.hash(Arrays.hashCode(percents), keyed, compression, method);
    }
    // Unreachable while PercentilesMethod only has TDIGEST and HDR.
    throw new IllegalStateException("Illegal method [" + method.getName() + "]");
}
/**
 * Returns the registered name used to (de)serialize this aggregation.
 */
@Override
public String getWriteableName() {
    return NAME;
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.compute.v2019_11_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
 * An instance of this class provides access to all the operations defined
 * in Usages.
 *
 * <p>AutoRest-generated wrapper around the Retrofit {@link UsagesService};
 * exposes blocking, callback-based, and Rx {@link Observable} variants of a
 * single paged "list usages" operation.
 */
public class UsagesInner {
    /** The Retrofit service to perform REST calls. */
    private UsagesService service;
    /** The service client containing this operation class. */
    private ComputeManagementClientImpl client;
    /**
     * Initializes an instance of UsagesInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public UsagesInner(Retrofit retrofit, ComputeManagementClientImpl client) {
        this.service = retrofit.create(UsagesService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for Usages to be
     * used by Retrofit to perform actually REST calls.
     */
    interface UsagesService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_11_01.Usages list" })
        @GET("subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/usages")
        Observable<Response<ResponseBody>> list(@Path("location") String location, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2019_11_01.Usages listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList<UsageInner> object if successful.
     */
    public PagedList<UsageInner> list(final String location) {
        // Blocking variant: fetches the first page eagerly, later pages lazily.
        ServiceResponse<Page<UsageInner>> response = listSinglePageAsync(location).toBlocking().single();
        return new PagedList<UsageInner>(response.body()) {
            @Override
            public Page<UsageInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<UsageInner>> listAsync(final String location, final ListOperationCallback<UsageInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(location),
            new Func1<String, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList<UsageInner> object
     */
    public Observable<Page<UsageInner>> listAsync(final String location) {
        return listWithServiceResponseAsync(location)
            .map(new Func1<ServiceResponse<Page<UsageInner>>, Page<UsageInner>>() {
                @Override
                public Page<UsageInner> call(ServiceResponse<Page<UsageInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList<UsageInner> object
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listWithServiceResponseAsync(final String location) {
        // Emits the first page, then recursively follows nextPageLink until exhausted.
        return listSinglePageAsync(location)
            .concatMap(new Func1<ServiceResponse<Page<UsageInner>>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(ServiceResponse<Page<UsageInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList<UsageInner> object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listSinglePageAsync(final String location) {
        if (location == null) {
            throw new IllegalArgumentException("Parameter location is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        // NOTE(review): api-version "2019-07-01" does not match the package
        // version v2019_11_01 — confirm against the service specification.
        final String apiVersion = "2019-07-01";
        return service.list(location, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl1<UsageInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UsageInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response: 200 -> PageImpl1<UsageInner>, anything else -> CloudException.
    private ServiceResponse<PageImpl1<UsageInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<UsageInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl1<UsageInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList<UsageInner> object if successful.
     */
    public PagedList<UsageInner> listNext(final String nextPageLink) {
        ServiceResponse<Page<UsageInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<UsageInner>(response.body()) {
            @Override
            public Page<UsageInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<UsageInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<UsageInner>> serviceFuture, final ListOperationCallback<UsageInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList<UsageInner> object
     */
    public Observable<Page<UsageInner>> listNextAsync(final String nextPageLink) {
        return listNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<UsageInner>>, Page<UsageInner>>() {
                @Override
                public Page<UsageInner> call(ServiceResponse<Page<UsageInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList<UsageInner> object
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
        // Same recursive paging scheme as listWithServiceResponseAsync, starting
        // from an arbitrary continuation link.
        return listNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<UsageInner>>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(ServiceResponse<Page<UsageInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList<UsageInner> object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        // Generated code: the format call is an identity copy of nextPageLink.
        String nextUrl = String.format("%s", nextPageLink);
        return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl1<UsageInner>> result = listNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UsageInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw continuation-page response: 200 -> PageImpl1<UsageInner>, anything else -> CloudException.
    private ServiceResponse<PageImpl1<UsageInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<UsageInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl1<UsageInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
}
| |
/**
* SAHARA Scheduling Server
*
* Schedules and assigns local laboratory rigs.
*
* @license See LICENSE in the top level directory for complete license terms.
*
* Copyright (c) 2011, University of Technology, Sydney
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of Technology, Sydney nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @author Michael Diponio (mdiponio)
* @date 29th January 2011
*/
package au.edu.uts.eng.remotelabs.schedserver.rigmanagement.intf.types;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Calendar;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import org.apache.axiom.om.OMConstants;
import org.apache.axiom.om.OMDataSource;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMFactory;
import org.apache.axiom.om.impl.llom.OMSourcedElementImpl;
import org.apache.axis2.databinding.ADBBean;
import org.apache.axis2.databinding.ADBDataSource;
import org.apache.axis2.databinding.ADBException;
import org.apache.axis2.databinding.utils.BeanUtil;
import org.apache.axis2.databinding.utils.ConverterUtil;
import org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl;
import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter;
/**
 * OfflinePeriodType bean class.
 *
 * <p>Axis2 ADB-generated bean representing a rig offline period: an id,
 * a start/end {@link Calendar} pair, and a textual reason. Handles its own
 * XML (de)serialization; the hand-written parts of the code base should not
 * rely on its internal serialization details.
 */
public class OfflinePeriodType implements ADBBean
{
    /*
     * This type was generated from the piece of schema that had
     * name = OfflinePeriodType
     * Namespace URI = http://remotelabs.eng.uts.edu.au/rigmanagement
     * Namespace Prefix = ns1
     */
    private static final long serialVersionUID = -394706801551447356L;

    // Returns the canonical prefix for the rig-management namespace,
    // or a fresh unique prefix for any other namespace.
    private static String generatePrefix(final String namespace)
    {
        if (namespace.equals("http://remotelabs.eng.uts.edu.au/rigmanagement"))
        {
            return "ns1";
        }
        return BeanUtil.getUniquePrefix();
    }

    // Record identifier of the offline period.
    protected int id;
    public int getId()
    {
        return this.id;
    }
    public void setId(final int param)
    {
        this.id = param;
    }

    // When the offline period begins. Required for serialization.
    protected Calendar start;
    public Calendar getStart()
    {
        return this.start;
    }
    public void setStart(final Calendar param)
    {
        this.start = param;
    }

    // When the offline period ends. Required for serialization.
    protected Calendar end;
    public Calendar getEnd()
    {
        return this.end;
    }
    public void setEnd(final Calendar param)
    {
        this.end = param;
    }

    // Human-readable reason the rig is offline. Required for serialization.
    protected String reason;
    public String getReason()
    {
        return this.reason;
    }
    public void setReason(final String param)
    {
        this.reason = param;
    }

    // Whether the reader supports MTOM-optimised binary content; falls back
    // to false if the reader does not recognise the property.
    public static boolean isReaderMTOMAware(final XMLStreamReader reader)
    {
        boolean isReaderMTOMAware = false;
        try
        {
            isReaderMTOMAware = Boolean.TRUE.equals(reader.getProperty(OMConstants.IS_DATA_HANDLERS_AWARE));
        }
        catch (final IllegalArgumentException e)
        {
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    // Wraps this bean in a lazily-serialized OM element.
    public OMElement getOMElement(final QName parentQName, final OMFactory factory) throws ADBException
    {
        final OMDataSource dataSource = new ADBDataSource(this, parentQName)
        {
            @Override
            public void serialize(final MTOMAwareXMLStreamWriter xmlWriter) throws XMLStreamException
            {
                OfflinePeriodType.this.serialize(this.parentQName, factory, xmlWriter);
            }
        };
        return new OMSourcedElementImpl(parentQName, factory, dataSource);
    }

    @Override
    public void serialize(final QName parentQName, final OMFactory factory, final MTOMAwareXMLStreamWriter xmlWriter)
            throws XMLStreamException, ADBException
    {
        this.serialize(parentQName, factory, xmlWriter, false);
    }

    /*
     * Writes this bean as XML: the parent element, an optional xsi:type
     * attribute, then the id, start, end and reason child elements in schema
     * order. start, end and reason must be non-null or an ADBException is
     * thrown mid-serialization.
     */
    @Override
    public void serialize(final QName parentQName, final OMFactory factory, final MTOMAwareXMLStreamWriter xmlWriter,
            final boolean serializeType) throws XMLStreamException, ADBException
    {
        String prefix = parentQName.getPrefix();
        String namespace = parentQName.getNamespaceURI();
        if ((namespace != null) && (namespace.trim().length() > 0))
        {
            final String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null)
            {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            }
            else
            {
                if (prefix == null)
                {
                    prefix = OfflinePeriodType.generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        }
        else
        {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        if (serializeType)
        {
            final String namespacePrefix = this.registerPrefix(xmlWriter, "http://remotelabs.eng.uts.edu.au/rigmanagement");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0))
            {
                this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", namespacePrefix
                        + ":OfflinePeriodType", xmlWriter);
            }
            else
            {
                this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", "OfflinePeriodType",
                        xmlWriter);
            }
        }
        // Generated code: the child elements are unqualified, so "namespace"
        // is always the empty string and the first branch of each of the
        // following if/else pairs is dead code.
        namespace = "";
        if (!namespace.equals(""))
        {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null)
            {
                prefix = OfflinePeriodType.generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "id", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
            else
            {
                xmlWriter.writeStartElement(namespace, "id");
            }
        }
        else
        {
            xmlWriter.writeStartElement("id");
        }
        xmlWriter.writeCharacters(ConverterUtil.convertToString(this.id));
        xmlWriter.writeEndElement();
        namespace = "";
        if (!namespace.equals(""))
        {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null)
            {
                prefix = OfflinePeriodType.generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "start", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
            else
            {
                xmlWriter.writeStartElement(namespace, "start");
            }
        }
        else
        {
            xmlWriter.writeStartElement("start");
        }
        if (this.start == null)
        {
            throw new ADBException("start cannot be null!!");
        }
        else
        {
            xmlWriter.writeCharacters(ConverterUtil.convertToString(this.start));
        }
        xmlWriter.writeEndElement();
        namespace = "";
        if (!namespace.equals(""))
        {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null)
            {
                prefix = OfflinePeriodType.generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "end", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
            else
            {
                xmlWriter.writeStartElement(namespace, "end");
            }
        }
        else
        {
            xmlWriter.writeStartElement("end");
        }
        if (this.end == null)
        {
            throw new ADBException("end cannot be null!!");
        }
        else
        {
            xmlWriter.writeCharacters(ConverterUtil.convertToString(this.end));
        }
        xmlWriter.writeEndElement();
        namespace = "";
        if (!namespace.equals(""))
        {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null)
            {
                prefix = OfflinePeriodType.generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "reason", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
            else
            {
                xmlWriter.writeStartElement(namespace, "reason");
            }
        }
        else
        {
            xmlWriter.writeStartElement("reason");
        }
        if (this.reason == null)
        {
            throw new ADBException("reason cannot be null!!");
        }
        else
        {
            xmlWriter.writeCharacters(this.reason);
        }
        xmlWriter.writeEndElement();
        xmlWriter.writeEndElement();
    }

    // Writes an attribute, declaring the namespace/prefix binding first if
    // the writer does not already know it.
    private void writeAttribute(final String prefix, final String namespace, final String attName,
            final String attValue, final XMLStreamWriter xmlWriter) throws XMLStreamException
    {
        if (xmlWriter.getPrefix(namespace) == null)
        {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    // Ensures a prefix is bound to the namespace on the writer and returns it,
    // generating a fresh prefix when the preferred one is already taken.
    private String registerPrefix(final XMLStreamWriter xmlWriter, final String namespace) throws XMLStreamException
    {
        String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null)
        {
            prefix = OfflinePeriodType.generatePrefix(namespace);
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null)
            {
                prefix = BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /*
     * Builds a pull parser over a flat (QName, value) list in schema order.
     * start, end and reason must be non-null or an ADBException is thrown.
     */
    @Override
    public XMLStreamReader getPullParser(final QName qName) throws ADBException
    {
        final ArrayList<Serializable> elementList = new ArrayList<Serializable>();
        elementList.add(new QName("", "id"));
        elementList.add(ConverterUtil.convertToString(this.id));
        elementList.add(new QName("", "start"));
        if (this.start != null)
        {
            elementList.add(ConverterUtil.convertToString(this.start));
        }
        else
        {
            throw new ADBException("start cannot be null!!");
        }
        elementList.add(new QName("", "end"));
        if (this.end != null)
        {
            elementList.add(ConverterUtil.convertToString(this.end));
        }
        else
        {
            throw new ADBException("end cannot be null!!");
        }
        elementList.add(new QName("", "reason"));
        if (this.reason != null)
        {
            elementList.add(ConverterUtil.convertToString(this.reason));
        }
        else
        {
            throw new ADBException("reason cannot be null!!");
        }
        return new ADBXMLStreamReaderImpl(qName, elementList.toArray(), new Object[0]);
    }

    /**
     * Factory that parses an OfflinePeriodType from an XML stream. The child
     * elements must appear in schema order (id, start, end, reason); any
     * unexpected element aborts parsing with an ADBException.
     */
    public static class Factory
    {
        public static OfflinePeriodType parse(final XMLStreamReader reader) throws Exception
        {
            final OfflinePeriodType object = new OfflinePeriodType();
            try
            {
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                // An xsi:type naming a different type delegates to the mapper
                // so schema extensions deserialize into their own bean.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null)
                {
                    final String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName != null)
                    {
                        String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1)
                        {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;
                        final String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"OfflinePeriodType".equals(type))
                        {
                            final String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (OfflinePeriodType) ExtensionMapper.getTypeObject(nsUri, type, reader);
                        }
                    }
                }
                reader.next();
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "id").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setId(ConverterUtil.convertToInt(content));
                    reader.next();
                }
                else
                {
                    throw new ADBException("Unexpected subelement " + reader.getLocalName());
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "start").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setStart(ConverterUtil.convertToDateTime(content));
                    reader.next();
                }
                else
                {
                    throw new ADBException("Unexpected subelement " + reader.getLocalName());
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "end").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setEnd(ConverterUtil.convertToDateTime(content));
                    reader.next();
                }
                else
                {
                    throw new ADBException("Unexpected subelement " + reader.getLocalName());
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "reason").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setReason(ConverterUtil.convertToString(content));
                    reader.next();
                }
                else
                {
                    throw new ADBException("Unexpected subelement " + reader.getLocalName());
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement())
                {
                    throw new ADBException("Unexpected subelement " + reader.getLocalName());
                }
            }
            catch (final XMLStreamException e)
            {
                throw new Exception(e);
            }
            return object;
        }
    }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.modelcompiler;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.drools.core.ClockType;
import org.drools.modelcompiler.domain.StockFact;
import org.drools.modelcompiler.domain.StockTick;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.definition.type.FactType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.conf.ClockTypeOption;
import org.kie.api.time.SessionPseudoClock;
import static org.junit.Assert.assertEquals;
public class CepTest extends BaseModelTest {
// Parameterized by BaseModelTest so every CEP test runs once per RUN_TYPE.
public CepTest( RUN_TYPE testRunType ) {
    super( testRunType );
}
/**
 * Builds a module model with one default kie-base ("kb") in STREAM event
 * processing mode whose default session ("ks") uses the pseudo clock, so
 * tests can advance time manually.
 */
public static KieModuleModel getCepKieModuleModel() {
    KieModuleModel module = KieServices.get().newKieModuleModel();
    module.newKieBaseModel( "kb" )
          .setDefault( true )
          .setEventProcessingMode( EventProcessingOption.STREAM )
          .newKieSessionModel( "ks" )
          .setDefault( true )
          .setClockType( ClockTypeOption.get( ClockType.PSEUDO_CLOCK.getId() ) );
    return module;
}
/**
 * An ACME tick 6s after a DROO tick falls inside the after[5s,8s] window
 * and fires; a second ACME tick at 10s falls outside and must not fire.
 */
@Test
public void testAfter() throws Exception {
    String drl =
            "import " + StockTick.class.getCanonicalName() + ";" +
            "rule R when\n" +
            " $a : StockTick( company == \"DROO\" )\n" +
            " $b : StockTick( company == \"ACME\", this after[5s,8s] $a )\n" +
            "then\n" +
            " System.out.println(\"fired\");\n" +
            "end\n";

    KieSession session = getKieSession( getCepKieModuleModel(), drl );
    SessionPseudoClock pseudoClock = session.getSessionClock();

    session.insert( new StockTick( "DROO" ) );
    pseudoClock.advanceTime( 6, TimeUnit.SECONDS ); // inside [5s,8s]
    session.insert( new StockTick( "ACME" ) );
    assertEquals( 1, session.fireAllRules() );

    pseudoClock.advanceTime( 4, TimeUnit.SECONDS ); // 10s total, outside window
    session.insert( new StockTick( "ACME" ) );
    assertEquals( 0, session.fireAllRules() );
}
/**
 * Mirror of testAfter with the temporal constraint negated: an ACME tick
 * inside the after[5s,8s] window must NOT fire, one outside it must.
 */
@Test
public void testNegatedAfter() throws Exception {
    String drl =
            "import " + StockTick.class.getCanonicalName() + ";" +
            "rule R when\n" +
            " $a : StockTick( company == \"DROO\" )\n" +
            " $b : StockTick( company == \"ACME\", this not after[5s,8s] $a )\n" +
            "then\n" +
            " System.out.println(\"fired\");\n" +
            "end\n";

    KieSession session = getKieSession( getCepKieModuleModel(), drl );
    SessionPseudoClock pseudoClock = session.getSessionClock();

    session.insert( new StockTick( "DROO" ) );
    pseudoClock.advanceTime( 6, TimeUnit.SECONDS ); // inside window -> negation fails
    session.insert( new StockTick( "ACME" ) );
    assertEquals( 0, session.fireAllRules() );

    pseudoClock.advanceTime( 4, TimeUnit.SECONDS ); // 10s total, outside window -> negation holds
    session.insert( new StockTick( "ACME" ) );
    assertEquals( 1, session.fireAllRules() );
}
/**
 * The after[5s,8s] join must only match ACME ticks arriving on ep2: an
 * in-window ACME on ep1 is ignored, an in-window ACME on ep2 fires, and a
 * late ACME on ep2 does not.
 */
@Test
public void testAfterWithEntryPoints() throws Exception {
    String drl =
            "import " + StockTick.class.getCanonicalName() + ";" +
            "rule R when\n" +
            " $a : StockTick( company == \"DROO\" ) from entry-point ep1\n" +
            " $b : StockTick( company == \"ACME\", this after[5s,8s] $a ) from entry-point ep2\n" +
            "then\n" +
            " System.out.println(\"fired\");\n" +
            "end\n";

    KieSession session = getKieSession( getCepKieModuleModel(), drl );
    SessionPseudoClock pseudoClock = session.getSessionClock();

    session.getEntryPoint( "ep1" ).insert( new StockTick( "DROO" ) );
    pseudoClock.advanceTime( 6, TimeUnit.SECONDS );
    session.getEntryPoint( "ep1" ).insert( new StockTick( "ACME" ) ); // wrong entry point
    assertEquals( 0, session.fireAllRules() );

    pseudoClock.advanceTime( 1, TimeUnit.SECONDS ); // 7s total, inside window
    session.getEntryPoint( "ep2" ).insert( new StockTick( "ACME" ) );
    assertEquals( 1, session.fireAllRules() );

    pseudoClock.advanceTime( 4, TimeUnit.SECONDS ); // 11s total, outside window
    session.getEntryPoint( "ep2" ).insert( new StockTick( "ACME" ) );
    assertEquals( 0, session.fireAllRules() );
}
/**
 * Length-based sliding window: with window:length(2) over DROO ticks, only
 * the two most recent DROO events can activate the rule.
 */
@Test
public void testSlidingWindow() throws Exception {
    String drl =
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "rule R when\n" +
            " $a : StockTick( company == \"DROO\" ) over window:length( 2 )\n" +
            "then\n" +
            " System.out.println(\"fired\");\n" +
            "end\n";

    KieSession session = getKieSession( getCepKieModuleModel(), drl );
    SessionPseudoClock pseudoClock = session.getSessionClock();

    pseudoClock.advanceTime( 1, TimeUnit.SECONDS );
    session.insert( new StockTick( "DROO" ) );
    pseudoClock.advanceTime( 1, TimeUnit.SECONDS );
    session.insert( new StockTick( "DROO" ) );
    pseudoClock.advanceTime( 1, TimeUnit.SECONDS );
    session.insert( new StockTick( "ACME" ) ); // never matches the pattern
    pseudoClock.advanceTime( 1, TimeUnit.SECONDS );
    session.insert( new StockTick( "DROO" ) );

    // Only the last two DROO ticks are in the window.
    assertEquals( 2, session.fireAllRules() );
}
// not( ... after[5s,8s] ... ): rule fires only when no ACME follows DROO inside the window.
@Test
public void testNotAfter() throws Exception {
String str =
"import " + StockTick.class.getCanonicalName() + ";" +
"rule R when\n" +
"  $a : StockTick( company == \"DROO\" )\n" +
"  not( StockTick( company == \"ACME\", this after[5s,8s] $a ) )\n" +
"then\n" +
"  System.out.println(\"fired\");\n" +
"end\n";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
ksession.insert( new StockTick("DROO") );
clock.advanceTime( 6, TimeUnit.SECONDS );
// ACME at +6s is inside the window, so the 'not' CE is violated for the first DROO.
ksession.insert( new StockTick("ACME") );
clock.advanceTime( 10, TimeUnit.SECONDS );
assertEquals(0, ksession.fireAllRules());
ksession.insert( new StockTick("DROO") );
clock.advanceTime( 3, TimeUnit.SECONDS );
// ACME at +3s is before the window opens; once 10 more seconds pass the window closes empty => fires.
ksession.insert( new StockTick("ACME") );
clock.advanceTime( 10, TimeUnit.SECONDS );
assertEquals(1, ksession.fireAllRules());
}
// Declared time-based window: 'from window DeclaredWindow' sees DROO ticks from the last 5s only.
@Test
public void testDeclaredSlidingWindow() throws Exception {
String str =
"import " + StockTick.class.getCanonicalName() + ";\n" +
"declare window DeclaredWindow\n" +
"    StockTick( company == \"DROO\" ) over window:time( 5s )\n" +
"end\n" +
"rule R when\n" +
"    $a : StockTick() from window DeclaredWindow\n" +
"then\n" +
"  System.out.println($a.getCompany());\n" +
"end\n";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
clock.advanceTime( 2, TimeUnit.SECONDS );
// This first DROO (t=2s) will be older than 5s by the time rules fire at t=8s.
ksession.insert( new StockTick("DROO") );
clock.advanceTime( 2, TimeUnit.SECONDS );
ksession.insert( new StockTick("DROO") );
clock.advanceTime( 2, TimeUnit.SECONDS );
// ACME is filtered out by the window pattern itself.
ksession.insert( new StockTick("ACME") );
clock.advanceTime( 2, TimeUnit.SECONDS );
ksession.insert( new StockTick("DROO") );
// Only the DROO ticks at t=4s and t=8s are still inside the 5s window.
assertEquals(2, ksession.fireAllRules());
}
// Declared window over a JDK type (String) promoted to an event via a type declaration.
@Test
public void testDeclaredSlidingWindowOnEventInTypeDeclaration() throws Exception {
String str =
"declare String\n" +
"  @role( event )\n" +
"end\n" +
"declare window DeclaredWindow\n" +
"    String( ) over window:time( 5s )\n" +
"end\n" +
"rule R when\n" +
"    $a : String( this == \"DROO\" ) from window DeclaredWindow\n" +
"then\n" +
"  System.out.println($a);\n" +
"end\n";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
// Both events are inside the 5s window; only "DROO" passes the rule's constraint.
ksession.insert( "ACME" );
ksession.insert( "DROO" );
assertEquals(1, ksession.fireAllRules());
}
// Declared window whose pattern has two constraints (length == 4 AND startsWith("D")).
@Test
public void testDeclaredSlidingWindowWith2Arguments() throws Exception {
String str =
"declare String\n" +
"  @role( event )\n" +
"end\n" +
"declare window DeclaredWindow\n" +
"    String( length == 4, this.startsWith(\"D\") ) over window:time( 5s )\n" +
"end\n" +
"rule R when\n" +
"    $a : String() from window DeclaredWindow\n" +
"then\n" +
"  System.out.println($a);\n" +
"end\n";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
// "ACME" fails the startsWith("D") constraint; only "DROO" enters the window.
ksession.insert( "ACME" );
ksession.insert( "DROO" );
assertEquals(1, ksession.fireAllRules());
}
// Same after[5s,8s] scenario as above but the event role is assigned by declaration to StockFact.
@Test
public void testWithDeclaredEvent() throws Exception {
String str =
"import " + StockFact.class.getCanonicalName() + ";\n" +
"declare StockFact @role( event ) end;\n" +
"rule R when\n" +
"  $a : StockFact( company == \"DROO\" )\n" +
"  $b : StockFact( company == \"ACME\", this after[5s,8s] $a )\n" +
"then\n" +
"  System.out.println(\"fired\");\n" +
"end\n";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
ksession.insert( new StockFact( "DROO" ) );
clock.advanceTime( 6, TimeUnit.SECONDS );
// +6s is inside [5s,8s] => fires.
ksession.insert( new StockFact( "ACME" ) );
assertEquals( 1, ksession.fireAllRules() );
clock.advanceTime( 4, TimeUnit.SECONDS );
// +10s is past the window => no fire.
ksession.insert( new StockFact( "ACME" ) );
assertEquals( 0, ksession.fireAllRules() );
}
// 'after' must be measured from the END timestamp of $a (start + @duration), not its start.
@Test
public void testExpireEventOnEndTimestamp() throws Exception {
String str =
"package org.drools.compiler;\n" +
"import " + StockTick.class.getCanonicalName() + ";\n" +
"global java.util.List resultsAfter;\n" +
"\n" +
"rule \"after[60,80]\"\n" +
"when\n" +
"$a : StockTick( company == \"DROO\" )\n" +
"$b : StockTick( company == \"ACME\", this after[60,80] $a )\n" +
"then\n" +
"       resultsAfter.add( $b );\n" +
"end";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
List<StockTick> resultsAfter = new ArrayList<StockTick>();
ksession.setGlobal("resultsAfter", resultsAfter);
// inserting new StockTick with duration 30 at time 0 => rule
// after[60,80] should fire when ACME lasts at 100-120
ksession.insert(new StockTick("DROO", 30));
clock.advanceTime(100, TimeUnit.MILLISECONDS);
ksession.insert(new StockTick("ACME", 20));
ksession.fireAllRules();
assertEquals(1, resultsAfter.size());
}
// Same end-timestamp scenario, but @role(event)/@duration are attached via a type declaration.
@Test
public void testExpireEventOnEndTimestampWithDeclaredEvent() throws Exception {
String str =
"package org.drools.compiler;\n" +
"import " + StockFact.class.getCanonicalName() + ";\n" +
"global java.util.List resultsAfter;\n" +
"\n" +
"declare StockFact\n" +
"    @role( event )\n" +
"    @duration( duration )\n" +
"end\n" +
"\n" +
"rule \"after[60,80]\"\n" +
"when\n" +
"$a : StockFact( company == \"DROO\" )\n" +
"$b : StockFact( company == \"ACME\", this after[60,80] $a )\n" +
"then\n" +
"       resultsAfter.add( $b );\n" +
"end";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
List<StockTick> resultsAfter = new ArrayList<StockTick>();
ksession.setGlobal("resultsAfter", resultsAfter);
// inserting new StockTick with duration 30 at time 0 => rule
// after[60,80] should fire when ACME lasts at 100-120
ksession.insert(new StockFact("DROO", 30));
clock.advanceTime(100, TimeUnit.MILLISECONDS);
ksession.insert(new StockFact("ACME", 20));
ksession.fireAllRules();
assertEquals(1, resultsAfter.size());
}
// @expires(2s, TIME_SOFT): the event is retracted from working memory once 2s have elapsed.
@Test
public void testExpires() throws Exception {
String str =
"package org.drools.compiler;\n" +
"import " + StockFact.class.getCanonicalName() + ";\n" +
"\n" +
"declare StockFact\n" +
"    @role( value = event )\n" +
"    @expires( value = 2s, policy = TIME_SOFT )\n" +
"end\n" +
"\n" +
"rule \"expiration\"\n" +
"when\n" +
"   StockFact( company == \"DROO\" )\n" +
"then\n" +
"end";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
ksession.insert(new StockFact("DROO"));
clock.advanceTime(1, TimeUnit.SECONDS);
ksession.fireAllRules();
// At t=1s the event is still alive.
assertEquals(1, ksession.getObjects().size());
clock.advanceTime(2, TimeUnit.SECONDS);
ksession.fireAllRules();
// At t=3s the 2s expiration has passed and the event is gone.
assertEquals(0, ksession.getObjects().size());
}
// Same expiration behavior for a type declared entirely in DRL, instantiated via FactType.
@Test
public void testDeclareAndExpires() throws Exception {
String str =
"package org.drools.compiler;\n" +
"declare StockFact\n" +
"    @role( value = event )\n" +
"    @expires( value = 2s, policy = TIME_SOFT )\n" +
"    company : String\n" +
"    duration : long\n" +
"end\n" +
"\n" +
"rule \"expiration\"\n" +
"when\n" +
"   StockFact( company == \"DROO\" )\n" +
"then\n" +
"end";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
SessionPseudoClock clock = ksession.getSessionClock();
// The DRL-declared type only exists at runtime; reflectively build an instance.
FactType stockFactType = ksession.getKieBase().getFactType("org.drools.compiler", "StockFact");
Object DROO = stockFactType.newInstance();
stockFactType.set(DROO, "company", "DROO");
ksession.insert(DROO);
clock.advanceTime(1, TimeUnit.SECONDS);
ksession.fireAllRules();
assertEquals(1, ksession.getObjects().size());
clock.advanceTime(2, TimeUnit.SECONDS);
ksession.fireAllRules();
// Event expired after 2s => working memory is empty again.
assertEquals(0, ksession.getObjects().size());
}
// 'not' over an 'and' group combining a windowed pattern and a bound pattern on an entry point;
// with no events inserted the negation holds and the rule fires exactly once.
@Test
public void testNoEvent() {
String str =
"declare BaseEvent\n" +
"  @role(event)\n" +
"end\n" +
"\n" +
"declare Event extends BaseEvent\n" +
"  @role(event)\n" +
"  property : String\n" +
"end\n" +
"\n" +
"declare NotEvent extends BaseEvent\n" +
"  @role(event)\n" +
"  property : String\n" +
"end\n" +
"\n" +
"rule \"not equal\" when\n" +
"    not (\n" +
"      ( and\n" +
"          $e : BaseEvent( ) over window:length(3) from entry-point entryPoint\n" +
"          NotEvent( this == $e, property == \"value\" ) from entry-point entryPoint\n" +
"      )\n" +
"    )\n" +
"then\n" +
"\n" +
"end";
KieSession ksession = getKieSession(getCepKieModuleModel(), str);
assertEquals( 1, ksession.fireAllRules() );
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util;
import com.intellij.openapi.util.Comparing;
import com.intellij.util.text.CharArrayCharSequence;
import gnu.trove.Equality;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.lang.reflect.Array;
import java.util.*;
@SuppressWarnings("MethodOverridesStaticMethodOfSuperclass")
public final class ArrayUtil extends ArrayUtilRt {
// Shared zero-length instances — safe to expose because zero-length arrays are effectively immutable.
public static final char[] EMPTY_CHAR_ARRAY = ArrayUtilRt.EMPTY_CHAR_ARRAY;
public static final byte[] EMPTY_BYTE_ARRAY = ArrayUtilRt.EMPTY_BYTE_ARRAY;
public static final int[] EMPTY_INT_ARRAY = ArrayUtilRt.EMPTY_INT_ARRAY;
public static final Object[] EMPTY_OBJECT_ARRAY = ArrayUtilRt.EMPTY_OBJECT_ARRAY;
public static final String[] EMPTY_STRING_ARRAY = ArrayUtilRt.EMPTY_STRING_ARRAY;
public static final Class[] EMPTY_CLASS_ARRAY = ArrayUtilRt.EMPTY_CLASS_ARRAY;
public static final long[] EMPTY_LONG_ARRAY = ArrayUtilRt.EMPTY_LONG_ARRAY;
public static final File[] EMPTY_FILE_ARRAY = ArrayUtilRt.EMPTY_FILE_ARRAY;
public static final CharSequence EMPTY_CHAR_SEQUENCE = new CharArrayCharSequence(ArrayUtilRt.EMPTY_CHAR_ARRAY);
// Reusable factories delegating to the zero-size-aware newXxxArray methods below.
public static final ArrayFactory<String> STRING_ARRAY_FACTORY = ArrayUtil::newStringArray;
public static final ArrayFactory<Object> OBJECT_ARRAY_FACTORY = ArrayUtil::newObjectArray;
// Static utility class — no instances.
private ArrayUtil() { }
/** Returns {@code array} itself if it already has {@code newSize} elements, otherwise a resized copy (shared empty instance for size 0). */
@Contract(pure=true)
public static byte @NotNull [] realloc(byte @NotNull [] array, final int newSize) {
  if (newSize == 0) return ArrayUtilRt.EMPTY_BYTE_ARRAY;
  if (array.length == newSize) return array;
  return Arrays.copyOf(array, newSize);
}
/** Returns {@code array} itself if it already has {@code newSize} elements, otherwise a resized copy (shared empty instance for size 0). */
@Contract(pure=true)
public static boolean @NotNull [] realloc(boolean @NotNull [] array, final int newSize) {
  if (newSize == 0) return ArrayUtilRt.EMPTY_BOOLEAN_ARRAY;
  if (array.length == newSize) return array;
  return Arrays.copyOf(array, newSize);
}
/** Returns {@code array} itself if it already has {@code newSize} elements, otherwise a resized copy (shared empty instance for size 0). */
@Contract(pure=true)
public static short @NotNull [] realloc(short @NotNull [] array, final int newSize) {
  if (newSize == 0) return ArrayUtilRt.EMPTY_SHORT_ARRAY;
  if (array.length == newSize) return array;
  return Arrays.copyOf(array, newSize);
}
/** Returns {@code array} itself if it already has {@code newSize} elements, otherwise a resized copy (shared empty instance for size 0). */
@Contract(pure=true)
public static long @NotNull [] realloc(long @NotNull [] array, int newSize) {
  if (newSize == 0) return EMPTY_LONG_ARRAY;
  if (array.length == newSize) return array;
  return Arrays.copyOf(array, newSize);
}
/** Returns {@code array} itself if it already has {@code newSize} elements, otherwise a resized copy (shared empty instance for size 0). */
@Contract(pure=true)
public static int @NotNull [] realloc(int @NotNull [] array, final int newSize) {
  if (newSize == 0) return ArrayUtilRt.EMPTY_INT_ARRAY;
  if (array.length == newSize) return array;
  return Arrays.copyOf(array, newSize);
}
/**
 * Returns {@code array} itself if it already has {@code newSize} elements; otherwise allocates a new
 * array via {@code factory} and copies over as many elements as fit.
 */
@Contract(pure=true)
public static <T> T @NotNull [] realloc(T @NotNull [] array, final int newSize, @NotNull ArrayFactory<? extends T> factory) {
  final int currentSize = array.length;
  if (currentSize == newSize) return array;
  T[] resized = factory.create(newSize);
  if (newSize > 0) {
    System.arraycopy(array, 0, resized, 0, Math.min(currentSize, newSize));
  }
  return resized;
}
/** Returns a copy of {@code array} one element longer, with {@code value} appended at the end. */
@Contract(pure=true)
public static long @NotNull [] append(long @NotNull [] array, long value) {
  final long[] grown = Arrays.copyOf(array, array.length + 1);
  grown[grown.length - 1] = value;
  return grown;
}
/** Returns a copy of {@code array} one element longer, with {@code value} appended at the end. */
@Contract(pure=true)
public static int @NotNull [] append(int @NotNull [] array, int value) {
  final int[] grown = Arrays.copyOf(array, array.length + 1);
  grown[grown.length - 1] = value;
  return grown;
}
/** Returns a copy of {@code array} with {@code value} inserted at position {@code index}. */
@Contract(pure=true)
public static <T> T @NotNull [] insert(T @NotNull [] array, int index, T value) {
  T[] expanded = newArray(getComponentType(array), array.length + 1);
  System.arraycopy(array, 0, expanded, 0, index);
  System.arraycopy(array, index, expanded, index + 1, array.length - index);
  expanded[index] = value;
  return expanded;
}
/** Returns a copy of {@code array} with {@code value} inserted at position {@code index}. */
@Contract(pure=true)
public static int @NotNull [] insert(int @NotNull [] array, int index, int value) {
  final int[] expanded = new int[array.length + 1];
  System.arraycopy(array, 0, expanded, 0, index);
  System.arraycopy(array, index, expanded, index + 1, array.length - index);
  expanded[index] = value;
  return expanded;
}
/** Returns a copy of {@code array} one element longer, with {@code value} appended at the end. */
@Contract(pure=true)
public static byte @NotNull [] append(byte @NotNull [] array, byte value) {
  final byte[] grown = Arrays.copyOf(array, array.length + 1);
  grown[grown.length - 1] = value;
  return grown;
}
/** Returns a copy of {@code array} one element longer, with {@code value} appended at the end. */
@Contract(pure=true)
public static boolean @NotNull [] append(boolean @NotNull [] array, boolean value) {
  final boolean[] grown = Arrays.copyOf(array, array.length + 1);
  grown[grown.length - 1] = value;
  return grown;
}
/** Returns {@code array} itself if it already has {@code newSize} elements, otherwise a resized copy (shared empty instance for size 0). */
@Contract(pure=true)
public static char @NotNull [] realloc(char @NotNull [] array, final int newSize) {
  if (newSize == 0) return ArrayUtilRt.EMPTY_CHAR_ARRAY;
  if (array.length == newSize) return array;
  return Arrays.copyOf(array, newSize);
}
/** Copies {@code collection} into a freshly allocated array with component type {@code aClass}. */
@Contract(pure=true)
public static <T> T @NotNull [] toObjectArray(@NotNull Collection<? extends T> collection, @NotNull Class<T> aClass) {
  return collection.toArray(newArray(aClass, collection.size()));
}
/** Copies {@code source} into a freshly allocated array with component type {@code aClass} (throws ArrayStoreException on incompatible elements). */
@Contract(pure=true)
public static <T> T @NotNull [] toObjectArray(@NotNull Class<T> aClass, Object @NotNull ... source) {
  T[] typed = newArray(aClass, source.length);
  //noinspection SuspiciousSystemArraycopy
  System.arraycopy(source, 0, typed, 0, typed.length);
  return typed;
}
// Copies the collection into an Object[]; passing the shared empty array avoids an extra allocation for empty collections.
@Contract(pure=true)
public static Object @NotNull [] toObjectArray(@NotNull Collection<?> collection) {
return collection.toArray(ArrayUtilRt.EMPTY_OBJECT_ARRAY);
}
/** Unboxes {@code list} into an int[] in iteration order; throws NPE on a null element. */
@Contract(pure=true)
public static int @NotNull [] toIntArray(@NotNull Collection<Integer> list) {
  final int[] unboxed = newIntArray(list.size());
  int idx = 0;
  for (Integer boxed : list) {
    unboxed[idx] = boxed.intValue();
    idx++;
  }
  return unboxed;
}
/**
 * Concatenates two arrays; returns one of the inputs unchanged when the other is empty.
 * The result's component type is the more general of the two runtime component types.
 */
@Contract(pure=true)
public static <T> T @NotNull [] mergeArrays(T @NotNull [] a1, T @NotNull [] a2) {
  if (a1.length == 0) return a2;
  if (a2.length == 0) return a1;
  final Class<T> c1 = getComponentType(a1);
  final Class<T> c2 = getComponentType(a2);
  final Class<T> resultType = c1.isAssignableFrom(c2) ? c1 : c2;
  T[] merged = newArray(resultType, a1.length + a2.length);
  System.arraycopy(a1, 0, merged, 0, a1.length);
  System.arraycopy(a2, 0, merged, a1.length, a2.length);
  return merged;
}
/** Copies the elements of {@code c1} followed by those of {@code c2} into an array created by {@code factory}. */
@Contract(pure=true)
public static <T> T @NotNull [] mergeCollections(@NotNull Collection<? extends T> c1, @NotNull Collection<? extends T> c2, @NotNull ArrayFactory<? extends T> factory) {
  T[] target = factory.create(c1.size() + c2.size());
  int idx = 0;
  for (T item : c1) {
    target[idx++] = item;
  }
  for (T item : c2) {
    target[idx++] = item;
  }
  return target;
}
/** Concatenates two arrays using {@code factory} for the result; returns one input unchanged when the other is empty. */
@Contract(pure=true)
public static <T> T @NotNull [] mergeArrays(T @NotNull [] a1, T @NotNull [] a2, @NotNull ArrayFactory<? extends T> factory) {
  if (a1.length == 0) return a2;
  if (a2.length == 0) return a1;
  T[] merged = factory.create(a1.length + a2.length);
  System.arraycopy(a1, 0, merged, 0, a1.length);
  System.arraycopy(a2, 0, merged, a1.length, a2.length);
  return merged;
}
// Convenience overload: concatenates String arrays via the shared STRING_ARRAY_FACTORY.
@Contract(pure=true)
public static String @NotNull [] mergeArrays(String @NotNull [] a1, String @NotNull ... a2) {
return mergeArrays(a1, a2, STRING_ARRAY_FACTORY);
}
/** Concatenates two int arrays; returns one input unchanged when the other is empty. */
@Contract(pure=true)
public static int @NotNull [] mergeArrays(int @NotNull [] a1, int @NotNull [] a2) {
  if (a1.length == 0) return a2;
  if (a2.length == 0) return a1;
  final int[] merged = Arrays.copyOf(a1, a1.length + a2.length);
  System.arraycopy(a2, 0, merged, a1.length, a2.length);
  return merged;
}
/** Concatenates two byte arrays; returns one input unchanged when the other is empty. */
@Contract(pure=true)
public static byte @NotNull [] mergeArrays(byte @NotNull [] a1, byte @NotNull [] a2) {
  if (a1.length == 0) return a2;
  if (a2.length == 0) return a1;
  final byte[] merged = Arrays.copyOf(a1, a1.length + a2.length);
  System.arraycopy(a2, 0, merged, a1.length, a2.length);
  return merged;
}
/**
 * Allocates new array of size {@code array.length + collection.size()} and copies elements of {@code array} and
 * {@code collection} to it.
 *
 * @param array source array
 * @param collection source collection
 * @param factory array factory used to create destination array of type {@code T}
 * @return destination array
 * @throws RuntimeException if the collection contains elements incompatible with the factory's component type
 */
@Contract(pure=true)
public static <T> T @NotNull [] mergeArrayAndCollection(T @NotNull [] array,
@NotNull Collection<? extends T> collection,
@NotNull final ArrayFactory<? extends T> factory) {
if (collection.isEmpty()) {
return array;
}
final T[] array2;
try {
// toArray may throw ArrayStoreException if an element is not assignable to T; wrap with context.
T[] a = factory.create(collection.size());
array2 = collection.toArray(a);
}
catch (ArrayStoreException e) {
throw new RuntimeException("Bad elements in collection: " + collection, e);
}
if (array.length == 0) {
return array2;
}
final T[] result = factory.create(array.length + collection.size());
System.arraycopy(array, 0, result, 0, array.length);
System.arraycopy(array2, 0, result, array.length, array2.length);
return result;
}
/**
 * Appends {@code element} to the {@code src} array. As you can
 * imagine the appended element will be the last one in the returned result.
 *
 * @param src array to which the {@code element} should be appended.
 * @param element object to be appended to the end of {@code src} array.
 * @return new array
 */
@Contract(pure=true)
public static <T> T @NotNull [] append(final T @NotNull [] src, @Nullable final T element) {
// Delegates using the runtime component type of src for the result array.
return append(src, element, getComponentType(src));
}
// Returns a new array with element placed in front of array's elements; component type is taken from array.
@Contract(pure=true)
public static <T> T @NotNull [] prepend(final T element, final T @NotNull [] array) {
return prepend(element, array, getComponentType(array));
}
/** Returns a new array of component type {@code type} with {@code element} first, followed by all of {@code array}. */
@Contract(pure=true)
public static <T> T @NotNull [] prepend(T element, T @NotNull [] array, @NotNull Class<T> type) {
  T[] prepended = newArray(type, array.length + 1);
  prepended[0] = element;
  System.arraycopy(array, 0, prepended, 1, array.length);
  return prepended;
}
/** Returns a new array created by {@code factory} with {@code element} first, followed by all of {@code src}. */
@Contract(pure=true)
public static <T> T @NotNull [] prepend(final T element, final T @NotNull [] src, @NotNull ArrayFactory<? extends T> factory) {
  T[] prepended = factory.create(src.length + 1);
  prepended[0] = element;
  System.arraycopy(src, 0, prepended, 1, src.length);
  return prepended;
}
/** Returns a new byte array with {@code element} first, followed by all of {@code array}. */
@Contract(pure=true)
public static byte @NotNull [] prepend(byte element, byte @NotNull [] array) {
  final byte[] prepended = new byte[array.length + 1];
  System.arraycopy(array, 0, prepended, 1, array.length);
  prepended[0] = element;
  return prepended;
}
/** Returns a new array created by {@code factory} containing all of {@code src} followed by {@code element}. */
@Contract(pure=true)
public static <T> T @NotNull [] append(final T @NotNull [] src, final T element, @NotNull ArrayFactory<? extends T> factory) {
  final int n = src.length;
  T[] appended = factory.create(n + 1);
  System.arraycopy(src, 0, appended, 0, n);
  appended[n] = element;
  return appended;
}
/** Returns a new array of {@code componentType} containing all of {@code src} followed by {@code element}. */
@Contract(pure=true)
public static <T> T @NotNull [] append(T @NotNull [] src, @Nullable final T element, @NotNull Class<T> componentType) {
  final int n = src.length;
  T[] appended = newArray(componentType, n + 1);
  System.arraycopy(src, 0, appended, 0, n);
  appended[n] = element;
  return appended;
}
/**
 * Removes element with index {@code idx} from array {@code src}.
 *
 * @param src array.
 * @param idx index of element to be removed.
 * @return modified array.
 * @throws IllegalArgumentException if {@code idx} is out of bounds
 */
@Contract(pure=true)
public static <T> T @NotNull [] remove(final T @NotNull [] src, int idx) {
  final int n = src.length;
  if (idx < 0 || idx >= n) {
    throw new IllegalArgumentException("invalid index: " + idx);
  }
  T[] shrunk = newArray(getComponentType(src), n - 1);
  System.arraycopy(src, 0, shrunk, 0, idx);
  System.arraycopy(src, idx + 1, shrunk, idx, n - idx - 1);
  return shrunk;
}
// Reflectively allocates a T[] of the given length; the cast is safe because Array.newInstance
// returns an array whose component type is exactly {@code type}.
public static <T> T @NotNull [] newArray(@NotNull Class<T> type, int length) {
//noinspection unchecked
return (T[])Array.newInstance(type, length);
}
/**
 * Returns a copy of {@code src}, created by {@code factory}, with the element at {@code idx} removed.
 * @throws IllegalArgumentException if {@code idx} is out of bounds
 */
@Contract(pure=true)
public static <T> T @NotNull [] remove(final T @NotNull [] src, int idx, @NotNull ArrayFactory<? extends T> factory) {
  final int n = src.length;
  if (idx < 0 || idx >= n) {
    throw new IllegalArgumentException("invalid index: " + idx);
  }
  T[] shrunk = factory.create(n - 1);
  System.arraycopy(src, 0, shrunk, 0, idx);
  System.arraycopy(src, idx + 1, shrunk, idx, n - idx - 1);
  return shrunk;
}
/** Removes the first occurrence of {@code element}; returns {@code src} unchanged when it is absent. */
@Contract(pure=true)
public static <T> T @NotNull [] remove(final T @NotNull [] src, T element) {
  final int position = find(src, element);
  return position == -1 ? src : remove(src, position);
}
/** Removes the first occurrence of {@code element} using {@code factory}; returns {@code src} unchanged when it is absent. */
@Contract(pure=true)
public static <T> T @NotNull [] remove(final T @NotNull [] src, T element, @NotNull ArrayFactory<? extends T> factory) {
  final int position = find(src, element);
  return position == -1 ? src : remove(src, position, factory);
}
/**
 * Returns a copy of {@code src} with the element at {@code idx} removed.
 * @throws IllegalArgumentException if {@code idx} is out of bounds
 */
@Contract(pure=true)
public static int @NotNull [] remove(final int @NotNull [] src, int idx) {
  final int n = src.length;
  if (idx < 0 || idx >= n) {
    throw new IllegalArgumentException("invalid index: " + idx);
  }
  int[] shrunk = newIntArray(n - 1);
  System.arraycopy(src, 0, shrunk, 0, idx);
  System.arraycopy(src, idx + 1, shrunk, idx, n - idx - 1);
  return shrunk;
}
/**
 * Returns a copy of {@code src} with the element at {@code idx} removed (shared empty instance when the result is empty).
 * @throws IllegalArgumentException if {@code idx} is out of bounds
 */
@Contract(pure=true)
public static short @NotNull [] remove(final short @NotNull [] src, int idx) {
  final int n = src.length;
  if (idx < 0 || idx >= n) {
    throw new IllegalArgumentException("invalid index: " + idx);
  }
  short[] shrunk = n == 1 ? ArrayUtilRt.EMPTY_SHORT_ARRAY : new short[n - 1];
  System.arraycopy(src, 0, shrunk, 0, idx);
  System.arraycopy(src, idx + 1, shrunk, idx, n - idx - 1);
  return shrunk;
}
// Alias for indexOf(int[], int); returns -1 when absent.
@Contract(pure=true)
public static int find(int @NotNull [] src, int obj) {
return indexOf(src, obj);
}
// Alias for ArrayUtilRt.find: index of the first element equal to obj, or -1 when absent.
@Contract(pure=true)
public static <T> int find(final T @NotNull [] src, final T obj) {
return ArrayUtilRt.find(src, obj);
}
/** Returns {@code true} when {@code array} begins with the full contents of {@code prefix}. */
@Contract(pure=true)
public static boolean startsWith(byte @NotNull [] array, byte @NotNull [] prefix) {
  //noinspection ArrayEquality
  if (array == prefix) return true;
  final int prefixLength = prefix.length;
  if (array.length < prefixLength) return false;
  for (int i = 0; i < prefixLength; i++) {
    if (array[i] != prefix[i]) return false;
  }
  return true;
}
/** Returns {@code true} when {@code array} begins with the full contents of {@code subArray} (null-safe element comparison). */
@Contract(pure=true)
public static <E> boolean startsWith(E @NotNull [] array, E @NotNull [] subArray) {
  //noinspection ArrayEquality
  if (array == subArray) return true;
  final int prefixLength = subArray.length;
  if (array.length < prefixLength) return false;
  for (int i = 0; i < prefixLength; i++) {
    if (!Comparing.equal(array[i], subArray[i])) return false;
  }
  return true;
}
/** Returns {@code true} when {@code array} contains the full contents of {@code subArray} starting at offset {@code start}. */
@Contract(pure=true)
public static boolean startsWith(byte @NotNull [] array, int start, byte @NotNull [] subArray) {
  final int subLength = subArray.length;
  if (array.length - start < subLength) return false;
  for (int i = 0; i < subLength; i++) {
    if (array[start + i] != subArray[i]) return false;
  }
  return true;
}
/** Element-wise equality of two arrays using the supplied {@code Equality}; identical references short-circuit to {@code true}. */
@Contract(pure=true)
public static <T> boolean equals(T @NotNull [] a1, T @NotNull [] a2, @NotNull Equality<? super T> comparator) {
  //noinspection ArrayEquality
  if (a1 == a2) return true;
  if (a1.length != a2.length) return false;
  for (int i = 0; i < a2.length; i++) {
    if (!comparator.equals(a1[i], a2[i])) return false;
  }
  return true;
}
/** Element-wise equality of two arrays where elements are considered equal when the comparator returns 0. */
@Contract(pure=true)
public static <T> boolean equals(T @NotNull [] a1, T @NotNull [] a2, @NotNull Comparator<? super T> comparator) {
  //noinspection ArrayEquality
  if (a1 == a2) return true;
  if (a1.length != a2.length) return false;
  for (int i = 0; i < a2.length; i++) {
    if (comparator.compare(a1[i], a2[i]) != 0) return false;
  }
  return true;
}
/** Returns a new array with the elements of {@code array} in reverse order; the input is left untouched. */
@Contract(pure=true)
public static <T> T @NotNull [] reverseArray(T @NotNull [] array) {
  final int n = array.length;
  T[] reversed = array.clone();
  for (int i = 0; i < n; i++) {
    reversed[i] = array[n - 1 - i];
  }
  return reversed;
}
/** Returns a new array with the elements of {@code array} in reverse order; the input is left untouched. */
@Contract(pure=true)
public static int @NotNull [] reverseArray(int @NotNull [] array) {
  final int n = array.length;
  int[] reversed = array.clone();
  for (int i = 0; i < n; i++) {
    reversed[i] = array[n - 1 - i];
  }
  return reversed;
}
// Compares two String arrays element-by-element, CASE-INSENSITIVELY (compareToIgnoreCase),
// with a shorter array ordering before a longer one sharing the same prefix.
@Contract(pure=true)
public static int lexicographicCompare(String @NotNull [] obj1, String @NotNull [] obj2) {
for (int i = 0; i < Math.max(obj1.length, obj2.length); i++) {
// null here means "this array ran out of elements"; the shorter array compares as smaller.
String o1 = i < obj1.length ? obj1[i] : null;
String o2 = i < obj2.length ? obj2[i] : null;
if (o1 == null) return -1;
if (o2 == null) return 1;
int res = o1.compareToIgnoreCase(o2);
if (res != 0) return res;
}
return 0;
}
/** Lexicographic comparison of two int arrays; a shorter array precedes a longer one with the same prefix. */
@Contract(pure=true)
public static int lexicographicCompare(int @NotNull [] obj1, int @NotNull [] obj2) {
  final int common = Math.min(obj1.length, obj2.length);
  for (int i = 0; i < common; i++) {
    int elementOrder = Integer.compare(obj1[i], obj2[i]);
    if (elementOrder != 0) return elementOrder;
  }
  return Integer.compare(obj1.length, obj2.length);
}
//must be Comparables
// Compares two arrays element-by-element via Comparable; a shorter array orders before a
// longer one with the same prefix. Throws ClassCastException if elements are not Comparable.
@Contract(pure=true)
public static <T> int lexicographicCompare(T @NotNull [] obj1, T @NotNull [] obj2) {
for (int i = 0; i < Math.max(obj1.length, obj2.length); i++) {
// null means "this array ran out of elements", not a null element.
T o1 = i < obj1.length ? obj1[i] : null;
T o2 = i < obj2.length ? obj2[i] : null;
if (o1 == null) return -1;
if (o2 == null) return 1;
//noinspection unchecked
int res = ((Comparable<T>)o1).compareTo(o2);
if (res != 0) return res;
}
return 0;
}
/** Exchanges the elements at positions {@code i1} and {@code i2} in place. */
public static <T> void swap(T @NotNull [] array, int i1, int i2) {
  final T held = array[i1];
  array[i1] = array[i2];
  array[i2] = held;
}
/** Exchanges the elements at positions {@code i1} and {@code i2} in place. */
public static void swap(int @NotNull [] array, int i1, int i2) {
  final int held = array[i1];
  array[i1] = array[i2];
  array[i2] = held;
}
/** Exchanges the elements at positions {@code i1} and {@code i2} in place. */
public static void swap(boolean @NotNull [] array, int i1, int i2) {
  final boolean held = array[i1];
  array[i1] = array[i2];
  array[i2] = held;
}
/** Exchanges the elements at positions {@code i1} and {@code i2} in place. */
public static void swap(char @NotNull [] array, int i1, int i2) {
  final char held = array[i1];
  array[i1] = array[i2];
  array[i2] = held;
}
// Shifts elements in the range (i1, i2] one slot toward i1 and moves the old array[i1] to position i2.
public static <T> void rotateLeft(T @NotNull [] array, int i1, int i2) {
final T t = array[i1];
System.arraycopy(array, i1 + 1, array, i1, i2 - i1);
array[i2] = t;
}
// Shifts elements in the range [i1, i2) one slot toward i2 and moves the old array[i2] to position i1.
public static <T> void rotateRight(T @NotNull [] array, int i1, int i2) {
final T t = array[i2];
System.arraycopy(array, i1, array, i1 + 1, i2 - i1);
array[i1] = t;
}
// Index of the first element equal to object (null-safe, delegates to ArrayUtilRt), or -1 when absent.
@Contract(pure=true)
public static int indexOf(Object @NotNull [] objects, @Nullable Object object) {
return ArrayUtilRt.indexOf(objects, object, 0, objects.length);
}
/** Index of the first list element matching {@code object} under {@code comparator}, or -1 when none matches. */
@Contract(pure=true)
public static <T> int indexOf(@NotNull List<? extends T> objects, T object, @NotNull Equality<? super T> comparator) {
  for (int position = 0; position < objects.size(); position++) {
    if (comparator.equals(objects.get(position), object)) {
      return position;
    }
  }
  return -1;
}
/** Index of the first list element comparing equal (compare == 0) to {@code object}, or -1 when none does. */
@Contract(pure=true)
public static <T> int indexOf(@NotNull List<? extends T> objects, T object, @NotNull Comparator<? super T> comparator) {
  for (int position = 0; position < objects.size(); position++) {
    if (comparator.compare(objects.get(position), object) == 0) {
      return position;
    }
  }
  return -1;
}
/** Index of the first array element matching {@code object} under {@code comparator}, or -1 when none matches. */
@Contract(pure=true)
public static <T> int indexOf(T @NotNull [] objects, T object, @NotNull Equality<? super T> comparator) {
  for (int position = 0; position < objects.length; position++) {
    if (comparator.equals(objects[position], object)) {
      return position;
    }
  }
  return -1;
}
/** Index of the first occurrence of {@code value}, or -1 when absent. */
@Contract(pure=true)
public static int indexOf(long @NotNull [] ints, long value) {
  int position = 0;
  for (long candidate : ints) {
    if (candidate == value) return position;
    position++;
  }
  return -1;
}
/** Index of the first occurrence of {@code value}, or -1 when absent. */
@Contract(pure=true)
public static int indexOf(int @NotNull [] ints, int value) {
  int position = 0;
  for (int candidate : ints) {
    if (candidate == value) return position;
    position++;
  }
  return -1;
}
/**
 * Finds the first occurrence of {@code pattern} inside {@code array} at or after {@code startIndex}.
 * A negative {@code startIndex} is treated as 0, mirroring {@link String#indexOf(String, int)};
 * previously a negative value caused an ArrayIndexOutOfBoundsException for a non-empty pattern
 * (or a bogus negative "hit" for an empty pattern).
 *
 * @return index of the first match, or -1 when the pattern does not occur
 */
@Contract(pure = true)
public static int indexOf(byte @NotNull [] array, byte @NotNull [] pattern, int startIndex) {
  for (int i = Math.max(startIndex, 0); i <= array.length - pattern.length; i++) {
    if (startsWith(array, i, pattern)) {
      return i;
    }
  }
  return -1;
}
/** Index of the last element equal to {@code obj} (null-safe), or -1 when absent. */
@Contract(pure=true)
public static <T> int lastIndexOf(final T @NotNull [] src, @Nullable final T obj) {
  for (int i = src.length - 1; i >= 0; i--) {
    // Objects.equals covers both the null/null and non-null cases of the original branching.
    if (Objects.equals(src[i], obj)) {
      return i;
    }
  }
  return -1;
}
/** Index of the last occurrence of {@code obj}, or -1 when absent. */
@Contract(pure=true)
public static int lastIndexOf(final int @NotNull [] src, final int obj) {
  for (int i = src.length - 1; i >= 0; i--) {
    if (src[i] == obj) return i;
  }
  return -1;
}
/** Index of the last element NOT equal to {@code obj}, or -1 when every element equals it. */
@Contract(pure=true)
public static int lastIndexOfNot(final int @NotNull [] src, final int obj) {
  for (int i = src.length - 1; i >= 0; i--) {
    if (src[i] != obj) return i;
  }
  return -1;
}
/** Index of the last array element matching {@code obj} under {@code comparator}, or -1 when none matches. */
@Contract(pure=true)
public static <T> int lastIndexOf(final T @NotNull [] src, final T obj, @NotNull Equality<? super T> comparator) {
  for (int i = src.length - 1; i >= 0; i--) {
    if (comparator.equals(obj, src[i])) {
      return i;
    }
  }
  return -1;
}
/** Index of the last list element matching {@code obj} under {@code comparator}, or -1 when none matches. */
@Contract(pure=true)
public static <T> int lastIndexOf(@NotNull List<? extends T> src, final T obj, @NotNull Equality<? super T> comparator) {
  for (int i = src.size() - 1; i >= 0; i--) {
    if (comparator.equals(obj, src.get(i))) {
      return i;
    }
  }
  return -1;
}
// True when o (possibly null) occurs among objects, using equals-based indexOf.
@SafeVarargs
@Contract(pure=true)
public static <T> boolean contains(@Nullable final T o, T @NotNull ... objects) {
return indexOf(objects, o) >= 0;
}
// String specialization of contains; avoids generic varargs array creation at call sites.
@Contract(pure = true)
public static boolean contains(@Nullable String s, String @NotNull ... strings) {
return indexOf(strings, s) >= 0;
}
// Allocates an int[count]; returns the shared empty instance for count == 0.
@Contract(pure=true)
public static int @NotNull [] newIntArray(int count) {
return count == 0 ? ArrayUtilRt.EMPTY_INT_ARRAY : new int[count];
}
// Allocates a long[count]; returns the shared empty instance for count == 0.
@Contract(pure=true)
public static long @NotNull [] newLongArray(int count) {
return count == 0 ? EMPTY_LONG_ARRAY : new long[count];
}
// Allocates a String[count]; returns the shared empty instance for count == 0.
@Contract(pure=true)
public static String @NotNull [] newStringArray(int count) {
return count == 0 ? ArrayUtilRt.EMPTY_STRING_ARRAY : new String[count];
}
// Allocates an Object[count]; returns the shared empty instance for count == 0.
@Contract(pure=true)
public static Object @NotNull [] newObjectArray(int count) {
return count == 0 ? ArrayUtilRt.EMPTY_OBJECT_ARRAY : new Object[count];
}
/** Returns {@code sample} itself when it already has {@code count} elements, otherwise a new empty-initialized array of that size. */
@Contract(pure=true)
public static <E> E @NotNull [] ensureExactSize(int count, E @NotNull [] sample) {
  return sample.length == count ? sample : newArray(getComponentType(sample), count);
}
/** Returns the first element, or {@code null} when the array is null or empty. */
@Nullable
@Contract(value = "null -> null", pure=true)
public static <T> T getFirstElement(T @Nullable [] array) {
  if (array == null || array.length == 0) return null;
  return array[0];
}
/** Returns the last element, or {@code null} when the array is null or empty. */
@Contract(value = "null -> null", pure=true)
public static <T> T getLastElement(T @Nullable [] array) {
  if (array == null || array.length == 0) return null;
  return array[array.length - 1];
}
/** Returns the last element, or {@code defaultValue} when the array is null or empty. */
@Contract(pure=true)
public static int getLastElement(int @Nullable [] array, int defaultValue) {
  if (array == null || array.length == 0) return defaultValue;
  return array[array.length - 1];
}
// True for both a null reference and a zero-length array.
@Contract(value = "null -> true", pure=true)
public static <T> boolean isEmpty(T @Nullable [] array) {
return array == null || array.length == 0;
}
// Delegates to ArrayUtilRt.toStringArray; accepts null collections.
@Contract(pure=true)
public static String @NotNull [] toStringArray(@Nullable Collection<String> collection) {
return ArrayUtilRt.toStringArray(collection);
}
/** Writes the elements of {@code src}, in iteration order, into {@code dst} starting at {@code dstOffset}. */
public static <T> void copy(@NotNull final Collection<? extends T> src, final T @NotNull [] dst, final int dstOffset) {
  int cursor = dstOffset;
  for (T element : src) {
    dst[cursor] = element;
    cursor++;
  }
}
/** Shallow copy of {@code original}; {@code null} in, {@code null} out. */
@Contract(value = "null -> null; !null -> !null", pure = true)
public static <T> T @Nullable [] copyOf(T @Nullable [] original) {
  return original == null ? null : Arrays.copyOf(original, original.length);
}
/** Copy of {@code original}; {@code null} in, {@code null} out; shared empty instance for a zero-length input. */
@Contract(value = "null -> null; !null -> !null", pure = true)
public static boolean @Nullable [] copyOf(boolean @Nullable [] original) {
  if (original == null) return null;
  if (original.length == 0) return ArrayUtilRt.EMPTY_BOOLEAN_ARRAY;
  return Arrays.copyOf(original, original.length);
}
@Contract(value = "null -> null; !null -> !null", pure = true)
public static int @Nullable [] copyOf(int @Nullable [] original) {
  if (original == null) {
    return null;
  }
  if (original.length == 0) {
    // Share the canonical empty array instead of allocating a new one.
    return ArrayUtilRt.EMPTY_INT_ARRAY;
  }
  return Arrays.copyOf(original, original.length);
}
@Contract(pure = true)
public static <T> T @NotNull [] stripTrailingNulls(T @NotNull [] array) {
  // Only copy when there actually is a trailing null to strip; otherwise return the input.
  boolean hasTrailingNull = array.length != 0 && array[array.length - 1] == null;
  return hasTrailingNull ? Arrays.copyOf(array, trailingNullsIndex(array)) : array;
}
// Index just past the last non-null element; 0 when the array is empty or all-null.
private static <T> int trailingNullsIndex(T @NotNull [] array) {
  int end = array.length;
  while (end > 0 && array[end - 1] == null) {
    end--;
  }
  return end;
}
/**
 * Calculates the average of the median values in the selected part of the array,
 * e.g. for {@code part == 3} returns the average of the middle third.
 * Returns 0 when the selected middle window is empty.
 *
 * Fix: previously this sorted the caller's array in place as a hidden side effect;
 * it now sorts a private copy, leaving the argument untouched.
 */
public static long averageAmongMedians(long @NotNull [] time, int part) {
  assert part >= 1;
  // Sort a clone so the caller's array order is preserved.
  long[] sorted = time.clone();
  Arrays.sort(sorted);
  int n = sorted.length;
  long total = 0;
  int start = n / 2 - n / part / 2;
  int end = n / 2 + n / part / 2;
  for (int i = start; i < end; i++) {
    total += sorted[i];
  }
  int middlePartLength = end - start;
  return middlePartLength == 0 ? 0 : total / middlePartLength;
}
/**
 * Calculates the average of the median values in the selected part of the array,
 * e.g. for {@code part == 3} returns the average of the middle third.
 * Returns 0 when the selected middle window is empty.
 *
 * Fix: previously this sorted the caller's array in place as a hidden side effect;
 * it now sorts a private copy, leaving the argument untouched.
 */
public static long averageAmongMedians(int @NotNull [] time, int part) {
  assert part >= 1;
  // Sort a clone so the caller's array order is preserved.
  int[] sorted = time.clone();
  Arrays.sort(sorted);
  int n = sorted.length;
  long total = 0;
  int start = n / 2 - n / part / 2;
  int end = n / 2 + n / part / 2;
  for (int i = start; i < end; i++) {
    total += sorted[i];
  }
  int middlePartLength = end - start;
  return middlePartLength == 0 ? 0 : total / middlePartLength;
}
@Contract(pure = true)
/** Smallest value in the array; {@code Integer.MAX_VALUE} for an empty array. */
public static int min(int[] values) {
  int result = Integer.MAX_VALUE;
  for (int i = 0; i < values.length; i++) {
    result = Math.min(result, values[i]);
  }
  return result;
}
@Contract(pure = true)
/** Largest value in the array; {@code Integer.MIN_VALUE} for an empty array. */
public static int max(int[] values) {
  int result = Integer.MIN_VALUE;
  for (int i = 0; i < values.length; i++) {
    result = Math.max(result, values[i]);
  }
  return result;
}
/**
 * Merges two ascending-sorted int arrays into a new sorted array.
 * When {@code mergeEqualItems} is true, an element present in both arrays appears only once
 * in the result; when false, both occurrences are kept.
 * Duplicates *within* a single input array are always kept as-is.
 */
@Contract(pure = true)
public static int[] mergeSortedArrays(int[] a1, int[] a2, boolean mergeEqualItems) {
  int newSize = a1.length + a2.length;
  if (newSize == 0) return ArrayUtilRt.EMPTY_INT_ARRAY;
  // r is sized for the no-dedup worst case; o counts the slots actually written.
  int[] r = new int[newSize];
  int o = 0;
  int index1 = 0;
  int index2 = 0;
  while (index1 < a1.length || index2 < a2.length) {
    int e;
    if (index1 >= a1.length) {
      // a1 exhausted: drain the rest of a2.
      e = a2[index2++];
    }
    else if (index2 >= a2.length) {
      // a2 exhausted: drain the rest of a1.
      e = a1[index1++];
    }
    else {
      int element1 = a1[index1];
      int element2 = a2[index2];
      if (element1 == element2) {
        // Equal heads: consume both; emit one copy or two depending on mergeEqualItems.
        index1++;
        index2++;
        if (mergeEqualItems) {
          e = element1;
        }
        else {
          // Keep both copies: write one here, the common tail below writes the other.
          r[o++] = element1;
          e = element2;
        }
      }
      else if (element1 < element2) {
        e = element1;
        index1++;
      }
      else {
        e = element2;
        index2++;
      }
    }
    r[o++] = e;
  }
  // Trim only when merging of equal items dropped elements.
  return o == newSize ? r : Arrays.copyOf(r, o);
}
/**
 * Returns the runtime component type of the given array, typed as {@code Class<T>}.
 * The unchecked cast mirrors the array's reified component type.
 */
@NotNull
public static <T> Class<T> getComponentType(T @NotNull [] collection) {
  //noinspection unchecked
  return (Class<T>)collection.getClass().getComponentType();
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.block;
import alluxio.exception.BlockDoesNotExistException;
import alluxio.exception.ExceptionMessage;
import alluxio.master.block.BlockId;
import alluxio.worker.block.meta.BlockMeta;
import alluxio.worker.block.meta.StorageDirView;
import alluxio.worker.block.meta.StorageTier;
import alluxio.worker.block.meta.StorageTierView;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.concurrent.NotThreadSafe;
/**
 * This class exposes a narrower view of {@link BlockMetadataManager} to Evictors and Allocators,
 * filtering out un-evictable blocks and un-allocatable space internally, so that evictors and
 * allocators can be developed with much simpler logic, without worrying about various constraints,
 * e.g. pinned files, locked blocks, etc.
 *
 * TODO(cc): Filter un-allocatable space.
 */
@NotThreadSafe
public class BlockMetadataManagerView {
  /** The {@link BlockMetadataManager} this view is derived from. */
  private final BlockMetadataManager mMetadataManager;
  /**
   * A list of {@link StorageTierView}, derived from {@link StorageTier}s from the
   * {@link BlockMetadataManager}. Never reassigned after construction.
   */
  private final List<StorageTierView> mTierViews = new ArrayList<>();
  /** A list of pinned inodes. */
  private final Set<Long> mPinnedInodes = new HashSet<>();
  /** Indices of locks that are being used. */
  private final Set<Long> mInUseBlocks = new HashSet<>();
  /** A map from tier alias to {@link StorageTierView}. Never reassigned after construction. */
  private final Map<String, StorageTierView> mAliasToTierViews = new HashMap<>();

  /**
   * Creates a new instance of {@link BlockMetadataManagerView}. Now we always create a new view
   * before freespace.
   *
   * @param manager which the view should be constructed from
   * @param pinnedInodes a set of pinned inodes
   * @param lockedBlocks a set of locked blocks
   */
  // TODO(qifan): Incrementally update the view.
  public BlockMetadataManagerView(BlockMetadataManager manager, Set<Long> pinnedInodes,
      Set<Long> lockedBlocks) {
    // Uniform null-check style with argument names in the failure message.
    mMetadataManager = Preconditions.checkNotNull(manager, "manager");
    mPinnedInodes.addAll(Preconditions.checkNotNull(pinnedInodes, "pinnedInodes"));
    mInUseBlocks.addAll(Preconditions.checkNotNull(lockedBlocks, "lockedBlocks"));
    // iteratively create all StorageTierViews and StorageDirViews
    for (StorageTier tier : manager.getTiers()) {
      StorageTierView tierView = new StorageTierView(tier, this);
      mTierViews.add(tierView);
      mAliasToTierViews.put(tier.getTierAlias(), tierView);
    }
  }

  /**
   * Clears all marks of blocks to move in/out in all dir views.
   */
  public void clearBlockMarks() {
    for (StorageTierView tierView : mTierViews) {
      for (StorageDirView dirView : tierView.getDirViews()) {
        dirView.clearBlockMarks();
      }
    }
  }

  /**
   * Tests if the block is pinned.
   *
   * @param blockId to be tested
   * @return boolean, true if block is pinned
   */
  public boolean isBlockPinned(long blockId) {
    // Pinning is tracked per inode: map the block back to its container's max-sequence id.
    return mPinnedInodes.contains(
        BlockId.createBlockId(BlockId.getContainerId(blockId), BlockId.getMaxSequenceNumber()));
  }

  /**
   * Tests if the block is locked.
   *
   * @param blockId to be tested
   * @return boolean, true if block is locked
   */
  public boolean isBlockLocked(long blockId) {
    return mInUseBlocks.contains(blockId);
  }

  /**
   * Tests if the block is evictable.
   *
   * @param blockId to be tested
   * @return boolean, true if the block can be evicted
   */
  public boolean isBlockEvictable(long blockId) {
    // A block is evictable only if it is neither pinned, nor locked, nor already marked to move.
    return (!isBlockPinned(blockId) && !isBlockLocked(blockId) && !isBlockMarked(blockId));
  }

  /**
   * Tests if the block is marked to move out of its current dir in this view.
   *
   * @param blockId the id of the block
   * @return boolean, true if the block is marked to move out
   */
  public boolean isBlockMarked(long blockId) {
    for (StorageTierView tierView : mTierViews) {
      for (StorageDirView dirView : tierView.getDirViews()) {
        if (dirView.isMarkedToMoveOut(blockId)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Provides {@link StorageTierView} given tierAlias. Throws an {@link IllegalArgumentException} if
   * the tierAlias is not found.
   *
   * @param tierAlias the alias of this tierView
   * @return the {@link StorageTierView} object associated with the alias
   */
  public StorageTierView getTierView(String tierAlias) {
    StorageTierView tierView = mAliasToTierViews.get(tierAlias);
    if (tierView == null) {
      throw new IllegalArgumentException(
          ExceptionMessage.TIER_VIEW_ALIAS_NOT_FOUND.getMessage(tierAlias));
    } else {
      return tierView;
    }
  }

  /**
   * Gets all tierViews under this managerView.
   *
   * @return the list of {@link StorageTierView}s
   */
  public List<StorageTierView> getTierViews() {
    return Collections.unmodifiableList(mTierViews);
  }

  /**
   * Gets all tierViews before certain tierView. Throws an {@link IllegalArgumentException} if the
   * tierAlias is not found.
   *
   * @param tierAlias the alias of a tierView
   * @return the list of {@link StorageTierView}
   */
  public List<StorageTierView> getTierViewsBelow(String tierAlias) {
    int ordinal = getTierView(tierAlias).getTierViewOrdinal();
    return mTierViews.subList(ordinal + 1, mTierViews.size());
  }

  /**
   * Gets the next storage tier view.
   *
   * @param tierView the storage tier view
   * @return the next storage tier view, null if this is the last tier view
   */
  public StorageTierView getNextTier(StorageTierView tierView) {
    int nextOrdinal = tierView.getTierViewOrdinal() + 1;
    if (nextOrdinal < mTierViews.size()) {
      return mTierViews.get(nextOrdinal);
    }
    return null;
  }

  /**
   * Gets available bytes given certain location
   * {@link BlockMetadataManager#getAvailableBytes(BlockStoreLocation)}. Throws an
   * {@link IllegalArgumentException} if the location does not belong to tiered storage.
   *
   * @param location location the check available bytes
   * @return available bytes
   */
  public long getAvailableBytes(BlockStoreLocation location) {
    return mMetadataManager.getAvailableBytes(location);
  }

  /**
   * Returns null if block is pinned or currently being locked, otherwise returns
   * {@link BlockMetadataManager#getBlockMeta(long)}.
   *
   * @param blockId the block id
   * @return metadata of the block or null
   * @throws BlockDoesNotExistException if no {@link BlockMeta} for this block id is found
   */
  public BlockMeta getBlockMeta(long blockId) throws BlockDoesNotExistException {
    if (isBlockEvictable(blockId)) {
      return mMetadataManager.getBlockMeta(blockId);
    } else {
      return null;
    }
  }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment.incremental;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import io.druid.collections.NonBlockingPool;
import io.druid.common.guava.GuavaUtils;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row;
import io.druid.data.input.impl.DimensionSchema;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.groupby.RowBasedColumnSelectorFactory;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionHandler;
import io.druid.segment.DimensionHandlerUtils;
import io.druid.segment.DimensionIndexer;
import io.druid.segment.DimensionSelector;
import io.druid.segment.DoubleColumnSelector;
import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector;
import io.druid.segment.Metadata;
import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.VirtualColumns;
import io.druid.segment.column.Column;
import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ColumnCapabilitiesImpl;
import io.druid.segment.column.ValueType;
import io.druid.segment.serde.ComplexMetricExtractor;
import io.druid.segment.serde.ComplexMetricSerde;
import io.druid.segment.serde.ComplexMetrics;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import java.io.Closeable;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
/**
*/
public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>, Closeable
{
// Timestamp of the newest event ingested so far; written under synchronization in
// updateMaxIngestedTime() and read without locking, hence volatile.
private volatile DateTime maxIngestedEventTime;
// Used to discover ValueType based on the class of values in a row
// Also used to convert between the duplicate ValueType enums in DimensionSchema (druid-api) and main druid.
private static final Map<Object, ValueType> TYPE_MAP = ImmutableMap.<Object, ValueType>builder()
    .put(Long.class, ValueType.LONG)
    .put(Double.class, ValueType.DOUBLE)
    .put(Float.class, ValueType.FLOAT)
    .put(String.class, ValueType.STRING)
    .put(DimensionSchema.ValueType.LONG, ValueType.LONG)
    .put(DimensionSchema.ValueType.FLOAT, ValueType.FLOAT)
    .put(DimensionSchema.ValueType.STRING, ValueType.STRING)
    .put(DimensionSchema.ValueType.DOUBLE, ValueType.DOUBLE)
    .build();
/**
 * Column selector used at ingestion time for inputs to aggregators.
 *
 * @param agg the aggregator
 * @param in ingestion-time input row supplier
 * @param deserializeComplexMetrics whether complex objects should be deserialized by a {@link ComplexMetricExtractor}
 *
 * @return column selector factory
 */
public static ColumnSelectorFactory makeColumnSelectorFactory(
    final VirtualColumns virtualColumns,
    final AggregatorFactory agg,
    final Supplier<InputRow> in,
    final boolean deserializeComplexMetrics
)
{
  // Base factory reads values directly from the current input row supplied by `in`.
  final RowBasedColumnSelectorFactory baseSelectorFactory = RowBasedColumnSelectorFactory.create(in, null);
  class IncrementalIndexInputRowColumnSelectorFactory implements ColumnSelectorFactory
  {
    @Override
    public LongColumnSelector makeLongColumnSelector(final String columnName)
    {
      return baseSelectorFactory.makeLongColumnSelector(columnName);
    }
    @Override
    public FloatColumnSelector makeFloatColumnSelector(final String columnName)
    {
      return baseSelectorFactory.makeFloatColumnSelector(columnName);
    }
    @Override
    public ObjectColumnSelector makeObjectColumnSelector(final String column)
    {
      final String typeName = agg.getTypeName();
      final ObjectColumnSelector rawColumnSelector = baseSelectorFactory.makeObjectColumnSelector(column);
      // Simple (non-COMPLEX) value types, or callers that opted out of deserialization,
      // can use the raw row-based selector directly.
      if ((GuavaUtils.getEnumIfPresent(ValueType.class, StringUtils.toUpperCase(typeName)) != null && !typeName.equalsIgnoreCase(ValueType.COMPLEX.name()))
          || !deserializeComplexMetrics) {
        return rawColumnSelector;
      } else {
        // Complex metrics are materialized through the type's registered ComplexMetricSerde.
        final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
        if (serde == null) {
          throw new ISE("Don't know how to handle type[%s]", typeName);
        }
        final ComplexMetricExtractor extractor = serde.getExtractor();
        return new ObjectColumnSelector()
        {
          @Override
          public Class classOfObject()
          {
            return extractor.extractedClass();
          }
          @Override
          public Object get()
          {
            // Extract the complex value from the row currently being ingested.
            return extractor.extractValue(in.get(), column);
          }
        };
      }
    }
    @Override
    public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec)
    {
      return baseSelectorFactory.makeDimensionSelector(dimensionSpec);
    }
    @Nullable
    @Override
    public ColumnCapabilities getColumnCapabilities(String columnName)
    {
      return baseSelectorFactory.getColumnCapabilities(columnName);
    }
    @Override
    public DoubleColumnSelector makeDoubleColumnSelector(String columnName)
    {
      return baseSelectorFactory.makeDoubleColumnSelector(columnName);
    }
  }
  // Let virtual columns intercept selector creation before falling back to the row-based factory.
  return virtualColumns.wrap(new IncrementalIndexInputRowColumnSelectorFactory());
}
// Earliest timestamp accepted by add(); rows below it are rejected in toTimeAndDims().
private final long minTimestamp;
// Query granularity used to bucket row timestamps (see toTimeAndDims()).
private final Granularity gran;
private final boolean rollup;
// Row transformations applied in formatRow(), e.g. spatial dimension handling.
private final List<Function<InputRow, InputRow>> rowTransformers;
private final VirtualColumns virtualColumns;
private final AggregatorFactory[] metrics;
private final AggregatorType[] aggs;
private final boolean deserializeComplexMetrics;
private final boolean reportParseExceptions;
private final Metadata metadata;
// Insertion-ordered descriptors; dimensionDescs also serves as the lock guarding
// dimension discovery (see toTimeAndDims() and the synchronized accessors).
private final Map<String, MetricDesc> metricDescs;
private final Map<String, DimensionDesc> dimensionDescs;
private final List<DimensionDesc> dimensionDescsList;
private final Map<String, ColumnCapabilitiesImpl> columnCapabilities;
// Row count; incremented by addToFacts() implementations — see size()/isEmpty().
private final AtomicInteger numEntries = new AtomicInteger();
// This is modified on add() in a critical section.
private final ThreadLocal<InputRow> in = new ThreadLocal<>();
private final Supplier<InputRow> rowSupplier = in::get;
/**
 * Setting deserializeComplexMetrics to false is necessary for intermediate aggregation such as groupBy that
 * should not deserialize input columns using ComplexMetricSerde for aggregators that return complex metrics.
 *
 * Set concurrentEventAdd to true to indicate that adding of input row should be thread-safe (for example, groupBy
 * where the multiple threads can add concurrently to the IncrementalIndex).
 *
 * @param incrementalIndexSchema    the schema to use for incremental index
 * @param deserializeComplexMetrics flag whether or not to call ComplexMetricExtractor.extractValue() on the input
 *                                  value for aggregators that return metrics other than float.
 * @param reportParseExceptions     flag whether or not to report ParseExceptions that occur while extracting values
 *                                  from input rows
 * @param concurrentEventAdd        flag whether or not adding of input rows should be thread-safe
 */
protected IncrementalIndex(
    final IncrementalIndexSchema incrementalIndexSchema,
    final boolean deserializeComplexMetrics,
    final boolean reportParseExceptions,
    final boolean concurrentEventAdd
)
{
  this.minTimestamp = incrementalIndexSchema.getMinTimestamp();
  this.gran = incrementalIndexSchema.getGran();
  this.rollup = incrementalIndexSchema.isRollup();
  this.virtualColumns = incrementalIndexSchema.getVirtualColumns();
  this.metrics = incrementalIndexSchema.getMetrics();
  this.rowTransformers = new CopyOnWriteArrayList<>();
  this.deserializeComplexMetrics = deserializeComplexMetrics;
  this.reportParseExceptions = reportParseExceptions;
  this.columnCapabilities = Maps.newHashMap();
  // Segment-level metadata is built from the *combining* forms of the aggregators.
  this.metadata = new Metadata()
      .setAggregators(getCombiningAggregators(metrics))
      .setTimestampSpec(incrementalIndexSchema.getTimestampSpec())
      .setQueryGranularity(this.gran)
      .setRollup(this.rollup);
  // Subclass hook: allocates per-metric aggregator state.
  this.aggs = initAggs(metrics, rowSupplier, deserializeComplexMetrics, concurrentEventAdd);
  // Linked maps preserve declaration order for metrics and dimensions.
  this.metricDescs = Maps.newLinkedHashMap();
  for (AggregatorFactory metric : metrics) {
    MetricDesc metricDesc = new MetricDesc(metricDescs.size(), metric);
    metricDescs.put(metricDesc.getName(), metricDesc);
    columnCapabilities.put(metricDesc.getName(), metricDesc.getCapabilities());
  }
  DimensionsSpec dimensionsSpec = incrementalIndexSchema.getDimensionsSpec();
  this.dimensionDescs = Maps.newLinkedHashMap();
  this.dimensionDescsList = new ArrayList<>();
  for (DimensionSchema dimSchema : dimensionsSpec.getDimensions()) {
    ValueType type = TYPE_MAP.get(dimSchema.getValueType());
    String dimName = dimSchema.getName();
    ColumnCapabilitiesImpl capabilities = makeCapabilitesFromValueType(type);
    if (dimSchema.getTypeName().equals(DimensionSchema.SPATIAL_TYPE_NAME)) {
      // Spatial dimensions get capabilities only; they are handled via a row transformer below.
      capabilities.setHasSpatialIndexes(true);
    } else {
      DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(
          dimName,
          capabilities,
          dimSchema.getMultiValueHandling()
      );
      addNewDimension(dimName, capabilities, handler);
    }
    columnCapabilities.put(dimName, capabilities);
  }
  //__time capabilities
  ColumnCapabilitiesImpl timeCapabilities = new ColumnCapabilitiesImpl();
  timeCapabilities.setType(ValueType.LONG);
  columnCapabilities.put(Column.TIME_COLUMN_NAME, timeCapabilities);
  // This should really be more generic
  List<SpatialDimensionSchema> spatialDimensions = dimensionsSpec.getSpatialDimensions();
  if (!spatialDimensions.isEmpty()) {
    this.rowTransformers.add(new SpatialDimensionRowTransformer(spatialDimensions));
  }
}
/**
 * Fluent builder for {@link IncrementalIndex} implementations.
 *
 * Defaults: no schema (must be set before building), deserialize complex metrics,
 * report parse exceptions, non-concurrent event adds, sorted facts, and a max row
 * count of 0 — {@code buildOnheap()}/{@code buildOffheap()} reject a non-positive
 * max row count.
 */
public static class Builder
{
  private IncrementalIndexSchema incrementalIndexSchema;
  private boolean deserializeComplexMetrics;
  private boolean reportParseExceptions;
  private boolean concurrentEventAdd;
  private boolean sortFacts;
  private int maxRowCount;

  public Builder()
  {
    incrementalIndexSchema = null;
    deserializeComplexMetrics = true;
    reportParseExceptions = true;
    concurrentEventAdd = false;
    sortFacts = true;
    maxRowCount = 0;
  }

  public Builder setIndexSchema(final IncrementalIndexSchema incrementalIndexSchema)
  {
    this.incrementalIndexSchema = incrementalIndexSchema;
    return this;
  }

  /**
   * A helper method to set a simple index schema with only metrics and default values for the other parameters. Note
   * that this method is normally used for testing and benchmarking; it is unlikely that you would use it in
   * production settings.
   *
   * @param metrics variable array of {@link AggregatorFactory} metrics
   *
   * @return this
   */
  @VisibleForTesting
  public Builder setSimpleTestingIndexSchema(final AggregatorFactory... metrics)
  {
    this.incrementalIndexSchema = new IncrementalIndexSchema.Builder()
        .withMetrics(metrics)
        .build();
    return this;
  }

  public Builder setDeserializeComplexMetrics(final boolean deserializeComplexMetrics)
  {
    this.deserializeComplexMetrics = deserializeComplexMetrics;
    return this;
  }

  public Builder setReportParseExceptions(final boolean reportParseExceptions)
  {
    this.reportParseExceptions = reportParseExceptions;
    return this;
  }

  public Builder setConcurrentEventAdd(final boolean concurrentEventAdd)
  {
    this.concurrentEventAdd = concurrentEventAdd;
    return this;
  }

  public Builder setSortFacts(final boolean sortFacts)
  {
    this.sortFacts = sortFacts;
    return this;
  }

  public Builder setMaxRowCount(final int maxRowCount)
  {
    this.maxRowCount = maxRowCount;
    return this;
  }

  /** Builds an on-heap index; requires a schema and a positive max row count. */
  public IncrementalIndex buildOnheap()
  {
    if (maxRowCount <= 0) {
      throw new IllegalArgumentException("Invalid max row count: " + maxRowCount);
    }
    return new OnheapIncrementalIndex(
        // Fix: message previously read "incrementIndexSchema is null" (typo), inconsistent
        // with the identical check in buildOffheap().
        Objects.requireNonNull(incrementalIndexSchema, "incrementalIndexSchema is null"),
        deserializeComplexMetrics,
        reportParseExceptions,
        concurrentEventAdd,
        sortFacts,
        maxRowCount
    );
  }

  /** Builds an off-heap index backed by the given buffer pool; same preconditions as buildOnheap(). */
  public IncrementalIndex buildOffheap(final NonBlockingPool<ByteBuffer> bufferPool)
  {
    if (maxRowCount <= 0) {
      throw new IllegalArgumentException("Invalid max row count: " + maxRowCount);
    }
    return new OffheapIncrementalIndex(
        Objects.requireNonNull(incrementalIndexSchema, "incrementalIndexSchema is null"),
        deserializeComplexMetrics,
        reportParseExceptions,
        concurrentEventAdd,
        sortFacts,
        maxRowCount,
        Objects.requireNonNull(bufferPool, "bufferPool is null")
    );
  }
}
/** @return whether this index was configured to roll up rows (from the schema). */
public boolean isRollup()
{
  return rollup;
}
// Storage of (time, dims) keys and their row indexes; provided by subclasses.
public abstract FactsHolder getFacts();
public abstract boolean canAppendRow();
public abstract String getOutOfRowsReason();
// Allocates per-metric aggregator state; called once from the constructor.
protected abstract AggregatorType[] initAggs(
    AggregatorFactory[] metrics,
    Supplier<InputRow> rowSupplier,
    boolean deserializeComplexMetrics,
    boolean concurrentEventAdd
);
// Note: This method needs to be thread safe.
protected abstract Integer addToFacts(
    AggregatorFactory[] metrics,
    boolean deserializeComplexMetrics,
    boolean reportParseExceptions,
    InputRow row,
    AtomicInteger numEntries,
    TimeAndDims key,
    ThreadLocal<InputRow> rowContainer,
    Supplier<InputRow> rowSupplier
) throws IndexSizeExceededException;
public abstract int getLastRowIndex();
// Accessors for per-row aggregator state, addressed by row offset and aggregator position.
protected abstract AggregatorType[] getAggsForRow(int rowOffset);
protected abstract Object getAggVal(AggregatorType agg, int rowOffset, int aggPosition);
protected abstract float getMetricFloatValue(int rowOffset, int aggOffset);
protected abstract long getMetricLongValue(int rowOffset, int aggOffset);
protected abstract Object getMetricObjectValue(int rowOffset, int aggOffset);
protected abstract double getMetricDoubleValue(int rowOffset, int aggOffset);
@Override
public void close()
{
  // No-op in the base class; subclasses may override to release their resources.
}
/**
 * Runs the row through every registered row transformer (e.g. spatial dimension
 * handling) and returns the transformed row.
 *
 * @throws IAE if a transformer unexpectedly produces a null row
 */
public InputRow formatRow(InputRow row)
{
  InputRow transformed = row;
  for (Function<InputRow, InputRow> transformer : rowTransformers) {
    transformed = transformer.apply(transformed);
  }
  if (transformed == null) {
    throw new IAE("Row is null? How can this be?!");
  }
  return transformed;
}
/**
 * Infers the {@link ValueType} of a dimension value from its Java class; for a
 * multi-valued (List) dimension the first element is used. Returns null when the
 * value is null/empty or its class is not in TYPE_MAP.
 */
private ValueType getTypeFromDimVal(Object dimVal)
{
  Object representative = dimVal;
  if (dimVal instanceof List) {
    List dimValList = (List) dimVal;
    representative = dimValList.isEmpty() ? null : dimValList.get(0);
  }
  return representative == null ? null : TYPE_MAP.get(representative.getClass());
}
/**
 * @return capabilities for every known column (time, dimensions, metrics).
 * NOTE(review): this exposes the internal mutable map directly; callers must not modify it.
 */
public Map<String, ColumnCapabilitiesImpl> getColumnCapabilities()
{
  return columnCapabilities;
}
/**
 * Adds a new row. The row might correspond with another row that already exists, in which case this will
 * update that row instead of inserting a new one.
 * <p>
 * <p>
 * Calls to add() are thread safe.
 * <p>
 *
 * @param row the row of data to add
 *
 * @return the number of rows in the data set after adding the InputRow
 * @throws IndexSizeExceededException if the index cannot hold any more rows
 */
public int add(InputRow row) throws IndexSizeExceededException
{
  // Build the (bucketed time, encoded dims) key, then delegate storage/aggregation
  // to the subclass-specific addToFacts() implementation.
  TimeAndDims key = toTimeAndDims(row);
  final int rv = addToFacts(
      metrics,
      deserializeComplexMetrics,
      reportParseExceptions,
      row,
      numEntries,
      key,
      in,
      rowSupplier
  );
  updateMaxIngestedTime(row.getTimestamp());
  return rv;
}
/**
 * Converts an input row into the index's internal {@link TimeAndDims} key: applies row
 * transformers, validates the timestamp, encodes each dimension value via its indexer,
 * and registers any dimensions not seen before (schemaless discovery, String-typed).
 *
 * @throws IAE if the row's timestamp is below the configured minTimestamp
 * @throws ISE if a dimension occurs more than once in the row
 */
@VisibleForTesting
TimeAndDims toTimeAndDims(InputRow row) throws IndexSizeExceededException
{
  row = formatRow(row);
  if (row.getTimestampFromEpoch() < minTimestamp) {
    throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, new DateTime(minTimestamp));
  }
  final List<String> rowDimensions = row.getDimensions();
  Object[] dims;
  List<Object> overflow = null;
  // dimensionDescs is the lock guarding dimension discovery.
  synchronized (dimensionDescs) {
    dims = new Object[dimensionDescs.size()];
    for (String dimension : rowDimensions) {
      boolean wasNewDim = false;
      ColumnCapabilitiesImpl capabilities;
      DimensionDesc desc = dimensionDescs.get(dimension);
      if (desc != null) {
        capabilities = desc.getCapabilities();
      } else {
        wasNewDim = true;
        capabilities = columnCapabilities.get(dimension);
        if (capabilities == null) {
          capabilities = new ColumnCapabilitiesImpl();
          // For schemaless type discovery, assume everything is a String for now, can change later.
          capabilities.setType(ValueType.STRING);
          capabilities.setDictionaryEncoded(true);
          capabilities.setHasBitmapIndexes(true);
          columnCapabilities.put(dimension, capabilities);
        }
        DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(dimension, capabilities, null);
        desc = addNewDimension(dimension, capabilities, handler);
      }
      DimensionHandler handler = desc.getHandler();
      DimensionIndexer indexer = desc.getIndexer();
      Object dimsKey = indexer.processRowValsToUnsortedEncodedKeyComponent(row.getRaw(dimension));
      // Set column capabilities as data is coming in
      if (!capabilities.hasMultipleValues() && dimsKey != null && handler.getLengthOfEncodedKeyComponent(dimsKey) > 1) {
        capabilities.setHasMultipleValues(true);
      }
      if (wasNewDim) {
        // Newly discovered dimensions land past the end of `dims`; collect them separately
        // and append below.
        if (overflow == null) {
          overflow = Lists.newArrayList();
        }
        overflow.add(dimsKey);
      } else if (desc.getIndex() > dims.length || dims[desc.getIndex()] != null) {
        /*
         * index > dims.length requires that we saw this dimension and added it to the dimensionOrder map,
         * otherwise index is null. Since dims is initialized based on the size of dimensionOrder on each call to add,
         * it must have been added to dimensionOrder during this InputRow.
         *
         * if we found an index for this dimension it means we've seen it already. If !(index > dims.length) then
         * we saw it on a previous input row (this its safe to index into dims). If we found a value in
         * the dims array for this index, it means we have seen this dimension already on this input row.
         */
        throw new ISE("Dimension[%s] occurred more than once in InputRow", dimension);
      } else {
        dims[desc.getIndex()] = dimsKey;
      }
    }
  }
  if (overflow != null) {
    // Merge overflow and non-overflow
    Object[] newDims = new Object[dims.length + overflow.size()];
    System.arraycopy(dims, 0, newDims, 0, dims.length);
    for (int i = 0; i < overflow.size(); ++i) {
      newDims[dims.length + i] = overflow.get(i);
    }
    dims = newDims;
  }
  // Bucket the timestamp by the query granularity (missing timestamp maps to minTimestamp).
  long truncated = 0;
  if (row.getTimestamp() != null) {
    truncated = gran.bucketStart(row.getTimestamp()).getMillis();
  }
  return new TimeAndDims(Math.max(truncated, minTimestamp), dims, dimensionDescsList);
}
// Tracks the newest event timestamp seen; synchronized because add() may be called concurrently.
private synchronized void updateMaxIngestedTime(DateTime eventTime)
{
  boolean isNewer = maxIngestedEventTime == null || maxIngestedEventTime.isBefore(eventTime);
  if (isNewer) {
    maxIngestedEventTime = eventTime;
  }
}
/** @return true when the index currently holds no rows. */
public boolean isEmpty()
{
  return numEntries.get() == 0;
}
/** @return the number of rows currently in the index. */
public int size()
{
  return numEntries.get();
}
private long getMinTimeMillis()
{
  return getFacts().getMinTimeMillis();
}
private long getMaxTimeMillis()
{
  return getFacts().getMaxTimeMillis();
}
/** @return the per-metric aggregator state allocated by initAggs(). */
public AggregatorType[] getAggs()
{
  return aggs;
}
/** @return the aggregator factories this index was configured with. */
public AggregatorFactory[] getMetricAggs()
{
  return metrics;
}
/** @return dimension names in discovery order (snapshot; safe against concurrent adds). */
public List<String> getDimensionNames()
{
  synchronized (dimensionDescs) {
    return ImmutableList.copyOf(dimensionDescs.keySet());
  }
}
/** @return dimension descriptors in discovery order (snapshot). */
public List<DimensionDesc> getDimensions()
{
  synchronized (dimensionDescs) {
    return ImmutableList.copyOf(dimensionDescs.values());
  }
}
/** @return the descriptor for the named dimension, or null if unknown. */
public DimensionDesc getDimension(String dimension)
{
  synchronized (dimensionDescs) {
    return dimensionDescs.get(dimension);
  }
}
/** @return the type name of the given metric, or null if the metric is unknown. */
public String getMetricType(String metric)
{
  final MetricDesc metricDesc = metricDescs.get(metric);
  return metricDesc != null ? metricDesc.getType() : null;
}
/**
 * Returns the Java class used to represent values of the given metric, or null when the
 * metric is unknown or its value type has no direct class mapping.
 */
public Class getMetricClass(String metric)
{
  MetricDesc metricDesc = metricDescs.get(metric);
  if (metricDesc == null) {
    // Fix: previously this threw an NPE for unknown metric names; returning null is
    // consistent with getMetricType().
    return null;
  }
  switch (metricDesc.getCapabilities().getType()) {
    case COMPLEX:
      return ComplexMetrics.getSerdeForType(metricDesc.getType()).getObjectStrategy().getClazz();
    case DOUBLE:
      return Double.class;
    case FLOAT:
      return Float.class;
    case LONG:
      return Long.class;
    case STRING:
      return String.class;
  }
  return null;
}
/**
 * @return the time interval covered by this index: [minTimestamp, one granularity bucket
 * past the max ingested time); collapses to [minTimestamp, minTimestamp) when empty.
 */
public Interval getInterval()
{
  return new Interval(minTimestamp, isEmpty() ? minTimestamp : gran.increment(new DateTime(getMaxTimeMillis())).getMillis());
}
/** @return the minimum row time in the facts table, or null when the index is empty. */
public DateTime getMinTime()
{
  return isEmpty() ? null : new DateTime(getMinTimeMillis());
}
/** @return the maximum row time in the facts table, or null when the index is empty. */
public DateTime getMaxTime()
{
  return isEmpty() ? null : new DateTime(getMaxTimeMillis());
}
/** @return the ordinal of the dimension in discovery order, or null if unknown. */
public Integer getDimensionIndex(String dimension)
{
  DimensionDesc dimSpec = getDimension(dimension);
  return dimSpec == null ? null : dimSpec.getIndex();
}
// Same contract as getDimensionNames(): dimension names in discovery order.
public List<String> getDimensionOrder()
{
  synchronized (dimensionDescs) {
    return ImmutableList.copyOf(dimensionDescs.keySet());
  }
}
// Builds default capabilities for a dimension of the given value type: only STRING
// dimensions are dictionary-encoded and bitmap-indexed.
// NOTE(review): method name has a typo ("Capabilites"); kept as-is since other callers
// in this class (outside this view) may reference it.
private ColumnCapabilitiesImpl makeCapabilitesFromValueType(ValueType type)
{
  ColumnCapabilitiesImpl capabilities = new ColumnCapabilitiesImpl();
  capabilities.setDictionaryEncoded(type == ValueType.STRING);
  capabilities.setHasBitmapIndexes(type == ValueType.STRING);
  capabilities.setType(type);
  return capabilities;
}
/**
 * Currently called to initialize IncrementalIndex dimension order during index creation
 * Index dimension ordering could be changed to initialize from DimensionsSpec after resolution of
 * https://github.com/druid-io/druid/issues/2011
 *
 * @throws ISE if this index has already discovered any dimensions
 */
public void loadDimensionIterable(Iterable<String> oldDimensionOrder, Map<String, ColumnCapabilitiesImpl> oldColumnCapabilities)
{
  synchronized (dimensionDescs) {
    if (!dimensionDescs.isEmpty()) {
      throw new ISE("Cannot load dimension order when existing order[%s] is not empty.", dimensionDescs.keySet());
    }
    for (String dim : oldDimensionOrder) {
      if (dimensionDescs.get(dim) == null) {
        // Carry over the old capabilities and register the dimension in the given order.
        ColumnCapabilitiesImpl capabilities = oldColumnCapabilities.get(dim);
        columnCapabilities.put(dim, capabilities);
        DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(dim, capabilities, null);
        addNewDimension(dim, capabilities, handler);
      }
    }
  }
}
/**
 * Registers a new dimension at the next positional index, recording it in
 * both the by-name map and the ordered list. Caller must hold the
 * {@code dimensionDescs} monitor (enforced by convention via @GuardedBy).
 */
@GuardedBy("dimensionDescs")
private DimensionDesc addNewDimension(String dim, ColumnCapabilitiesImpl capabilities, DimensionHandler handler)
{
  final int position = dimensionDescs.size();
  final DimensionDesc newDesc = new DimensionDesc(position, dim, capabilities, handler);
  dimensionDescs.put(dim, newDesc);
  dimensionDescsList.add(newDesc);
  return newDesc;
}
/**
 * @return an immutable copy of metric names, in registration order
 */
public List<String> getMetricNames()
{
  return ImmutableList.copyOf(metricDescs.keySet());
}
/**
 * @return an immutable copy of all metric descriptors, in registration order
 */
public List<MetricDesc> getMetrics()
{
  return ImmutableList.copyOf(metricDescs.values());
}
/**
 * Looks up the positional index of the named metric.
 *
 * @return the metric's index, or null when the metric is unknown
 */
public Integer getMetricIndex(String metricName)
{
  final MetricDesc desc = metricDescs.get(metricName);
  if (desc == null) {
    return null;
  }
  return desc.getIndex();
}
/**
 * @return capabilities for the named column, or null if the column is unknown
 */
public ColumnCapabilities getCapabilities(String column)
{
  return columnCapabilities.get(column);
}
/**
 * @return the metadata associated with this index
 */
public Metadata getMetadata()
{
  return metadata;
}
/**
 * Maps each aggregator factory to its combining form, used when merging
 * already-aggregated values rather than raw input rows.
 */
private static AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators)
{
  final AggregatorFactory[] combining = new AggregatorFactory[aggregators.length];
  Arrays.setAll(combining, i -> aggregators[i].getCombiningFactory());
  return combining;
}
/**
 * @return dimension name -> handler, preserving dimension registration order
 */
public Map<String, DimensionHandler> getDimensionHandlers()
{
  final Map<String, DimensionHandler> byName = Maps.newLinkedHashMap();
  for (DimensionDesc dimDesc : dimensionDescsList) {
    byName.put(dimDesc.getName(), dimDesc.getHandler());
  }
  return byName;
}
/**
 * Iterates all rows in ascending time order with no post-aggregators applied.
 */
@Override
public Iterator<Row> iterator()
{
  return iterableWithPostAggregations(null, false).iterator();
}
/**
 * Returns a lazy view of the stored facts as {@link Row}s. Each key is
 * transformed on the fly: dimension key components are decoded back to
 * actual values, current aggregator values are appended, and, if supplied,
 * the post-aggregators are evaluated over the combined value map.
 *
 * @param postAggs   post-aggregators to evaluate per row, or null for none
 * @param descending iterate in descending time order when true
 */
public Iterable<Row> iterableWithPostAggregations(final List<PostAggregator> postAggs, final boolean descending)
{
  return new Iterable<Row>()
  {
    @Override
    public Iterator<Row> iterator()
    {
      final List<DimensionDesc> dimensions = getDimensions();
      return Iterators.transform(
          getFacts().iterator(descending),
          timeAndDims -> {
            final int rowOffset = timeAndDims.getRowIndex();
            Object[] theDims = timeAndDims.getDims();
            // LinkedHashMap keeps dimension order, then metric order, then post-agg order
            Map<String, Object> theVals = Maps.newLinkedHashMap();
            for (int i = 0; i < theDims.length; ++i) {
              Object dim = theDims[i];
              DimensionDesc dimensionDesc = dimensions.get(i);
              if (dimensionDesc == null) {
                continue;
              }
              String dimensionName = dimensionDesc.getName();
              DimensionHandler handler = dimensionDesc.getHandler();
              if (dim == null || handler.getLengthOfEncodedKeyComponent(dim) == 0) {
                // absent or zero-length encoded component becomes an explicit null value
                theVals.put(dimensionName, null);
                continue;
              }
              final DimensionIndexer indexer = dimensionDesc.getIndexer();
              // decode the stored (unsorted-encoded) key component back to actual value(s)
              Object rowVals = indexer.convertUnsortedEncodedKeyComponentToActualArrayOrList(dim, DimensionIndexer.LIST);
              theVals.put(dimensionName, rowVals);
            }
            // append the current aggregator value for every metric of this row
            AggregatorType[] aggs = getAggsForRow(rowOffset);
            for (int i = 0; i < aggs.length; ++i) {
              theVals.put(metrics[i].getName(), getAggVal(aggs[i], rowOffset, i));
            }
            // post-aggregators see both dimension and metric values
            if (postAggs != null) {
              for (PostAggregator postAgg : postAggs) {
                theVals.put(postAgg.getName(), postAgg.compute(theVals));
              }
            }
            return new MapBasedRow(timeAndDims.getTimestamp(), theVals);
          }
      );
    }
  };
}
/**
 * @return {@code maxIngestedEventTime} — the most recent event timestamp
 * recorded during ingestion (maintained elsewhere in this class)
 */
public DateTime getMaxIngestedEventTime()
{
  return maxIngestedEventTime;
}
/**
 * Immutable descriptor for one dimension: its position among all dimensions,
 * its name, its column capabilities, and the handler/indexer pair used to
 * encode and decode its values.
 */
public static final class DimensionDesc
{
  private final int index;                            // position among all dimensions
  private final String name;
  private final ColumnCapabilitiesImpl capabilities;
  private final DimensionHandler handler;
  private final DimensionIndexer indexer;             // created once per dimension from the handler

  public DimensionDesc(int index, String name, ColumnCapabilitiesImpl capabilities, DimensionHandler handler)
  {
    this.index = index;
    this.name = name;
    this.capabilities = capabilities;
    this.handler = handler;
    // every dimension gets its own dedicated indexer instance
    this.indexer = handler.makeIndexer();
  }

  public int getIndex()
  {
    return index;
  }

  public String getName()
  {
    return name;
  }

  public ColumnCapabilitiesImpl getCapabilities()
  {
    return capabilities;
  }

  public DimensionHandler getHandler()
  {
    return handler;
  }

  public DimensionIndexer getIndexer()
  {
    return indexer;
  }
}
/**
 * Immutable descriptor for one metric: its position, output name, canonical
 * type name, and the column capabilities derived from the aggregator's
 * declared type.
 */
public static final class MetricDesc
{
  private final int index;    // position among all metrics
  private final String name;  // aggregator output name
  private final String type;  // "float"/"long"/"double" as declared, or the complex serde's type name
  private final ColumnCapabilitiesImpl capabilities;

  public MetricDesc(int index, AggregatorFactory factory)
  {
    this.index = index;
    this.name = factory.getName();
    this.capabilities = new ColumnCapabilitiesImpl();

    final String typeInfo = factory.getTypeName();
    if (typeInfo.equalsIgnoreCase("float")) {
      capabilities.setType(ValueType.FLOAT);
      this.type = typeInfo;
    } else if (typeInfo.equalsIgnoreCase("long")) {
      capabilities.setType(ValueType.LONG);
      this.type = typeInfo;
    } else if (typeInfo.equalsIgnoreCase("double")) {
      capabilities.setType(ValueType.DOUBLE);
      this.type = typeInfo;
    } else {
      // anything else is a complex metric; resolve its canonical name via the serde registry
      capabilities.setType(ValueType.COMPLEX);
      this.type = ComplexMetrics.getSerdeForType(typeInfo).getTypeName();
    }
  }

  public int getIndex()
  {
    return index;
  }

  public String getName()
  {
    return name;
  }

  public String getType()
  {
    return type;
  }

  public ColumnCapabilitiesImpl getCapabilities()
  {
    return capabilities;
  }
}
/**
 * Key for one fact row: the (truncated) timestamp plus the encoded key
 * component for each dimension. Per-dimension equality, hashing, and
 * string conversion are delegated to each dimension's indexer.
 */
public static final class TimeAndDims
{
  /** Sentinel meaning "no row associated" (see FactsHolder contract). */
  public static final int EMPTY_ROW_INDEX = -1;

  private final long timestamp;
  // one encoded key component per dimension; entries may be null
  private final Object[] dims;
  private final List<DimensionDesc> dimensionDescsList;

  /**
   * rowIndex is not checked in {@link #equals} and {@link #hashCode} on purpose. TimeAndDims acts as a Map key
   * and "entry" object (rowIndex is the "value") at the same time. This is done to reduce object indirection and
   * improve locality, and avoid boxing of rowIndex as Integer, when stored in JDK collection:
   * {@link RollupFactsHolder} needs concurrent collections, that are not present in fastutil.
   */
  private int rowIndex;

  TimeAndDims(
      long timestamp,
      Object[] dims,
      List<DimensionDesc> dimensionDescsList
  )
  {
    // starts with no associated row; rowIndex is assigned on insertion
    this(timestamp, dims, dimensionDescsList, EMPTY_ROW_INDEX);
  }

  TimeAndDims(
      long timestamp,
      Object[] dims,
      List<DimensionDesc> dimensionDescsList,
      int rowIndex
  )
  {
    this.timestamp = timestamp;
    this.dims = dims;
    this.dimensionDescsList = dimensionDescsList;
    this.rowIndex = rowIndex;
  }

  public long getTimestamp()
  {
    return timestamp;
  }

  public Object[] getDims()
  {
    return dims;
  }

  public int getRowIndex()
  {
    return rowIndex;
  }

  private void setRowIndex(int rowIndex)
  {
    this.rowIndex = rowIndex;
  }

  @Override
  public String toString()
  {
    return "TimeAndDims{" +
           "timestamp=" + new DateTime(timestamp) +
           ", dims=" + Lists.transform(
        Arrays.asList(dims), new Function<Object, Object>()
        {
          @Override
          public Object apply(@Nullable Object input)
          {
            // render null or empty-array components as the string "null"
            if (input == null || Array.getLength(input) == 0) {
              return Collections.singletonList("null");
            }
            return Collections.singletonList(input);
          }
        }
    ) + '}';
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    TimeAndDims that = (TimeAndDims) o;

    if (timestamp != that.timestamp) {
      return false;
    }
    if (dims.length != that.dims.length) {
      return false;
    }
    // each dimension defines its own notion of key-component equality
    for (int i = 0; i < dims.length; i++) {
      final DimensionIndexer indexer = dimensionDescsList.get(i).getIndexer();
      if (!indexer.checkUnsortedEncodedKeyComponentsEqual(dims[i], that.dims[i])) {
        return false;
      }
    }
    return true;
  }

  @Override
  public int hashCode()
  {
    // must stay consistent with equals: fold in the same per-dimension hashes
    int hash = (int) timestamp;
    for (int i = 0; i < dims.length; i++) {
      final DimensionIndexer indexer = dimensionDescsList.get(i).getIndexer();
      hash = 31 * hash + indexer.getUnsortedEncodedKeyComponentHashCode(dims[i]);
    }
    return hash;
  }
}
/**
 * Convenience overload that applies this index's {@code virtualColumns}
 * before delegating to the full column-selector-factory method.
 */
protected ColumnSelectorFactory makeColumnSelectorFactory(
    final AggregatorFactory agg,
    final Supplier<InputRow> in,
    final boolean deserializeComplexMetrics
)
{
  return makeColumnSelectorFactory(virtualColumns, agg, in, deserializeComplexMetrics);
}
/**
 * Builds the comparator used to order {@link TimeAndDims} keys: by
 * timestamp first, then dimension-by-dimension via each dimension's indexer
 * (see {@link TimeAndDimsComp}).
 */
protected final Comparator<TimeAndDims> dimsComparator()
{
  return new TimeAndDimsComp(dimensionDescsList);
}
/**
 * Orders {@link TimeAndDims} keys by timestamp, then by each dimension's
 * encoded key component (nulls first). Keys of different lengths compare
 * equal when the longer key's extra trailing components are all null.
 */
@VisibleForTesting
static final class TimeAndDimsComp implements Comparator<TimeAndDims>
{
  private List<DimensionDesc> dimensionDescs;

  public TimeAndDimsComp(List<DimensionDesc> dimDescs)
  {
    this.dimensionDescs = dimDescs;
  }

  @Override
  public int compare(TimeAndDims lhs, TimeAndDims rhs)
  {
    // timestamp is the primary sort key
    int retVal = Longs.compare(lhs.timestamp, rhs.timestamp);
    int numComparisons = Math.min(lhs.dims.length, rhs.dims.length);

    int index = 0;
    while (retVal == 0 && index < numComparisons) {
      final Object lhsIdxs = lhs.dims[index];
      final Object rhsIdxs = rhs.dims[index];

      // a null component sorts before any non-null component
      if (lhsIdxs == null) {
        if (rhsIdxs == null) {
          ++index;
          continue;
        }
        return -1;
      }

      if (rhsIdxs == null) {
        return 1;
      }

      // per-dimension comparison is delegated to that dimension's indexer
      final DimensionIndexer indexer = dimensionDescs.get(index).getIndexer();
      retVal = indexer.compareUnsortedEncodedKeyComponents(lhsIdxs, rhsIdxs);
      ++index;
    }

    if (retVal == 0) {
      int lengthDiff = Ints.compare(lhs.dims.length, rhs.dims.length);
      if (lengthDiff == 0) {
        return 0;
      }
      // a longer key whose extra components are all null is considered equal
      Object[] largerDims = lengthDiff > 0 ? lhs.dims : rhs.dims;
      return allNull(largerDims, numComparisons) ? 0 : lengthDiff;
    }

    return retVal;
  }
}
/**
 * Returns true when every element of {@code dims} at or after
 * {@code startPosition} is null. Used by the comparator to treat a longer
 * key whose trailing components are all null as equal to a shorter one.
 */
private static boolean allNull(Object[] dims, int startPosition)
{
  for (int pos = dims.length - 1; pos >= startPosition; pos--) {
    if (dims[pos] != null) {
      return false;
    }
  }
  return true;
}
/**
 * Storage abstraction for the index's fact rows. Implementations differ on
 * whether identical keys are rolled up ({@link RollupFactsHolder}) or always
 * appended ({@link PlainFactsHolder}), and on whether keys are time-sorted.
 */
interface FactsHolder
{
  /**
   * @return the previous rowIndex associated with the specified key, or
   * {@code TimeAndDims#EMPTY_ROW_INDEX} if there was no mapping for the key.
   */
  int getPriorIndex(TimeAndDims key);

  /** @return the smallest stored timestamp; only supported when facts are sorted. */
  long getMinTimeMillis();

  /** @return the largest stored timestamp; only supported when facts are sorted. */
  long getMaxTimeMillis();

  /** Iterates all stored keys, optionally in descending time order. */
  Iterator<TimeAndDims> iterator(boolean descending);

  /** Iterates keys whose timestamp falls within [timeStart, timeEnd). */
  Iterable<TimeAndDims> timeRangeIterable(boolean descending, long timeStart, long timeEnd);

  /** @return all stored keys. */
  Iterable<TimeAndDims> keySet();

  /**
   * @return the previous rowIndex associated with the specified key, or
   * {@code TimeAndDims#EMPTY_ROW_INDEX} if there was no mapping for the key.
   */
  int putIfAbsent(TimeAndDims key, int rowIndex);

  /** Removes all stored facts. */
  void clear();
}
/**
 * FactsHolder for rollup mode: each distinct TimeAndDims key maps to the
 * single entry carrying its rowIndex, so rows with identical keys share one
 * aggregated row.
 */
static class RollupFactsHolder implements FactsHolder
{
  private final boolean sortFacts;
  // Can't use Set because we need to be able to get from collection
  private final ConcurrentMap<TimeAndDims, TimeAndDims> facts;
  private final List<DimensionDesc> dimensionDescsList;

  public RollupFactsHolder(boolean sortFacts, Comparator<TimeAndDims> timeAndDimsComparator, List<DimensionDesc> dimensionDescsList)
  {
    this.sortFacts = sortFacts;
    if (sortFacts) {
      // sorted facts enable time-ordered iteration plus min/max time queries
      this.facts = new ConcurrentSkipListMap<>(timeAndDimsComparator);
    } else {
      this.facts = new ConcurrentHashMap<>();
    }
    this.dimensionDescsList = dimensionDescsList;
  }

  @Override
  public int getPriorIndex(TimeAndDims key)
  {
    TimeAndDims timeAndDims = facts.get(key);
    return timeAndDims == null ? TimeAndDims.EMPTY_ROW_INDEX : timeAndDims.rowIndex;
  }

  @Override
  public long getMinTimeMillis()
  {
    if (sortFacts) {
      // keys are sorted by timestamp first, so the first key holds the min time
      return ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).firstKey().getTimestamp();
    } else {
      throw new UnsupportedOperationException("can't get minTime from unsorted facts data.");
    }
  }

  @Override
  public long getMaxTimeMillis()
  {
    if (sortFacts) {
      return ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).lastKey().getTimestamp();
    } else {
      throw new UnsupportedOperationException("can't get maxTime from unsorted facts data.");
    }
  }

  @Override
  public Iterator<TimeAndDims> iterator(boolean descending)
  {
    if (descending && sortFacts) {
      return ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).descendingMap().keySet().iterator();
    }
    // descending is silently ignored for unsorted facts
    return keySet().iterator();
  }

  @Override
  public Iterable<TimeAndDims> timeRangeIterable(boolean descending, long timeStart, long timeEnd)
  {
    if (!sortFacts) {
      throw new UnsupportedOperationException("can't get timeRange from unsorted facts data.");
    }
    // zero-dimension keys act as the lowest key for each timestamp bound,
    // giving a half-open [timeStart, timeEnd) range over whole timestamps
    TimeAndDims start = new TimeAndDims(timeStart, new Object[]{}, dimensionDescsList);
    TimeAndDims end = new TimeAndDims(timeEnd, new Object[]{}, dimensionDescsList);
    ConcurrentNavigableMap<TimeAndDims, TimeAndDims> subMap =
        ((ConcurrentNavigableMap<TimeAndDims, TimeAndDims>) facts).subMap(start, end);
    final Map<TimeAndDims, TimeAndDims> rangeMap = descending ? subMap.descendingMap() : subMap;
    return rangeMap.keySet();
  }

  @Override
  public Iterable<TimeAndDims> keySet()
  {
    return facts.keySet();
  }

  @Override
  public int putIfAbsent(TimeAndDims key, int rowIndex)
  {
    // setRowIndex() must be called before facts.putIfAbsent() for visibility of rowIndex from concurrent readers.
    key.setRowIndex(rowIndex);
    TimeAndDims prev = facts.putIfAbsent(key, key);
    return prev == null ? TimeAndDims.EMPTY_ROW_INDEX : prev.rowIndex;
  }

  @Override
  public void clear()
  {
    facts.clear();
  }
}
/**
 * FactsHolder for non-rollup (plain append) mode: every row is kept, grouped
 * per timestamp in an insertion-ordered deque, so identical keys are never
 * merged and prior-index lookups always report "absent".
 */
static class PlainFactsHolder implements FactsHolder
{
  private final boolean sortFacts;
  // timestamp -> all rows sharing that timestamp, in insertion order
  private final ConcurrentMap<Long, Deque<TimeAndDims>> facts;

  public PlainFactsHolder(boolean sortFacts)
  {
    this.sortFacts = sortFacts;
    if (sortFacts) {
      // sorted map enables time-ordered iteration plus min/max time queries
      this.facts = new ConcurrentSkipListMap<>();
    } else {
      this.facts = new ConcurrentHashMap<>();
    }
  }

  @Override
  public int getPriorIndex(TimeAndDims key)
  {
    // always return EMPTY_ROW_INDEX to indicate that no prior key cause we always add new row
    return TimeAndDims.EMPTY_ROW_INDEX;
  }

  @Override
  public long getMinTimeMillis()
  {
    if (sortFacts) {
      return ((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts).firstKey();
    } else {
      throw new UnsupportedOperationException("can't get minTime from unsorted facts data.");
    }
  }

  @Override
  public long getMaxTimeMillis()
  {
    if (sortFacts) {
      return ((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts).lastKey();
    } else {
      throw new UnsupportedOperationException("can't get maxTime from unsorted facts data.");
    }
  }

  @Override
  public Iterator<TimeAndDims> iterator(boolean descending)
  {
    if (descending && sortFacts) {
      return concat(((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts)
                        .descendingMap().values(), true).iterator();
    }
    return concat(facts.values(), false).iterator();
  }

  @Override
  public Iterable<TimeAndDims> timeRangeIterable(boolean descending, long timeStart, long timeEnd)
  {
    // half-open range [timeStart, timeEnd) over the sorted timestamps
    ConcurrentNavigableMap<Long, Deque<TimeAndDims>> subMap =
        ((ConcurrentNavigableMap<Long, Deque<TimeAndDims>>) facts).subMap(timeStart, timeEnd);
    final Map<Long, Deque<TimeAndDims>> rangeMap = descending ? subMap.descendingMap() : subMap;
    return concat(rangeMap.values(), descending);
  }

  /** Flattens per-timestamp deques into one lazy sequence of keys. */
  private Iterable<TimeAndDims> concat(
      final Iterable<Deque<TimeAndDims>> iterable,
      final boolean descending
  )
  {
    return () -> Iterators.concat(
        Iterators.transform(
            iterable.iterator(),
            input -> descending ? input.descendingIterator() : input.iterator()
        )
    );
  }

  @Override
  public Iterable<TimeAndDims> keySet()
  {
    return concat(facts.values(), false);
  }

  @Override
  public int putIfAbsent(TimeAndDims key, int rowIndex)
  {
    // computeIfAbsent atomically creates the per-timestamp deque on first
    // use, replacing the previous get/putIfAbsent/get sequence that needed a
    // second lookup to resolve races.
    Deque<TimeAndDims> rows = facts.computeIfAbsent(key.getTimestamp(), t -> new ConcurrentLinkedDeque<>());
    // setRowIndex() must be called before rows.add() for visibility of rowIndex from concurrent readers.
    key.setRowIndex(rowIndex);
    rows.add(key);
    // always return EMPTY_ROW_INDEX to indicate that we always add new row
    return TimeAndDims.EMPTY_ROW_INDEX;
  }

  @Override
  public void clear()
  {
    facts.clear();
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.crypto.key;
import java.io.IOException;
import java.security.InvalidParameterException;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider.Metadata;
import org.apache.hadoop.crypto.key.KeyProvider.Options;
import org.apache.hadoop.tools.CommandShell;
import org.apache.hadoop.util.ToolRunner;
/**
* This program is the CLI utility for the KeyProvider facilities in Hadoop.
*/
public class KeyShell extends CommandShell {
final static private String USAGE_PREFIX = "Usage: hadoop key " +
"[generic options]\n";
final static private String COMMANDS =
" [-help]\n" +
" [" + CreateCommand.USAGE + "]\n" +
" [" + RollCommand.USAGE + "]\n" +
" [" + DeleteCommand.USAGE + "]\n" +
" [" + ListCommand.USAGE + "]\n" +
" [" + InvalidateCacheCommand.USAGE + "]\n";
private static final String LIST_METADATA = "keyShell.list.metadata";
@VisibleForTesting
public static final String NO_VALID_PROVIDERS =
"There are no valid (non-transient) providers configured.\n" +
"No action has been taken. Use the -provider option to specify\n" +
"a provider. If you want to use a transient provider then you\n" +
"MUST use the -provider argument.";
private boolean interactive = true;
/** If true, fail if the provider requires a password and none is given. */
private boolean strict = false;
private boolean userSuppliedProvider = false;
/**
* Parse the command line arguments and initialize the data.
* <pre>
* % hadoop key create keyName [-size size] [-cipher algorithm]
* [-provider providerPath]
* % hadoop key roll keyName [-provider providerPath]
* % hadoop key list [-provider providerPath]
* % hadoop key delete keyName [-provider providerPath] [-i]
* % hadoop key invalidateCache keyName [-provider providerPath]
* </pre>
* @param args Command line arguments.
* @return 0 on success, 1 on failure.
* @throws IOException
*/
@Override
protected int init(String[] args) throws IOException {
final Options options = KeyProvider.options(getConf());
final Map<String, String> attributes = new HashMap<String, String>();
for (int i = 0; i < args.length; i++) { // parse command line
boolean moreTokens = (i < args.length - 1);
if (args[i].equals("create")) {
String keyName = "-help";
if (moreTokens) {
keyName = args[++i];
}
setSubCommand(new CreateCommand(keyName, options));
if ("-help".equals(keyName)) {
return 1;
}
} else if (args[i].equals("delete")) {
String keyName = "-help";
if (moreTokens) {
keyName = args[++i];
}
setSubCommand(new DeleteCommand(keyName));
if ("-help".equals(keyName)) {
return 1;
}
} else if (args[i].equals("roll")) {
String keyName = "-help";
if (moreTokens) {
keyName = args[++i];
}
setSubCommand(new RollCommand(keyName));
if ("-help".equals(keyName)) {
return 1;
}
} else if ("list".equals(args[i])) {
setSubCommand(new ListCommand());
} else if ("invalidateCache".equals(args[i])) {
String keyName = "-help";
if (moreTokens) {
keyName = args[++i];
}
setSubCommand(new InvalidateCacheCommand(keyName));
if ("-help".equals(keyName)) {
return 1;
}
} else if ("-size".equals(args[i]) && moreTokens) {
options.setBitLength(Integer.parseInt(args[++i]));
} else if ("-cipher".equals(args[i]) && moreTokens) {
options.setCipher(args[++i]);
} else if ("-description".equals(args[i]) && moreTokens) {
options.setDescription(args[++i]);
} else if ("-attr".equals(args[i]) && moreTokens) {
final String attrval[] = args[++i].split("=", 2);
final String attr = attrval[0].trim();
final String val = attrval[1].trim();
if (attr.isEmpty() || val.isEmpty()) {
getOut().println("\nAttributes must be in attribute=value form, " +
"or quoted\nlike \"attribute = value\"\n");
return 1;
}
if (attributes.containsKey(attr)) {
getOut().println("\nEach attribute must correspond to only one " +
"value:\nattribute \"" + attr + "\" was repeated\n");
return 1;
}
attributes.put(attr, val);
} else if ("-provider".equals(args[i]) && moreTokens) {
userSuppliedProvider = true;
getConf().set(KeyProviderFactory.KEY_PROVIDER_PATH, args[++i]);
} else if ("-metadata".equals(args[i])) {
getConf().setBoolean(LIST_METADATA, true);
} else if ("-f".equals(args[i]) || ("-force".equals(args[i]))) {
interactive = false;
} else if (args[i].equals("-strict")) {
strict = true;
} else if ("-help".equals(args[i])) {
return 1;
} else {
ToolRunner.printGenericCommandUsage(getErr());
return 1;
}
}
if (!attributes.isEmpty()) {
options.setAttributes(attributes);
}
return 0;
}
@Override
public String getCommandUsage() {
StringBuffer sbuf = new StringBuffer(USAGE_PREFIX + COMMANDS);
String banner = StringUtils.repeat("=", 66);
sbuf.append(banner + "\n");
sbuf.append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n");
sbuf.append(banner + "\n");
sbuf.append(RollCommand.USAGE + ":\n\n" + RollCommand.DESC + "\n");
sbuf.append(banner + "\n");
sbuf.append(DeleteCommand.USAGE + ":\n\n" + DeleteCommand.DESC + "\n");
sbuf.append(banner + "\n");
sbuf.append(ListCommand.USAGE + ":\n\n" + ListCommand.DESC + "\n");
sbuf.append(banner + "\n");
sbuf.append(InvalidateCacheCommand.USAGE + ":\n\n"
+ InvalidateCacheCommand.DESC + "\n");
return sbuf.toString();
}
private abstract class Command extends SubCommand {
protected KeyProvider provider = null;
protected KeyProvider getKeyProvider() {
KeyProvider prov = null;
List<KeyProvider> providers;
try {
providers = KeyProviderFactory.getProviders(getConf());
if (userSuppliedProvider) {
prov = providers.get(0);
} else {
for (KeyProvider p : providers) {
if (!p.isTransient()) {
prov = p;
break;
}
}
}
} catch (IOException e) {
e.printStackTrace(getErr());
}
if (prov == null) {
getOut().println(NO_VALID_PROVIDERS);
}
return prov;
}
protected void printProviderWritten() {
getOut().println(provider + " has been updated.");
}
protected void warnIfTransientProvider() {
if (provider.isTransient()) {
getOut().println("WARNING: you are modifying a transient provider.");
}
}
public abstract void execute() throws Exception;
public abstract String getUsage();
}
private class ListCommand extends Command {
public static final String USAGE =
"list [-provider <provider>] [-strict] [-metadata] [-help]";
public static final String DESC =
"The list subcommand displays the keynames contained within\n" +
"a particular provider as configured in core-site.xml or\n" +
"specified with the -provider argument. -metadata displays\n" +
"the metadata. If -strict is supplied, fail immediately if\n" +
"the provider requires a password and none is given.";
private boolean metadata = false;
public boolean validate() {
boolean rc = true;
provider = getKeyProvider();
if (provider == null) {
rc = false;
}
metadata = getConf().getBoolean(LIST_METADATA, false);
return rc;
}
public void execute() throws IOException {
try {
final List<String> keys = provider.getKeys();
getOut().println("Listing keys for KeyProvider: " + provider);
if (metadata) {
final Metadata[] meta =
provider.getKeysMetadata(keys.toArray(new String[keys.size()]));
for (int i = 0; i < meta.length; ++i) {
getOut().println(keys.get(i) + " : " + meta[i]);
}
} else {
for (String keyName : keys) {
getOut().println(keyName);
}
}
} catch (IOException e) {
getOut().println("Cannot list keys for KeyProvider: " + provider);
throw e;
}
}
@Override
public String getUsage() {
return USAGE + ":\n\n" + DESC;
}
}
private class RollCommand extends Command {
public static final String USAGE =
"roll <keyname> [-provider <provider>] [-strict] [-help]";
public static final String DESC =
"The roll subcommand creates a new version for the specified key\n" +
"within the provider indicated using the -provider argument.\n" +
"If -strict is supplied, fail immediately if the provider requires\n" +
"a password and none is given.";
private String keyName = null;
public RollCommand(String keyName) {
this.keyName = keyName;
}
public boolean validate() {
boolean rc = true;
provider = getKeyProvider();
if (provider == null) {
rc = false;
}
if (keyName == null) {
getOut().println("Please provide a <keyname>.\n" +
"See the usage description by using -help.");
rc = false;
}
return rc;
}
public void execute() throws NoSuchAlgorithmException, IOException {
try {
warnIfTransientProvider();
getOut().println("Rolling key version from KeyProvider: "
+ provider + "\n for key name: " + keyName);
try {
provider.rollNewVersion(keyName);
provider.flush();
getOut().println(keyName + " has been successfully rolled.");
printProviderWritten();
} catch (NoSuchAlgorithmException e) {
getOut().println("Cannot roll key: " + keyName +
" within KeyProvider: " + provider + ".");
throw e;
}
} catch (IOException e1) {
getOut().println("Cannot roll key: " + keyName + " within KeyProvider: "
+ provider + ".");
throw e1;
}
}
@Override
public String getUsage() {
return USAGE + ":\n\n" + DESC;
}
}
private class DeleteCommand extends Command {
public static final String USAGE =
"delete <keyname> [-provider <provider>] [-strict] [-f] [-help]";
public static final String DESC =
"The delete subcommand deletes all versions of the key\n" +
"specified by the <keyname> argument from within the\n" +
"provider specified by -provider. The command asks for\n" +
"user confirmation unless -f is specified. If -strict is\n" +
"supplied, fail immediately if the provider requires a\n" +
"password and none is given.";
private String keyName = null;
private boolean cont = true;
public DeleteCommand(String keyName) {
this.keyName = keyName;
}
@Override
public boolean validate() {
provider = getKeyProvider();
if (provider == null) {
return false;
}
if (keyName == null) {
getOut().println("There is no keyName specified. Please specify a " +
"<keyname>. See the usage description with -help.");
return false;
}
if (interactive) {
try {
cont = ToolRunner
.confirmPrompt("You are about to DELETE all versions of "
+ " key " + keyName + " from KeyProvider "
+ provider + ". Continue? ");
if (!cont) {
getOut().println(keyName + " has not been deleted.");
}
return cont;
} catch (IOException e) {
getOut().println(keyName + " will not be deleted. "
+ prettifyException(e));
}
}
return true;
}
public void execute() throws IOException {
warnIfTransientProvider();
getOut().println("Deleting key: " + keyName + " from KeyProvider: "
+ provider);
if (cont) {
try {
provider.deleteKey(keyName);
provider.flush();
getOut().println(keyName + " has been successfully deleted.");
printProviderWritten();
} catch (IOException e) {
getOut().println(keyName + " has not been deleted.");
throw e;
}
}
}
@Override
public String getUsage() {
return USAGE + ":\n\n" + DESC;
}
}
private class CreateCommand extends Command {
public static final String USAGE =
"create <keyname> [-cipher <cipher>] [-size <size>]\n" +
" [-description <description>]\n" +
" [-attr <attribute=value>]\n" +
" [-provider <provider>] [-strict]\n" +
" [-help]";
public static final String DESC =
"The create subcommand creates a new key for the name specified\n" +
"by the <keyname> argument within the provider specified by the\n" +
"-provider argument. You may specify a cipher with the -cipher\n" +
"argument. The default cipher is currently \"AES/CTR/NoPadding\".\n" +
"The default keysize is 128. You may specify the requested key\n" +
"length using the -size argument. Arbitrary attribute=value\n" +
"style attributes may be specified using the -attr argument.\n" +
"-attr may be specified multiple times, once per attribute.\n";
private final String keyName;
private final Options options;
public CreateCommand(String keyName, Options options) {
this.keyName = keyName;
this.options = options;
}
public boolean validate() {
boolean rc = true;
try {
provider = getKeyProvider();
if (provider == null) {
rc = false;
} else if (provider.needsPassword()) {
if (strict) {
getOut().println(provider.noPasswordError());
rc = false;
} else {
getOut().println(provider.noPasswordWarning());
}
}
} catch (IOException e) {
e.printStackTrace(getErr());
}
if (keyName == null) {
getOut().println("Please provide a <keyname>. " +
" See the usage description with -help.");
rc = false;
}
return rc;
}
public void execute() throws IOException, NoSuchAlgorithmException {
warnIfTransientProvider();
try {
provider.createKey(keyName, options);
provider.flush();
getOut().println(keyName + " has been successfully created " +
"with options " + options.toString() + ".");
printProviderWritten();
} catch (InvalidParameterException e) {
getOut().println(keyName + " has not been created.");
throw e;
} catch (IOException e) {
getOut().println(keyName + " has not been created.");
throw e;
} catch (NoSuchAlgorithmException e) {
getOut().println(keyName + " has not been created.");
throw e;
}
}
@Override
public String getUsage() {
return USAGE + ":\n\n" + DESC;
}
}
private class InvalidateCacheCommand extends Command {
public static final String USAGE =
"invalidateCache <keyname> [-provider <provider>] [-help]";
public static final String DESC =
"The invalidateCache subcommand invalidates the cached key versions\n"
+ "of the specified key, on the provider indicated using the"
+ " -provider argument.\n";
private String keyName = null;
InvalidateCacheCommand(String keyName) {
this.keyName = keyName;
}
public boolean validate() {
boolean rc = true;
provider = getKeyProvider();
if (provider == null) {
getOut().println("Invalid provider.");
rc = false;
}
if (keyName == null) {
getOut().println("Please provide a <keyname>.\n" +
"See the usage description by using -help.");
rc = false;
}
return rc;
}
public void execute() throws NoSuchAlgorithmException, IOException {
try {
warnIfTransientProvider();
getOut().println("Invalidating cache on KeyProvider: "
+ provider + "\n for key name: " + keyName);
provider.invalidateCache(keyName);
getOut().println("Cached keyversions of " + keyName
+ " has been successfully invalidated.");
printProviderWritten();
} catch (IOException e) {
getOut().println("Cannot invalidate cache for key: " + keyName +
" within KeyProvider: " + provider + ".");
throw e;
}
}
@Override
public String getUsage() {
return USAGE + ":\n\n" + DESC;
}
}
@Override
protected void printException(Exception e){
getErr().println("Executing command failed with " +
"the following exception: " + prettifyException(e));
}
private String prettifyException(Exception e) {
return e.getClass().getSimpleName() + ": " +
e.getLocalizedMessage().split("\n")[0];
}
/**
* main() entry point for the KeyShell. While strictly speaking the
* return is void, it will System.exit() with a return code: 0 is for
* success and 1 for failure.
*
* @param args Command line arguments.
* @throws Exception
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new KeyShell(), args);
System.exit(res);
}
}
| |
/*
Derby - Class org.apache.derby.impl.store.raw.data.RFResource
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.store.raw.data;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.services.context.ContextManager;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.services.daemon.Serviceable;
import org.apache.derby.iapi.store.access.AccessFactoryGlobals;
import org.apache.derby.iapi.store.access.FileResource;
import org.apache.derby.iapi.store.raw.xact.RawTransaction;
import org.apache.derby.io.StorageFile;
class RFResource implements FileResource {
private final BaseDataFileFactory factory;
/**
 * Creates a FileResource implementation backed by the given data file
 * factory, which supplies generation ids, storage access, and read-only
 * state for all operations.
 */
RFResource(BaseDataFileFactory dataFactory) {
    this.factory = dataFactory;
}
/**
  Copies the jar bytes from {@code source} into a new generation file and
  returns the new generation id. Fails if the database is read-only or a
  file for the generated id already exists; the copy is synced to stable
  storage before this method returns.

  @see FileResource#add
  @exception StandardException Oops
*/
public long add(String name, InputStream source)
    throws StandardException
{
    OutputStream os = null;

    if (factory.isReadOnly())
    {
        throw StandardException.newException(SQLState.FILE_READ_ONLY);
    }

    // each add gets a fresh generation id; the id names the on-disk file
    long generationId = factory.getNextId();

    try
    {
        StorageFile file = getAsFile(name, generationId);
        if (file.exists())
        {
            throw StandardException.newException(
                SQLState.FILE_EXISTS, file);
        }

        ContextManager cm =
            ContextService.getFactory().getCurrentContextManager();

        RawTransaction tran =
            factory.getRawStoreFactory().getXactFactory().findUserTransaction(
                factory.getRawStoreFactory(),
                cm,
                AccessFactoryGlobals.USER_TRANS_NAME);

        // Block the backup, If backup is already in progress wait
        // for the backup to finish. Jar files are unlogged but the
        // changes to the references to the jar file in the catalogs
        // is logged. A consistent backup can not be made when jar file
        // is being added.

        tran.blockBackup(true);

        // create the parent segment directory on first use
        StorageFile directory = file.getParentDir();
        if (!directory.exists())
        {
            if (!directory.mkdirs())
            {
                throw StandardException.newException(
                    SQLState.FILE_CANNOT_CREATE_SEGMENT, directory);
            }
        }

        os = file.getOutputStream();
        byte[] data = new byte[4096];
        int len;

        // writeInProgress/writeFinished bracket the copy so the factory can
        // track outstanding writes; sync() forces the bytes to stable storage
        factory.writeInProgress();
        try
        {
            while ((len = source.read(data)) != -1) {
                os.write(data, 0, len);
            }
            factory.writableStorageFactory.sync( os, false);
        }
        finally
        {
            factory.writeFinished();
        }
    }
    catch (IOException ioe)
    {
        throw StandardException.newException(
            SQLState.FILE_UNEXPECTED_EXCEPTION, ioe);
    }
    finally
    {
        // best-effort close of both streams; failures here do not invalidate
        // the already-synced file
        try {
            if (os != null) {
                os.close();
            }
        } catch (IOException ioe2) {/*RESOLVE: Why ignore this?*/}

        try {
            if (source != null)source.close();
        } catch (IOException ioe2) {/* RESOLVE: Why ignore this?*/}
    }

    return generationId;
}
/**
@see FileResource#remove
@exception StandardException Oops
*/
public void remove(String name, long currentGenerationId)
throws StandardException
{
if (factory.isReadOnly())
throw StandardException.newException(SQLState.FILE_READ_ONLY);
ContextManager cm = ContextService.getFactory().getCurrentContextManager();
RawTransaction tran =
factory.getRawStoreFactory().getXactFactory().findUserTransaction(
factory.getRawStoreFactory(),
cm,
AccessFactoryGlobals.USER_TRANS_NAME);
// Block the backup, If backup is already in progress wait
// for the backup to finish. Jar files are unlogged but the
// changes to the references to the jar file in the catalogs
// is logged. A consistent backup can not be made when jar file
// is being removed.
tran.blockBackup(true);
tran.logAndDo(new RemoveFileOperation(name, currentGenerationId, true));
Serviceable s = new RemoveFile(getAsFile(name, currentGenerationId));
tran.addPostCommitWork(s);
}
/**
@see FileResource#replace
@exception StandardException Oops
*/
public long replace(String name, long currentGenerationId, InputStream source)
throws StandardException
{
if (factory.isReadOnly())
throw StandardException.newException(SQLState.FILE_READ_ONLY);
remove(name, currentGenerationId);
long generationId = add(name, source);
return generationId;
}
/**
@see FileResource#getAsFile
*/
public StorageFile getAsFile(String name, long generationId)
{
String versionedFileName = factory.getVersionedName(name, generationId);
return factory.storageFactory.newStorageFile( versionedFileName);
}
public char getSeparatorChar()
{
return factory.storageFactory.getSeparator();
}
} // end of class RFResource
/**
 * Post-commit work item that physically deletes a storage file.
 * <p>
 * Implements {@link PrivilegedExceptionAction} so the actual deletion in
 * {@link #run()} executes inside {@code AccessController.doPrivileged}.
 */
final class RemoveFile implements Serviceable, PrivilegedExceptionAction
{
// The file to be deleted when this work item is serviced.
private final StorageFile fileToGo;
RemoveFile(StorageFile fileToGo)
{
this.fileToGo = fileToGo;
}
public int performWork(ContextManager context)
throws StandardException
{
try {
AccessController.doPrivileged(this);
} catch (PrivilegedActionException e) {
// run() only throws StandardException, so this cast is safe.
throw (StandardException) (e.getException());
}
return Serviceable.DONE;
}
public boolean serviceASAP()
{
// No urgency: deletion can wait until the daemon gets to it.
return false;
}
/**
* File deletion is a quick operation and typically releases substantial
* amount of space very quickly, this work should be done on the
* user thread.
* @return true, this work needs to done on user thread.
*/
public boolean serviceImmediately()
{
return true;
}
public Object run() throws StandardException {
// SECURITY PERMISSION - MP1, OP5
// Deleting an already-absent file is treated as success; a failed
// delete of an existing file is an error.
if (fileToGo.exists()) {
if (!fileToGo.delete()) {
throw StandardException.newException(
SQLState.FILE_CANNOT_REMOVE_FILE, fileToGo);
}
}
return null;
}
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.system;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
/**
* <code>AppSettings</code> provides a store of configuration
* to be used by the application.
* <p>
* By default only the {@link JmeContext context} uses the configuration,
* however the user may set and retrieve the settings as well.
* The settings can be stored either in the Java preferences
* (using {@link #save(java.lang.String) } or
* a .properties file (using {@link #save(java.io.OutputStream) }.
*
* @author Kirill Vainer
*/
public final class AppSettings extends HashMap<String, Object> {
private static final AppSettings defaults = new AppSettings(false);
/**
* Use LWJGL as the display system and force using the OpenGL2.0 renderer.
* <p>
* If the underlying system does not support OpenGL2.0, then the context
* initialization will throw an exception.
*
* @see AppSettings#setRenderer(java.lang.String)
*/
public static final String LWJGL_OPENGL2 = "LWJGL-OpenGL2";
/**
 * Use LWJGL as the display system and force using the core OpenGL3.3 renderer.
 * <p>
 * If the underlying system does not support OpenGL3.2, then the context
 * initialization will throw an exception. Note that currently jMonkeyEngine
 * does not have any shaders that support OpenGL3.2 therefore this
 * option is not useful.
 * <p>
 * Note: OpenGL 3.2 is used to give 3.x support to Mac users.
 * <p>
 * NOTE(review): this javadoc mixes "OpenGL3.3" and "OpenGL3.2" — confirm
 * which core profile version this renderer string actually requests.
 *
 * @see AppSettings#setRenderer(java.lang.String)
 */
public static final String LWJGL_OPENGL3 = "LWJGL-OpenGL3";
/**
* Use the LWJGL OpenAL based renderer for audio capabilities.
*
* @see AppSettings#setAudioRenderer(java.lang.String)
*/
public static final String LWJGL_OPENAL = "LWJGL";
/**
* Use the Android MediaPlayer / SoundPool based renderer for Android audio capabilities.
* <p>
* NOTE: Supports Android 2.2+ platforms.
*
* @see AppSettings#setAudioRenderer(java.lang.String)
* @deprecated This audio renderer has too many limitations.
* use {@link #ANDROID_OPENAL_SOFT} instead.
*/
@Deprecated
public static final String ANDROID_MEDIAPLAYER = "MediaPlayer";
/**
* Use the OpenAL Soft based renderer for Android audio capabilities.
* <p>
* This is the current default for Android platforms.
* NOTE: Only to be used on Android 2.3+ platforms due to using OpenSL.
*
* @see AppSettings#setAudioRenderer(java.lang.String)
*/
public static final String ANDROID_OPENAL_SOFT = "OpenAL_SOFT";
static {
defaults.put("Width", 640);
defaults.put("Height", 480);
defaults.put("BitsPerPixel", 24);
defaults.put("Frequency", 60);
defaults.put("DepthBits", 24);
defaults.put("StencilBits", 0);
defaults.put("Samples", 0);
defaults.put("Fullscreen", false);
defaults.put("Title", "jMonkey Engine 3.0");
defaults.put("Renderer", LWJGL_OPENGL2);
defaults.put("AudioRenderer", LWJGL_OPENAL);
defaults.put("DisableJoysticks", true);
defaults.put("UseInput", true);
defaults.put("VSync", false);
defaults.put("FrameRate", -1);
defaults.put("SettingsDialogImage", "/com/jme3/app/Monkey.png");
defaults.put("MinHeight", 0);
defaults.put("MinWidth", 0);
defaults.put("GammaCorrection", false);
defaults.put("Resizable", false);
defaults.put("SwapBuffers", true);
// defaults.put("Icons", null);
}
/**
* Create a new instance of <code>AppSettings</code>.
* <p>
* If <code>loadDefaults</code> is true, then the default settings
* will be set on the AppSettings.
* Use false if you want to change some settings but you would like the
* application to load settings from previous launches.
*
* @param loadDefaults If default settings are to be loaded.
*/
public AppSettings(boolean loadDefaults) {
if (loadDefaults) {
putAll(defaults);
}
}
/**
* Copies all settings from <code>other</code> to <code>this</code>
* AppSettings.
* <p>
* Any settings that are specified in other will overwrite settings
* set on this AppSettings.
*
* @param other The AppSettings to copy the settings from
*/
public void copyFrom(AppSettings other) {
this.putAll(other);
}
/**
* Same as {@link #copyFrom(com.jme3.system.AppSettings) }, except
* doesn't overwrite settings that are already set.
*
* @param other The AppSettings to merge the settings from
*/
public void mergeFrom(AppSettings other) {
for (String key : other.keySet()) {
if (get(key) == null) {
put(key, other.get(key));
}
}
}
/**
 * Loads the settings from the given properties input stream.
 * <p>
 * Each property key must end with a type suffix — {@code (int)},
 * {@code (string)}, {@code (bool)} or {@code (float)} — which is stripped
 * and used to parse the value into the matching Java type. Keys without a
 * recognized suffix cause an {@link IOException}.
 *
 * @param in The InputStream to load from
 * @throws IOException If an IOException occurs
 *
 * @see #save(java.io.OutputStream)
 */
public void load(InputStream in) throws IOException {
Properties props = new Properties();
props.load(in);
for (Map.Entry<Object, Object> entry : props.entrySet()) {
String key = (String) entry.getKey();
String val = (String) entry.getValue();
if (val != null) {
val = val.trim();
}
// Dispatch on the type suffix; the suffix length is subtracted from
// the key to recover the plain setting name.
if (key.endsWith("(int)")) {
key = key.substring(0, key.length() - 5);
int iVal = Integer.parseInt(val);
putInteger(key, iVal);
} else if (key.endsWith("(string)")) {
putString(key.substring(0, key.length() - 8), val);
} else if (key.endsWith("(bool)")) {
boolean bVal = Boolean.parseBoolean(val);
putBoolean(key.substring(0, key.length() - 6), bVal);
} else if (key.endsWith("(float)")) {
float fVal = Float.parseFloat(val);
putFloat(key.substring(0, key.length() - 7), fVal);
} else {
throw new IOException("Cannot parse key: " + key);
}
}
}
/**
* Saves all settings to the given properties output stream.
*
* @param out The OutputStream to write to
* @throws IOException If an IOException occurs
*
* @see #load(java.io.InputStream)
*/
public void save(OutputStream out) throws IOException {
Properties props = new Properties();
for (Map.Entry<String, Object> entry : entrySet()) {
Object val = entry.getValue();
String type;
if (val instanceof Integer) {
type = "(int)";
} else if (val instanceof String) {
type = "(string)";
} else if (val instanceof Boolean) {
type = "(bool)";
} else if (val instanceof Float) {
type = "(float)";
} else {
// See the note in the AppSettings.save(String)
// method regarding object type settings.
continue;
}
props.setProperty(entry.getKey() + type, val.toString());
}
props.store(out, "jME3 AppSettings");
}
/**
* Loads settings previously saved in the Java preferences.
*
* @param preferencesKey The preferencesKey previously used to save the settings.
* @throws BackingStoreException If an exception occurs with the preferences
*
* @see #save(java.lang.String)
*/
public void load(String preferencesKey) throws BackingStoreException {
Preferences prefs = Preferences.userRoot().node(preferencesKey);
String[] keys = prefs.keys();
if (keys != null) {
for (String key : keys) {
if (key.charAt(1) == '_') {
// Try loading using new method
switch (key.charAt(0)) {
case 'I':
put(key.substring(2), prefs.getInt(key, (Integer) 0));
break;
case 'F':
put(key.substring(2), prefs.getFloat(key, (Float) 0f));
break;
case 'S':
put(key.substring(2), prefs.get(key, (String) null));
break;
case 'B':
put(key.substring(2), prefs.getBoolean(key, (Boolean) false));
break;
default:
throw new UnsupportedOperationException("Undefined setting type: " + key.charAt(0));
}
} else {
// Use old method for compatibility with older preferences
// TODO: Remove when no longer neccessary
Object defaultValue = defaults.get(key);
if (defaultValue instanceof Integer) {
put(key, prefs.getInt(key, (Integer) defaultValue));
} else if (defaultValue instanceof String) {
put(key, prefs.get(key, (String) defaultValue));
} else if (defaultValue instanceof Boolean) {
put(key, prefs.getBoolean(key, (Boolean) defaultValue));
}
}
}
}
}
/**
* Saves settings into the Java preferences.
* <p>
* On the Windows operating system, the preferences are saved in the registry
* at the following key:<br>
* <code>HKEY_CURRENT_USER\Software\JavaSoft\Prefs\[preferencesKey]</code>
*
* @param preferencesKey The preferences key to save at. Generally the
* application's unique name.
*
* @throws BackingStoreException If an exception occurs with the preferences
*/
public void save(String preferencesKey) throws BackingStoreException {
Preferences prefs = Preferences.userRoot().node(preferencesKey);
// Clear any previous settings set before saving, this will
// purge any other parameters set in older versions of the app, so
// that they don't leak onto the AppSettings of newer versions.
prefs.clear();
for (String key : keySet()) {
Object val = get(key);
if (val instanceof Integer) {
prefs.putInt("I_" + key, (Integer) val);
} else if (val instanceof Float) {
prefs.putFloat("F_" + key, (Float) val);
} else if (val instanceof String) {
prefs.put("S_" + key, (String) val);
} else if (val instanceof Boolean) {
prefs.putBoolean("B_" + key, (Boolean) val);
}
// NOTE: Ignore any parameters of unsupported types instead
// of throwing exception. This is specifically for handling
// BufferedImage which is used in setIcons(), as you do not
// want to export such data in the preferences.
}
// Ensure the data is properly written into preferences before
// continuing.
prefs.sync();
}
/**
* Get an integer from the settings.
* <p>
* If the key is not set, then 0 is returned.
*/
public int getInteger(String key) {
Integer i = (Integer) get(key);
if (i == null) {
return 0;
}
return i.intValue();
}
/**
* Get a boolean from the settings.
* <p>
* If the key is not set, then false is returned.
*/
public boolean getBoolean(String key) {
Boolean b = (Boolean) get(key);
if (b == null) {
return false;
}
return b.booleanValue();
}
/**
 * Get a string from the settings.
 * <p>
 * If the key is not set, then null is returned.
 *
 * @param key the name of the setting
 * @return the stored string value, or null if the key is not set
 */
public String getString(String key) {
    // Casting null to String yields null, so the previous explicit
    // null check ("if (s == null) return null;") was redundant.
    return (String) get(key);
}
/**
* Get a float from the settings.
* <p>
* If the key is not set, then 0.0 is returned.
*/
public float getFloat(String key) {
Float f = (Float) get(key);
if (f == null) {
return 0f;
}
return f.floatValue();
}
/**
* Set an integer on the settings.
*/
public void putInteger(String key, int value) {
put(key, Integer.valueOf(value));
}
/**
* Set a boolean on the settings.
*/
public void putBoolean(String key, boolean value) {
put(key, Boolean.valueOf(value));
}
/**
* Set a string on the settings.
*/
public void putString(String key, String value) {
put(key, value);
}
/**
* Set a float on the settings.
*/
public void putFloat(String key, float value) {
put(key, Float.valueOf(value));
}
/**
* Enable or disable mouse emulation on touchscreen based devices.
* This will convert taps on the touchscreen or movement of finger
* over touchscreen (only the first) into the appropriate mouse events.
*
* @param emulateMouse If mouse emulation should be enabled.
*/
public void setEmulateMouse(boolean emulateMouse) {
putBoolean("TouchEmulateMouse", emulateMouse);
}
/**
* Returns true if mouse emulation is enabled, false otherwise.
*
* @return Mouse emulation mode.
*/
public boolean isEmulateMouse() {
return getBoolean("TouchEmulateMouse");
}
/**
* Specify if the X or Y (or both) axes should be flipped for emulated mouse.
*
* @param flipX Set to flip X axis
* @param flipY Set to flip Y axis
*
* @see #setEmulateMouse(boolean)
*/
public void setEmulateMouseFlipAxis(boolean flipX, boolean flipY) {
putBoolean("TouchEmulateMouseFlipX", flipX);
putBoolean("TouchEmulateMouseFlipY", flipY);
}
public boolean isEmulateMouseFlipX() {
return getBoolean("TouchEmulateMouseFlipX");
}
public boolean isEmulateMouseFlipY() {
return getBoolean("TouchEmulateMouseFlipY");
}
/**
* Enable or disable keyboard emulation on touchscreen based devices.
* This will convert soft keyboard key presses on the touchscreen
* into the appropriate key events.
*
* @param emulateKeyboard If soft keyboard emulation should be enabled.
*/
public void setEmulateKeyboard(boolean emulateKeyboard) {
putBoolean("TouchEmulateKeyboard", emulateKeyboard);
}
/**
* Returns true if keyboard emulation is enabled, false otherwise.
*
* @return Soft keyboard emulation mode.
*/
public boolean isEmulateKeyboard() {
return getBoolean("TouchEmulateKeyboard");
}
/**
* @param frameRate The frame-rate is the upper limit on how high
* the application's frames-per-second can go.
* (Default: -1 no frame rate limit imposed)
*/
public void setFrameRate(int frameRate) {
putInteger("FrameRate", frameRate);
}
/**
* @param use If true, the application will initialize and use input.
* Set to false for headless applications that do not require keyboard
* or mouse input.
* (Default: true)
*/
public void setUseInput(boolean use) {
putBoolean("UseInput", use);
}
/**
* @param use If true, the application will initialize and use joystick
* input. Set to false if no joystick input is desired.
* (Default: false)
*/
public void setUseJoysticks(boolean use) {
putBoolean("DisableJoysticks", !use);
}
/**
* Set the graphics renderer to use, one of:<br>
* <ul>
* <li>AppSettings.LWJGL_OPENGL1 - Force OpenGL1.1 compatability</li>
* <li>AppSettings.LWJGL_OPENGL2 - Force OpenGL2 compatability</li>
* <li>AppSettings.LWJGL_OPENGL3 - Force OpenGL3.3 compatability</li>
* <li>AppSettings.LWJGL_OPENGL_ANY - Choose an appropriate
* OpenGL version based on system capabilities</li>
* <li>null - Disable graphics rendering</li>
* </ul>
* @param renderer The renderer to set
* (Default: AppSettings.LWJGL_OPENGL2)
*/
public void setRenderer(String renderer) {
putString("Renderer", renderer);
}
/**
* Set a custom graphics renderer to use. The class should implement
* the {@link JmeContext} interface.
* @param clazz The custom context class.
* (Default: not set)
*/
public void setCustomRenderer(Class<? extends JmeContext> clazz){
put("Renderer", "CUSTOM" + clazz.getName());
}
/**
* Set the audio renderer to use. One of:<br>
* <ul>
* <li>AppSettings.LWJGL_OPENAL - Default for LWJGL</li>
* <li>null - Disable audio</li>
* </ul>
* @param audioRenderer
* (Default: LWJGL)
*/
public void setAudioRenderer(String audioRenderer) {
putString("AudioRenderer", audioRenderer);
}
/**
* @param value the width for the rendering display.
* (Default: 640)
*/
public void setWidth(int value) {
putInteger("Width", value);
}
/**
* @param value the height for the rendering display.
* (Default: 480)
*/
public void setHeight(int value) {
putInteger("Height", value);
}
/**
* Set the resolution for the rendering display
* @param width The width
* @param height The height
* (Default: 640x480)
*/
public void setResolution(int width, int height) {
setWidth(width);
setHeight(height);
}
/**
* @param value the minimum width the settings window will allow for the rendering display.
* (Default: 0)
*/
public void setMinWidth(int value) {
putInteger("MinWidth", value);
}
/**
* @param value the minimum height the settings window will allow for the rendering display.
* (Default: 0)
*/
public void setMinHeight(int value) {
putInteger("MinHeight", value);
}
/**
* Set the minimum resolution the settings window will allow for the rendering display
* @param width The minimum width
* @param height The minimum height
* (Default: 0x0)
*/
public void setMinResolution(int width, int height) {
setMinWidth(width);
setMinHeight(height);
}
/**
* Set the frequency, also known as refresh rate, for the
* rendering display.
* @param value The frequency
* (Default: 60)
*/
public void setFrequency(int value) {
putInteger("Frequency", value);
}
/**
* Sets the number of depth bits to use.
* <p>
* The number of depth bits specifies the precision of the depth buffer.
* To increase precision, specify 32 bits. To decrease precision, specify
* 16 bits. On some platforms 24 bits might not be supported, in that case,
* specify 16 bits.<p>
* (Default: 24)
*
* @param value The depth bits
*/
public void setDepthBits(int value){
putInteger("DepthBits", value);
}
/**
* Android Only
* Sets the number of alpha bits to use.
* <p>
* The number of alpha bits specifies the precision of the surface view
* background alpha value. To set the surface view to opaque (fastest setting),
* leave the number of alpha bits = 0. This will cause faster rendering,
* but android views located behind the surface view will not be viewable.
* To set the surface view to translucent, set the number of alphaBits to 8
* or higher. Values less than 8 (except 0) will set the surface view pixel
* format to transparent. <p>
* (Default: 0)
*
* @param value The alpha bits
*/
public void setAlphaBits(int value){
putInteger("AlphaBits", value);
}
/**
* Set the number of stencil bits.
* <p>
* This value is only relevant when the stencil buffer is being used.
* Specify 8 to indicate an 8-bit stencil buffer, specify 0 to disable
* the stencil buffer.
* </p>
* (Default: 0)
*
* @param value Number of stencil bits
*/
public void setStencilBits(int value){
putInteger("StencilBits", value);
}
/**
* Set the bits per pixel for the display. Appropriate
* values are 16 for RGB565 color format, or 24 for RGB8 color format.
*
* @param value The bits per pixel to set
* (Default: 24)
*/
public void setBitsPerPixel(int value) {
putInteger("BitsPerPixel", value);
}
/**
 * Set the number of samples per pixel. A value of 1 indicates
 * each pixel should be single-sampled, higher values indicate
 * a pixel should be multi-sampled.
 *
 * @param value The number of samples
 * (Default: 0, as set in the static defaults map)
 */
public void setSamples(int value) {
putInteger("Samples", value);
}
/**
 * @param title The title of the rendering display
 * (Default: "jMonkey Engine 3.0", as set in the static defaults map)
 */
public void setTitle(String title) {
putString("Title", title);
}
/**
* @param value true to enable full-screen rendering, false to render in a window
* (Default: false)
*/
public void setFullscreen(boolean value) {
putBoolean("Fullscreen", value);
}
/**
* Set to true to enable vertical-synchronization, limiting and synchronizing
* every frame rendered to the monitor's refresh rate.
* @param value
* (Default: false)
*/
public void setVSync(boolean value) {
putBoolean("VSync", value);
}
/**
* Enable 3D stereo.
* <p>This feature requires hardware support from the GPU driver.
* @see <a href="http://en.wikipedia.org/wiki/Quad_buffering">http://en.wikipedia.org/wiki/Quad_buffering</a><br />
* Once enabled, filters or scene processors that handle 3D stereo rendering
* could use this feature to render using hardware 3D stereo.</p>
* (Default: false)
*/
public void setStereo3D(boolean value){
putBoolean("Stereo3D", value);
}
/**
* Sets the application icons to be used, with the most preferred first.
* For Windows you should supply at least one 16x16 icon and one 32x32. The former is used for the title/task bar,
* the latter for the alt-tab icon.
* Linux (and similar platforms) expect one 32x32 icon.
* Mac OS X should be supplied one 128x128 icon.
* <br/>
* The icon is used for the settings window, and the LWJGL render window. Not currently supported for JOGL.
* Note that a bug in Java 6 (bug ID 6445278, currently hidden but available in Google cache) currently prevents
* the icon working for alt-tab on the settings dialog in Windows.
*
* @param value An array of BufferedImages to use as icons.
* (Default: not set)
*/
public void setIcons(Object[] value) {
put("Icons", value);
}
/**
* Sets the path of the settings dialog image to use.
* <p>
* The image will be displayed in the settings dialog when the
* application is started.
* </p>
* (Default: /com/jme3/app/Monkey.png)
*
* @param path The path to the image in the classpath.
*/
public void setSettingsDialogImage(String path) {
putString("SettingsDialogImage", path);
}
/**
 * Enables Gamma Correction
 * This requires that the GPU supports GL_ARB_framebuffer_sRGB and will be
 * disabled otherwise.
 * @param gammaCorrection
 * (Default: false, as set in the static defaults map)
 */
public void setGammaCorrection(boolean gammaCorrection) {
putBoolean("GammaCorrection", gammaCorrection);
}
/**
* Get the framerate.
* @see #setFrameRate(int)
*/
public int getFrameRate() {
return getInteger("FrameRate");
}
/**
* Get the use input state.
* @see #setUseInput(boolean)
*/
public boolean useInput() {
return getBoolean("UseInput");
}
/**
* Get the renderer
* @see #setRenderer(java.lang.String)
*/
public String getRenderer() {
return getString("Renderer");
}
/**
* Get the width
* @see #setWidth(int)
*/
public int getWidth() {
return getInteger("Width");
}
/**
* Get the height
* @see #setHeight(int)
*/
public int getHeight() {
return getInteger("Height");
}
/**
 * Get the minimum width the settings window allows for the rendering display
 * @see #setMinWidth(int)
 */
public int getMinWidth() {
return getInteger("MinWidth");
}
/**
 * Get the minimum height the settings window allows for the rendering display
 * @see #setMinHeight(int)
 */
public int getMinHeight() {
return getInteger("MinHeight");
}
/**
* Get the bits per pixel
* @see #setBitsPerPixel(int)
*/
public int getBitsPerPixel() {
return getInteger("BitsPerPixel");
}
/**
* Get the frequency
* @see #setFrequency(int)
*/
public int getFrequency() {
return getInteger("Frequency");
}
/**
* Get the number of depth bits
* @see #setDepthBits(int)
*/
public int getDepthBits() {
return getInteger("DepthBits");
}
/**
* Android Only
* Get the number of alpha bits for the surface view to use.
* @see #setAlphaBits(int)
*/
public int getAlphaBits() {
return getInteger("AlphaBits");
}
/**
* Get the number of stencil bits
* @see #setStencilBits(int)
*/
public int getStencilBits() {
return getInteger("StencilBits");
}
/**
* Get the number of samples
* @see #setSamples(int)
*/
public int getSamples() {
return getInteger("Samples");
}
/**
* Get the application title
* @see #setTitle(java.lang.String)
*/
public String getTitle() {
return getString("Title");
}
/**
* Get the vsync state
* @see #setVSync(boolean)
*/
public boolean isVSync() {
return getBoolean("VSync");
}
/**
* Get the fullscreen state
* @see #setFullscreen(boolean)
*/
public boolean isFullscreen() {
return getBoolean("Fullscreen");
}
/**
* Get the use joysticks state
* @see #setUseJoysticks(boolean)
*/
public boolean useJoysticks() {
return !getBoolean("DisableJoysticks");
}
/**
* Get the audio renderer
* @see #setAudioRenderer(java.lang.String)
*/
public String getAudioRenderer() {
return getString("AudioRenderer");
}
/**
* Get the stereo 3D state
* @see #setStereo3D(boolean)
*/
public boolean useStereo3D(){
return getBoolean("Stereo3D");
}
/**
* Get the icon array
* @see #setIcons(java.lang.Object[])
*/
public Object[] getIcons() {
return (Object[]) get("Icons");
}
/**
* Get the settings dialog image
* @see #setSettingsDialogImage(java.lang.String)
*/
public String getSettingsDialogImage() {
return getString("SettingsDialogImage");
}
public boolean getGammaCorrection() {
return getBoolean("GammaCorrection");
}
/**
 * Allows the display window to be resized by dragging its edges.
 *
 * Only supported for {@link JmeContext.Type#Display} contexts which
 * are in windowed mode, ignored for other types.
 * The default value is <code>false</code>.
 *
 * @param resizable True to make a resizable window, false to make a fixed
 * size window.
 */
public void setResizable(boolean resizable) {
    // Bug fix: this previously stored the literal true, ignoring the
    // argument, which made it impossible to turn resizability back off.
    putBoolean("Resizable", resizable);
}
/**
* Determine if the display window can be resized by dragging its edges.
*
* @return True if the window is resizable, false if it is fixed size.
*
* @see #setResizable(boolean)
*/
public boolean isResizable() {
return getBoolean("Resizable");
}
/**
* When enabled the display context will swap buffers every frame.
*
* This may need to be disabled when integrating with an external
* library that handles buffer swapping on its own, e.g. Oculus Rift.
* When disabled, the engine will process window messages
* after each frame but it will not swap buffers - note that this
* will cause 100% CPU usage normally as there's no VSync or any framerate
* caps (unless set via {@link #setFrameRate(int) }.
* The default is <code>true</code>.
*
* @param swapBuffers True to enable buffer swapping, false to disable it.
*/
public void setSwapBuffers(boolean swapBuffers) {
putBoolean("SwapBuffers", swapBuffers);
}
/**
* Determine if the the display context will swap buffers every frame.
*
* @return True if buffer swapping is enabled, false otherwise.
*
* @see #setSwapBuffers(boolean)
*/
public boolean isSwapBuffers() {
return getBoolean("SwapBuffers");
}
}
| |
package com.fasterxml.jackson.databind.jsontype;
import java.util.*;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.jsontype.impl.StdSubtypeResolver;
import com.fasterxml.jackson.databind.type.TypeFactory;
/**
* Separate tests for verifying that "type name" type id mechanism
* works.
*/
public class TestTypeNames extends BaseMapTest
{
// Map subtype used to check polymorphic handling of map values.
@SuppressWarnings("serial")
static class AnimalMap extends LinkedHashMap<String,Animal> { }
// Hierarchy for [databind#1616]: one subtype named via @JsonSubTypes.Type,
// the other via @JsonTypeName on the subtype itself.
@JsonTypeInfo(property = "type", include = JsonTypeInfo.As.PROPERTY, use = JsonTypeInfo.Id.NAME)
@JsonSubTypes({
@JsonSubTypes.Type(value = A1616.class,name = "A"),
@JsonSubTypes.Type(value = B1616.class)
})
static abstract class Base1616 { }
static class A1616 extends Base1616 { }
@JsonTypeName("B")
static class B1616 extends Base1616 { }
/*
/**********************************************************
/* Unit tests
/**********************************************************
*/
private final ObjectMapper MAPPER = objectMapper();
// Verifies that subtype resolution finds both "A" (from @JsonSubTypes)
// and "B" (from @JsonTypeName) for Base1616.
public void testBaseTypeId1616() throws Exception
{
ObjectMapper mapper = new ObjectMapper();
Collection<NamedType> subtypes = new StdSubtypeResolver().collectAndResolveSubtypesByTypeId(
mapper.getDeserializationConfig(),
// note: `null` is fine here as `AnnotatedMember`:
null,
mapper.constructType(Base1616.class));
assertEquals(2, subtypes.size());
Set<String> ok = new HashSet<>(Arrays.asList("A", "B"));
for (NamedType type : subtypes) {
String id = type.getName();
if (!ok.contains(id)) {
fail("Unexpected id '"+id+"' (mapping to: "+type.getType()+"), should be one of: "+ok);
}
}
}
// Verifies that the type name is used as the wrapper-object key when
// serializing with WRAPPER_OBJECT inclusion.
public void testSerialization() throws Exception
{
// Note: need to use wrapper array just so that we can define
// static type on serialization. If we had root static types,
// could use those; but at the moment root type is dynamic
assertEquals("[{\"doggy\":{\"name\":\"Spot\",\"ageInYears\":3}}]",
MAPPER.writeValueAsString(new Animal[] { new Dog("Spot", 3) }));
assertEquals("[{\"MaineCoon\":{\"name\":\"Belzebub\",\"purrs\":true}}]",
MAPPER.writeValueAsString(new Animal[] { new MaineCoon("Belzebub", true)}));
}
// Round-trips a heterogeneous array (including a null element) through
// JSON and checks element-wise equality.
public void testRoundTrip() throws Exception
{
Animal[] input = new Animal[] {
new Dog("Odie", 7),
null,
new MaineCoon("Piru", false),
new Persian("Khomeini", true)
};
String json = MAPPER.writeValueAsString(input);
List<Animal> output = MAPPER.readValue(json,
TypeFactory.defaultInstance().constructCollectionType(ArrayList.class, Animal.class));
assertEquals(input.length, output.size());
for (int i = 0, len = input.length; i < len; ++i) {
assertEquals("Entry #"+i+" differs, input = '"+json+"'",
input[i], output.get(i));
}
}
// Round-trips a typed Map subclass with polymorphic values.
public void testRoundTripMap() throws Exception
{
AnimalMap input = new AnimalMap();
input.put("venla", new MaineCoon("Venla", true));
input.put("ama", new Dog("Amadeus", 13));
String json = MAPPER.writeValueAsString(input);
AnimalMap output = MAPPER.readValue(json, AnimalMap.class);
assertNotNull(output);
assertEquals(AnimalMap.class, output.getClass());
assertEquals(input.size(), output.size());
// for some reason, straight comparison won't work...
for (String name : input.keySet()) {
Animal in = input.get(name);
Animal out = output.get(name);
if (!in.equals(out)) {
fail("Animal in input was ["+in+"]; output not matching: ["+out+"]");
}
}
}
}
/*
/**********************************************************
/* Helper types
/**********************************************************
*/
@JsonTypeInfo(use=Id.NAME, include=As.WRAPPER_OBJECT)
@JsonSubTypes({
    @Type(value=Dog.class, name="doggy"),
    @Type(Cat.class) /* defaults to "TestTypedNames$Cat" then */
})
class Animal
{
    public String name;

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (o == null) return false;
        if (o.getClass() != getClass()) return false;
        // Null-safe comparison: the original `name.equals(...)` threw an
        // NPE if this.name was null (e.g. a partially-deserialized instance).
        return Objects.equals(name, ((Animal) o).name);
    }

    // equals() was overridden without hashCode(), breaking the
    // equals/hashCode contract; keep them consistent.
    @Override
    public int hashCode() {
        return Objects.hashCode(name);
    }

    @Override
    public String toString() {
        return getClass().toString() + "('"+name+"')";
    }
}
class Dog extends Animal
{
    public int ageInYears;

    public Dog() { }

    public Dog(String n, int y) {
        name = n;
        ageInYears = y;
    }

    @Override
    public boolean equals(Object o) {
        // super.equals guarantees same runtime class, so the cast is safe
        if (!super.equals(o)) {
            return false;
        }
        return ageInYears == ((Dog) o).ageInYears;
    }
}
@JsonSubTypes({
    @Type(MaineCoon.class),
    @Type(Persian.class)
})
abstract class Cat extends Animal {
    public boolean purrs;

    public Cat() { }

    public Cat(String n, boolean p) {
        name = n;
        purrs = p;
    }

    @Override
    public boolean equals(Object o) {
        // super.equals guarantees same runtime class, so the cast is safe
        if (!super.equals(o)) {
            return false;
        }
        return ((Cat) o).purrs == purrs;
    }

    @Override
    public String toString() {
        return super.toString()+"(purrs: "+purrs+")";
    }
}
/* uses default name ("MaineCoon") since there's no @JsonTypeName,
* nor did supertype specify name
*/
class MaineCoon extends Cat {
    public MaineCoon() { }

    public MaineCoon(String n, boolean p) {
        super(n, p);
    }
}
@JsonTypeName("persialaisKissa")
class Persian extends Cat {
public Persian() { super(); }
public Persian(String n, boolean p) {
super(n, p);
}
}
| |
package android.support.log4j;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Layout;
import org.apache.log4j.helpers.CountingQuietWriter;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.helpers.OptionConverter;
import org.apache.log4j.spi.LoggingEvent;
import java.io.File;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
* DailyRollingFileAppender extends {@link FileAppender} so that the
* underlying file is rolled over at a user chosen frequency.
* <p>
* DailyRollingFileAppender has been observed to exhibit
* synchronization issues and data loss. The log4j extras
* companion includes alternatives which should be considered
* for new deployments and which are discussed in the documentation
* for org.apache.log4j.rolling.RollingFileAppender.
* <p>
* <p>The rolling schedule is specified by the <b>DatePattern</b>
* option. This pattern should follow the {@link SimpleDateFormat}
* conventions. In particular, you <em>must</em> escape literal text
* within a pair of single quotes. A formatted version of the date
* pattern is used as the suffix for the rolled file name.
* <p>
* <p>For example, if the <b>File</b> option is set to
* <code>/foo/bar.log</code> and the <b>DatePattern</b> set to
* <code>'.'yyyy-MM-dd</code>, on 2001-02-16 at midnight, the logging
* file <code>/foo/bar.log</code> will be copied to
* <code>/foo/bar.log.2001-02-16</code> and logging for 2001-02-17
* will continue in <code>/foo/bar.log</code> until it rolls over
* the next day.
* <p>
* <p>Is is possible to specify monthly, weekly, half-daily, daily,
* hourly, or minutely rollover schedules.
* <p>
* <p><table border="1" cellpadding="2">
* <tr>
* <th>DatePattern</th>
* <th>Rollover schedule</th>
* <th>Example</th>
* <p>
* <tr>
* <td><code>'.'yyyy-MM</code>
* <td>Rollover at the beginning of each month</td>
* <p>
* <td>At midnight of May 31st, 2002 <code>/foo/bar.log</code> will be
* copied to <code>/foo/bar.log.2002-05</code>. Logging for the month
* of June will be output to <code>/foo/bar.log</code> until it is
* also rolled over the next month.
* <p>
* <tr>
* <td><code>'.'yyyy-ww</code>
* <p>
* <td>Rollover at the first day of each week. The first day of the
* week depends on the locale.</td>
* <p>
* <td>Assuming the first day of the week is Sunday, on Saturday
* midnight, June 9th 2002, the file <i>/foo/bar.log</i> will be
* copied to <i>/foo/bar.log.2002-23</i>. Logging for the 24th week
* of 2002 will be output to <code>/foo/bar.log</code> until it is
* rolled over the next week.
* <p>
* <tr>
* <td><code>'.'yyyy-MM-dd</code>
* <p>
* <td>Rollover at midnight each day.</td>
* <p>
* <td>At midnight, on March 8th, 2002, <code>/foo/bar.log</code> will
* be copied to <code>/foo/bar.log.2002-03-08</code>. Logging for the
* 9th day of March will be output to <code>/foo/bar.log</code> until
* it is rolled over the next day.
* <p>
* <tr>
* <td><code>'.'yyyy-MM-dd-a</code>
* <p>
* <td>Rollover at midnight and midday of each day.</td>
* <p>
* <td>At noon, on March 9th, 2002, <code>/foo/bar.log</code> will be
* copied to <code>/foo/bar.log.2002-03-09-AM</code>. Logging for the
* afternoon of the 9th will be output to <code>/foo/bar.log</code>
* until it is rolled over at midnight.
* <p>
* <tr>
* <td><code>'.'yyyy-MM-dd-HH</code>
* <p>
* <td>Rollover at the top of every hour.</td>
* <p>
* <td>At approximately 11:00.000 o'clock on March 9th, 2002,
* <code>/foo/bar.log</code> will be copied to
* <code>/foo/bar.log.2002-03-09-10</code>. Logging for the 11th hour
* of the 9th of March will be output to <code>/foo/bar.log</code>
* until it is rolled over at the beginning of the next hour.
* <p>
* <p>
* <tr>
* <td><code>'.'yyyy-MM-dd-HH-mm</code>
* <p>
* <td>Rollover at the beginning of every minute.</td>
* <p>
* <td>At approximately 11:23,000, on March 9th, 2001,
* <code>/foo/bar.log</code> will be copied to
* <code>/foo/bar.log.2001-03-09-10-22</code>. Logging for the minute
* of 11:23 (9th of March) will be output to
* <code>/foo/bar.log</code> until it is rolled over the next minute.
* <p>
* </table>
* <p>
* <p>Do not use the colon ":" character in anywhere in the
* <b>DatePattern</b> option. The text before the colon is interpreted
* as the protocol specification of a URL which is probably not what
* you want.
*/
public class DailyRollingFileAppender extends FileAppender {
// The code assumes that the following constants are in a increasing
// sequence.
static final int TOP_OF_TROUBLE = -1;
static final int TOP_OF_MINUTE = 0;
static final int TOP_OF_HOUR = 1;
static final int HALF_DAY = 2;
static final int TOP_OF_DAY = 3;
static final int TOP_OF_WEEK = 4;
static final int TOP_OF_MONTH = 5;
/**
* The date pattern. By default, the pattern is set to
* "'.'yyyy-MM-dd" meaning daily rollover.
*/
private String datePattern = "'.'yyyy-MM-dd";
/**
* The log file will be renamed to the value of the
* scheduledFilename variable when the next interval is entered. For
* example, if the rollover period is one hour, the log file will be
* renamed to the value of "scheduledFilename" at the beginning of
* the next hour.
* <p>
* The precise time when a rollover occurs depends on logging
* activity.
*/
private String scheduledFilename;
/**
* The next time we estimate a rollover should occur.
*/
private long nextCheck = System.currentTimeMillis() - 1;
Date now = new Date();
SimpleDateFormat sdf;
RollingCalendar rc = new RollingCalendar();
int checkPeriod = TOP_OF_TROUBLE;
// The gmtTimeZone is used only in computeCheckPeriod() method.
static final TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT");
// field copy from RollingFileAppender
/**
* The default maximum file size is 10MB.
*/
protected long maxFileSize = 10 * 1024 * 1024;
/**
* There is one backup files by default.
*/
protected int maxBackupIndex = 1;
private long nextRollover = 0;
/**
* -1 means keep all logs of every day
*/
private int maxBackupDays = -1;
private long maxBackupPeriod = 0;
/**
* Previous backup time
*/
private long previousCheck = 0;
/**
* The default constructor does nothing.
*/
public DailyRollingFileAppender() {
}
/**
* Instantiate a <code>DailyRollingFileAppender</code> and open the
* file designated by <code>filename</code>. The opened filename will
* become the output destination for this appender.
*/
public DailyRollingFileAppender(Layout layout, String filename,
String datePattern) throws IOException {
super(layout, filename, true);
this.datePattern = datePattern;
activateOptions();
}
/**
* The <b>DatePattern</b> takes a string in the same format as
* expected by {@link SimpleDateFormat}. This options determines the
* rollover schedule.
*/
public void setDatePattern(String pattern) {
datePattern = pattern;
}
/**
* Returns the value of the <b>DatePattern</b> option.
*/
public String getDatePattern() {
return datePattern;
}
public void activateOptions() {
super.activateOptions();
if (datePattern != null && fileName != null) {
now.setTime(System.currentTimeMillis());
sdf = new SimpleDateFormat(datePattern);
int type = computeCheckPeriod();
printPeriodicity(type);
rc.setType(type);
File file = new File(fileName);
scheduledFilename = fileName + sdf.format(new Date(file.lastModified()));
} else {
LogLog.error("Either File or DatePattern options are not set for appender ["
+ name + "].");
}
}
void printPeriodicity(int type) {
switch (type) {
case TOP_OF_MINUTE:
LogLog.debug("Appender [" + name + "] to be rolled every minute.");
break;
case TOP_OF_HOUR:
LogLog.debug("Appender [" + name
+ "] to be rolled on top of every hour.");
break;
case HALF_DAY:
LogLog.debug("Appender [" + name
+ "] to be rolled at midday and midnight.");
break;
case TOP_OF_DAY:
LogLog.debug("Appender [" + name
+ "] to be rolled at midnight.");
break;
case TOP_OF_WEEK:
LogLog.debug("Appender [" + name
+ "] to be rolled at start of week.");
break;
case TOP_OF_MONTH:
LogLog.debug("Appender [" + name
+ "] to be rolled at start of every month.");
break;
default:
LogLog.warn("Unknown periodicity for appender [" + name + "].");
}
}
// This method computes the roll over period by looping over the
// periods, starting with the shortest, and stopping when the r0 is
// different from from r1, where r0 is the epoch formatted according
// the datePattern (supplied by the user) and r1 is the
// epoch+nextMillis(i) formatted according to datePattern. All date
// formatting is done in GMT and not local format because the test
// logic is based on comparisons relative to 1970-01-01 00:00:00
// GMT (the epoch).
int computeCheckPeriod() {
RollingCalendar rollingCalendar = new RollingCalendar(gmtTimeZone, Locale.getDefault());
// set sate to 1970-01-01 00:00:00 GMT
Date epoch = new Date(0);
if (datePattern != null) {
for (int i = TOP_OF_MINUTE; i <= TOP_OF_MONTH; i++) {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat(datePattern);
simpleDateFormat.setTimeZone(gmtTimeZone); // do all date formatting in GMT
String r0 = simpleDateFormat.format(epoch);
rollingCalendar.setType(i);
Date next = new Date(rollingCalendar.getNextCheckMillis(epoch));
String r1 = simpleDateFormat.format(next);
//System.out.println("Type = "+i+", r0 = "+r0+", r1 = "+r1);
if (r0 != null && r1 != null && !r0.equals(r1)) {
return i;
}
}
}
return TOP_OF_TROUBLE; // Deliberately head for trouble...
}
/**
* Rollover the current file to a new file.
*/
void rollOver() throws IOException {
/* Compute filename, but only if datePattern is specified */
if (datePattern == null) {
errorHandler.error("Missing DatePattern option in rollOver().");
return;
}
String datedFilename = fileName + sdf.format(now);
// It is too early to roll over because we are still within the
// bounds of the current interval. Rollover will occur once the
// next interval is reached.
if (scheduledFilename.equals(datedFilename)) {
return;
}
// close current file, and rename it to datedFilename
this.closeFile();
File target = new File(scheduledFilename);
if (target.exists()) {
target.delete();
}
File file = new File(fileName);
boolean result = file.renameTo(target);
if (result) {
LogLog.debug(fileName + " -> " + scheduledFilename);
} else {
LogLog.error("Failed to rename [" + fileName + "] to [" + scheduledFilename + "].");
}
try {
// This will also close the file. This is OK since multiple
// close operations are safe.
this.setFile(fileName, true, this.bufferedIO, this.bufferSize);
nextRollover = 0;
} catch (IOException e) {
errorHandler.error("setFile(" + fileName + ", true) call failed.");
}
scheduledFilename = datedFilename;
}
/**
* Implements the usual roll over behaviour.
* <p>
* <p>If <code>MaxBackupIndex</code> is positive, then files
* {<code>File.1</code>, ..., <code>File.MaxBackupIndex -1</code>}
* are renamed to {<code>File.2</code>, ...,
* <code>File.MaxBackupIndex</code>}. Moreover, <code>File</code> is
* renamed <code>File.1</code> and closed. A new <code>File</code> is
* created to receive further log output.
* <p>
* <p>If <code>MaxBackupIndex</code> is equal to zero, then the
* <code>File</code> is truncated with no backup files created.
*/
void rollOverForBackup() {
File target;
File file;
if (qw != null) {
long size = ((CountingQuietWriter) qw).getCount();
LogLog.debug("rolling over count=" + size);
// if operation fails, do not roll again until
// maxFileSize more bytes are written
nextRollover = size + maxFileSize;
}
LogLog.debug("maxBackupIndex=" + maxBackupIndex);
boolean renameSucceeded = true;
// If maxBackups <= 0, then there is no file renaming to be done.
if (maxBackupIndex > 0) {
// Delete the oldest file, to keep Windows happy.
file = new File(scheduledFilename + '.' + maxBackupIndex);
if (file.exists())
renameSucceeded = file.delete();
// Map {(maxBackupIndex - 1), ..., 2, 1} to {maxBackupIndex, ..., 3, 2}
for (int i = maxBackupIndex - 1; i >= 1 && renameSucceeded; i--) {
file = new File(scheduledFilename + "." + i);
if (file.exists()) {
target = new File(scheduledFilename + '.' + (i + 1));
LogLog.debug("Renaming file " + file + " to " + target);
renameSucceeded = file.renameTo(target);
}
}
if (renameSucceeded) {
// Rename fileName to scheduledFilename.1
target = new File(scheduledFilename + "." + 1);
this.closeFile(); // keep windows happy.
file = new File(fileName);
LogLog.debug("Renaming file " + file + " to " + target);
renameSucceeded = file.renameTo(target);
//
// if file rename failed, reopen file with append = true
//
if (!renameSucceeded) {
try {
this.setFile(fileName, true, bufferedIO, bufferSize);
} catch (IOException e) {
if (e instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
LogLog.error("setFile(" + fileName + ", true) call failed.", e);
}
}
}
}
//
// if all renames were successful, then
//
if (renameSucceeded) {
try {
// This will also close the file. This is OK since multiple
// close operations are safe.
this.setFile(fileName, false, bufferedIO, bufferSize);
nextRollover = 0;
} catch (IOException e) {
if (e instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
LogLog.error("setFile(" + fileName + ", false) call failed.", e);
}
}
}
/**
* Get the maximum size that the output file is allowed to reach
* before being rolled over to backup files.
*/
public long getMaximumFileSize() {
return maxFileSize;
}
/**
* Returns the value of the <b>MaxBackupIndex</b> option.
*/
public int getMaxBackupIndex() {
return maxBackupIndex;
}
/**
* Set the maximum number of backup files to keep around.
* <p>
* <p>The <b>MaxBackupIndex</b> option determines how many backup
* files are kept before the oldest is erased. This option takes
* a positive integer value. If set to zero, then there will be no
* backup files and the log file will be truncated when it reaches
* <code>MaxFileSize</code>.
*/
public void setMaxBackupIndex(int maxBackups) {
this.maxBackupIndex = maxBackups;
}
/**
* Set the maximum size that the output file is allowed to reach
* before being rolled over to backup files.
* <p>
* <p>This method is equivalent to {@link #setMaxFileSize} except
* that it is required for differentiating the setter taking a
* <code>long</code> argument from the setter taking a
* <code>String</code> argument by the JavaBeans
*
* @see #setMaxFileSize(String)
*/
public void setMaximumFileSize(long maxFileSize) {
this.maxFileSize = maxFileSize;
}
/**
* Set the maximum size that the output file is allowed to reach
* before being rolled over to backup files.
* <p>
* <p>In configuration files, the <b>MaxFileSize</b> option takes an
* long integer in the range 0 - 2^63. You can specify the value
* with the suffixes "KB", "MB" or "GB" so that the integer is
* interpreted being expressed respectively in kilobytes, megabytes
* or gigabytes. For example, the value "10KB" will be interpreted
* as 10240.
*/
public void setMaxFileSize(String value) {
maxFileSize = OptionConverter.toFileSize(value, maxFileSize + 1);
}
/**
* Returns the value of the <b>MaxBackupDays</b> option.
*/
public int getMaxBackupDays() {
return maxBackupDays;
}
/**
* Set the maximum days the log should keep, <tt>-1</tt> means keep all the logs
*
* @param maxBackupDays maxBackupDays
*/
public void setMaxBackupDays(int maxBackupDays) {
if (maxBackupDays > 0) {
this.maxBackupDays = maxBackupDays;
this.maxBackupPeriod = maxBackupDays * 24 * 60 * 60 * 1000;
} else {
this.maxBackupDays = -1;
this.maxBackupPeriod = 0;
}
}
@Override
protected void setQWForFiles(Writer writer) {
this.qw = new CountingQuietWriter(writer, errorHandler);
}
@Override
public synchronized void setFile(String fileName, boolean append, boolean bufferedIO, int bufferSize) throws IOException {
super.setFile(fileName, append, bufferedIO, bufferSize);
if (append) {
if (qw != null) {
File f = new File(fileName);
((CountingQuietWriter) qw).setCount(f.length());
}
}
}
/**
* This method differentiates DailyRollingFileAppender from its
* super class.
* <p>
* <p>Before actually logging, this method will check whether it is
* time to do a rollover. If it is, it will schedule the next
* rollover time and then rollover.
*/
protected void subAppend(LoggingEvent event) {
long n = System.currentTimeMillis();
if (n >= nextCheck) {
now.setTime(n);
nextCheck = rc.getNextCheckMillis(now);
try {
rollOver();
} catch (IOException ioe) {
if (ioe instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
LogLog.error("rollOver() failed.", ioe);
}
}
if (maxBackupPeriod > 0 && (n - previousCheck) >= maxBackupPeriod) {
now.setTime(n);
previousCheck = rc.getPreviousCheckMillis(now, maxBackupDays);
scheduleCleanTask();
}
// if file not exists, create a new file
File file = new File(fileName);
if (!file.exists()) {
try {
setFile(fileName, true, bufferedIO, bufferSize);
} catch (IOException ignored) {
}
}
// no need write log if qw is null
if (qw != null) {
super.subAppend(event);
}
if (fileName != null && qw != null) {
long size = ((CountingQuietWriter) qw).getCount();
if (size >= maxFileSize && size >= nextRollover) {
rollOverForBackup();
}
}
}
private void scheduleCleanTask() {
String datePattern = getDatePattern();
String fileName = getFile();
SimpleDateFormat sdf = new SimpleDateFormat(datePattern);
String deletePrefix = fileName + sdf.format(rc.getPreviousCheckDate(now, maxBackupDays));
File file = new File(fileName);
File parent = file.getParentFile();
if (parent.exists() && parent.isDirectory()) {
File[] files = parent.listFiles();
if (files != null && files.length > 0) {
for (File f : files) {
if (f.getAbsolutePath().equals(fileName)) {
continue;
}
if (f.getAbsolutePath().compareTo(deletePrefix) < 0) {
f.delete();
}
}
}
}
}
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.meter.DefaultMeterRequest;
import org.onosproject.net.meter.Meter;
import org.onosproject.net.meter.MeterId;
import org.onosproject.net.meter.MeterRequest;
import org.onosproject.net.meter.MeterService;
import org.onosproject.rest.AbstractWebResource;
import org.slf4j.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.InputStream;
import static org.onlab.util.Tools.nullIsNotFound;
import static org.onlab.util.Tools.readTreeFromStream;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Query and program meter rules.
*/
@Path("meters")
public class MetersWebResource extends AbstractWebResource {
@Context
private UriInfo uriInfo;
private final Logger log = getLogger(getClass());
private static final String DEVICE_INVALID = "Invalid deviceId in meter creation request";
private static final String METER_NOT_FOUND = "Meter is not found for ";
private final MeterService meterService = get(MeterService.class);
private final ObjectNode root = mapper().createObjectNode();
private final ArrayNode metersNode = root.putArray("meters");
/**
* Returns all meters of all devices.
*
* @return 200 OK with array of all the meters in the system
* @onos.rsModel Meters
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getMeters() {
final Iterable<Meter> meters = meterService.getAllMeters();
if (meters != null) {
meters.forEach(meter -> metersNode.add(codec(Meter.class).encode(meter, this)));
}
return ok(root).build();
}
/**
* Returns a collection of meters by the device id.
*
* @param deviceId device identifier
* @return 200 OK with array of meters which belongs to specified device
* @onos.rsModel Meters
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{deviceId}")
public Response getMetersByDeviceId(@PathParam("deviceId") String deviceId) {
DeviceId did = DeviceId.deviceId(deviceId);
final Iterable<Meter> meters = meterService.getMeters(did);
if (meters != null) {
meters.forEach(meter -> metersNode.add(codec(Meter.class).encode(meter, this)));
}
return ok(root).build();
}
/**
* Returns a meter by the meter id.
*
* @param deviceId device identifier
* @param meterId meter identifier
* @return 200 OK with a meter, return 404 if no entry has been found
* @onos.rsModel Meter
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{deviceId}/{meterId}")
public Response getMeterByDeviceIdAndMeterId(@PathParam("deviceId") String deviceId,
@PathParam("meterId") String meterId) {
DeviceId did = DeviceId.deviceId(deviceId);
MeterId mid = MeterId.meterId(Long.valueOf(meterId));
final Meter meter = nullIsNotFound(meterService.getMeter(did, mid),
METER_NOT_FOUND + mid.id());
metersNode.add(codec(Meter.class).encode(meter, this));
return ok(root).build();
}
/**
* Creates new meter rule. Creates and installs a new meter rule for the
* specified device.
*
* @param deviceId device identifier
* @param stream meter rule JSON
* @return status of the request - CREATED if the JSON is correct,
* BAD_REQUEST if the JSON is invalid
* @onos.rsModel MeterPost
*/
@POST
@Path("{deviceId}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createMeter(@PathParam("deviceId") String deviceId,
InputStream stream) {
try {
ObjectNode jsonTree = readTreeFromStream(mapper(), stream);
JsonNode specifiedDeviceId = jsonTree.get("deviceId");
if ((specifiedDeviceId != null &&
!specifiedDeviceId.asText().equals(deviceId)) ||
get(DeviceService.class).getDevice(DeviceId.deviceId(deviceId))
== null) {
throw new IllegalArgumentException(DEVICE_INVALID);
}
jsonTree.put("deviceId", deviceId);
final MeterRequest meterRequest = codec(MeterRequest.class)
.decode(jsonTree, this);
final Meter meter = meterService.submit(meterRequest);
UriBuilder locationBuilder = uriInfo.getBaseUriBuilder()
.path("meters")
.path(deviceId)
.path(Long.toString(meter.id().id()));
return Response
.created(locationBuilder.build())
.build();
} catch (IOException ex) {
throw new IllegalArgumentException(ex);
}
}
/**
* Removes the specified meter.
*
* @param deviceId device identifier
* @param meterId meter identifier
* @return 204 NO CONTENT
*/
@DELETE
@Path("{deviceId}/{meterId}")
public Response deleteMeterByDeviceIdAndMeterId(@PathParam("deviceId") String deviceId,
@PathParam("meterId") String meterId) {
DeviceId did = DeviceId.deviceId(deviceId);
MeterId mid = MeterId.meterId(Long.valueOf(meterId));
final Meter tmpMeter = meterService.getMeter(did, mid);
if (tmpMeter != null) {
final MeterRequest meterRequest = meterToMeterRequest(tmpMeter, "REMOVE");
if (meterRequest != null) {
meterService.withdraw(meterRequest, tmpMeter.id());
}
} else {
log.warn("Meter {}, is not present", tmpMeter);
}
return Response.noContent().build();
}
/**
* Converts a meter instance to meterRequest instance with a certain operation.
*
* @param meter meter instance
* @param operation operation
* @return converted meterRequest instance
*/
private MeterRequest meterToMeterRequest(Meter meter, String operation) {
MeterRequest.Builder builder;
MeterRequest meterRequest;
if (meter == null) {
return null;
}
if (meter.isBurst()) {
builder = DefaultMeterRequest.builder()
.fromApp(meter.appId())
.forDevice(meter.deviceId())
.withUnit(meter.unit())
.withBands(meter.bands())
.burst();
} else {
builder = DefaultMeterRequest.builder()
.fromApp(meter.appId())
.forDevice(meter.deviceId())
.withUnit(meter.unit())
.withBands(meter.bands());
}
switch (operation) {
case "ADD":
meterRequest = builder.add();
break;
case "REMOVE":
meterRequest = builder.remove();
break;
default:
log.warn("Invalid operation {}.", operation);
return null;
}
return meterRequest;
}
}
| |
package org.basex.qt3ts.op;
import org.basex.tests.bxapi.*;
import org.basex.tests.qt3ts.*;
/**
* Tests for the date-less-than() function.
*
* @author BaseX Team 2005-15, BSD License
* @author Leo Woerteler
*/
@SuppressWarnings("all")
public class OpDateLessThan extends QT3TestSet {
/**
*
* *******************************************************
* Test: K-DateLT-1
* Written by: Frans Englich
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of 'lt' for xs:date.
* *******************************************************
* .
*/
@org.junit.Test
public void kDateLT1() {
final XQuery query = new XQuery(
"xs:date(\"2004-07-12\") lt xs:date(\"2004-07-13\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: K-DateLT-2
* Written by: Frans Englich
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of 'lt' for xs:date.
* *******************************************************
* .
*/
@org.junit.Test
public void kDateLT2() {
final XQuery query = new XQuery(
"not(xs:date(\"2004-07-13\") lt xs:date(\"2004-07-12\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: K-DateLT-3
* Written by: Frans Englich
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of 'lt' for xs:date.
* *******************************************************
* .
*/
@org.junit.Test
public void kDateLT3() {
final XQuery query = new XQuery(
"not(xs:date(\"2004-07-13\") lt xs:date(\"2004-07-13\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: K-DateLT-4
* Written by: Frans Englich
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of 'le' for xs:date.
* *******************************************************
* .
*/
@org.junit.Test
public void kDateLT4() {
final XQuery query = new XQuery(
"xs:date(\"2004-07-12\") le xs:date(\"2004-07-12\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: K-DateLT-5
* Written by: Frans Englich
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of 'le' for xs:date.
* *******************************************************
* .
*/
@org.junit.Test
public void kDateLT5() {
final XQuery query = new XQuery(
"xs:date(\"2004-07-12\") le xs:date(\"2004-07-12\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
*
* *******************************************************
* Test: K-DateLT-6
* Written by: Frans Englich
* Date: 2007-11-22T11:31:21+01:00
* Purpose: Simple test of 'le' for xs:date.
* *******************************************************
* .
*/
@org.junit.Test
public void kDateLT6() {
final XQuery query = new XQuery(
"not(xs:date(\"2004-07-13\") le xs:date(\"2004-07-12\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test that comparing large dates does not overflow. .
*/
@org.junit.Test
public void cbclDateGe001() {
final XQuery query = new XQuery(
"xs:date(\"25252734927766555-07-28\") >= xs:date(\"-25252734927766555-06-07+02:00\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
(
error("FODT0001")
||
assertBoolean(true)
)
);
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan001() {
  // Builds a date via local helper functions and checks 'lt' against a past date.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:two-digit($number as xs:integer) { \n" +
      " \t\tlet $string := string($number) return if (string-length($string) lt 2) then concat('0', $string) else $string \n" +
      " \t}; \n" +
      " \tdeclare function local:date($year as xs:integer, $month as xs:integer, $day as xs:integer) { \n" +
      " \t\tlet $m := local:two-digit($month), $d := local:two-digit($day) return xs:date(concat($year, '-', $m, '-', $d)) \n" +
      " \t}; \n" +
      " \tnot(local:date(2008, 05, 12) lt xs:date(\"1972-12-15\"))\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan002() {
  // 'lt' with one timezoned operand; expected true.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-30\") lt xs:date(\"2008-01-31+09:00\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan003() {
  // Reversed operands of the previous case; expected false.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-31+09:00\") lt xs:date(\"2008-01-30\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan004() {
  // Same day, one operand timezoned; 'lt' is expected to be false.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-31\") lt xs:date(\"2008-01-31+09:00\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan005() {
  // Same day, timezoned operand first; 'lt' is expected to be true.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-31+09:00\") lt xs:date(\"2008-01-31\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan006() {
  // Same local-function construction as 001, but comparing with 'le'.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:two-digit($number as xs:integer) { let $string := string($number) return if (string-length($string) lt 2) then concat('0', $string) else $string }; \n" +
      " \tdeclare function local:date($year as xs:integer, $month as xs:integer, $day as xs:integer) { let $m := local:two-digit($month), $d := local:two-digit($day) return xs:date(concat($year, '-', $m, '-', $d)) }; \n" +
      " \tnot(local:date(2008, 05, 12) le xs:date(\"1972-12-15\"))\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan007() {
  // 'le' with one timezoned operand; expected true.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-30\") le xs:date(\"2008-01-31+09:00\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan008() {
  // Reversed operands of 007; 'le' is expected to be false.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-31+09:00\") le xs:date(\"2008-01-30\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan009() {
  // Same day, one operand timezoned; 'le' is expected to be false.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-31\") le xs:date(\"2008-01-31+09:00\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan010() {
  // Same day, timezoned operand first; 'le' is expected to be true.
  final XQuery query = new XQuery(
      "xs:date(\"2008-01-31+09:00\") le xs:date(\"2008-01-31\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan011() {
  // local:date returns () here, so 'lt' yields the empty sequence; exists() is false.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:date($date as xs:date, $null as xs:boolean) { if ($null) then () else $date }; \n" +
      " \texists(local:date(xs:date(\"1972-12-15\"), fn:true()) lt xs:date(\"1972-12-15\"))\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan012() {
  // Equal dates compared with 'lt' via a pass-through local function; expected false.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:date($date as xs:date, $null as xs:boolean) { if ($null) then () else $date }; \n" +
      " \tlocal:date(xs:date(\"1972-12-15\"), fn:false()) lt xs:date(\"1972-12-15\")\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan013() {
  // Empty-sequence operand with 'ge'; exists() over the result is false.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:date($date as xs:date, $null as xs:boolean) { if ($null) then () else $date }; \n" +
      " \texists(local:date(xs:date(\"1972-12-15\"), fn:true()) ge xs:date(\"1972-12-15\"))\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan014() {
  // Equal dates compared with 'ge' via a pass-through local function; expected true.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:date($date as xs:date, $null as xs:boolean) { if ($null) then () else $date }; \n" +
      " \tlocal:date(xs:date(\"1972-12-15\"), fn:false()) ge xs:date(\"1972-12-15\")\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan015() {
  // Dates adjusted to the implicit timezone compare equal, so 'lt' is false.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:date($date as xs:string, $timezone as xs:string) { xs:date( concat($date, $timezone) ) }; \n" +
      " \tadjust-date-to-timezone(local:date(\"1972-12-14\", \"-12:00\")) lt adjust-date-to-timezone(xs:date(\"1972-12-15+12:00\"))\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
* test comparison of date .
*/
@org.junit.Test
public void cbclDateLessThan016() {
  // Same adjusted dates as 015 compared with 'le'; expected true.
  final XQuery query = new XQuery(
      "\n" +
      " \tdeclare function local:date($date as xs:string, $timezone as xs:string) { xs:date( concat($date, $timezone) ) }; \n" +
      " \tadjust-date-to-timezone(local:date(\"1972-12-14\", \"-12:00\")) le adjust-date-to-timezone(xs:date(\"1972-12-15+12:00\"))\n" +
      " ",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
* Test that comparing large dates does not overflow. .
*/
@org.junit.Test
public void cbclDateLt001() {
  // Huge year values with '<': either true or an FODT0001 overflow error.
  final XQuery query = new XQuery(
      "xs:date(\"-25252734927766555-06-07+02:00\") < xs:date(\"25252734927766555-07-28\")",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(error("FODT0001") || assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-1
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function
* As per example 1 (for this function)of the F&O specs
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan1() {
  // F&O example 1 for op:date-less-than; expected true.
  final XQuery query = new XQuery(
      "(xs:date(\"2004-12-25Z\") lt xs:date(\"2004-12-25-05:00\"))", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-10
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "or" expression (le operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan10() {
  // 'le' comparisons combined with 'or'; first branch is true.
  final XQuery query = new XQuery(
      "(xs:date(\"1976-10-25Z\") le xs:date(\"1976-10-28Z\")) or (xs:date(\"1980-08-11Z\") le xs:date(\"1980-08-10Z\"))",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-11
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "fn:true"/or expression (lt operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan11() {
  // 'lt' or fn:true(); the fn:true() branch forces the result to true.
  final XQuery query = new XQuery(
      "(xs:date(\"1980-05-18Z\") lt xs:date(\"1980-05-17Z\")) or (fn:true())", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-12
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "fn:true"/or expression (le operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan12() {
  // 'le' or fn:true(); both branches are true.
  final XQuery query = new XQuery(
      "(xs:date(\"2000-10-25Z\") le xs:date(\"2000-10-26Z\")) or (fn:true())", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-13
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "fn:false"/or expression (lt operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan13() {
  // 'lt' or fn:false(); the comparison itself is true.
  final XQuery query = new XQuery(
      "(xs:date(\"1980-01-01Z\") lt xs:date(\"1980-10-01Z\")) or (fn:false())", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-14
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "fn:false"/or expression (le operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan14() {
  // 'le' or fn:false(); the comparison itself is true.
  final XQuery query = new XQuery(
      "(xs:date(\"1980-10-25Z\") le xs:date(\"1980-10-26Z\")) or (fn:false())", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-2
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function
* As per example 2 (for this function) of the F&O specs
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2() {
  // F&O example 2 for op:date-less-than; expected true.
  final XQuery query = new XQuery(
      "(xs:date(\"2004-12-25-12:00\") le xs:date(\"2004-12-26+12:00\"))", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-3
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function that
* return true and used together with fn:not (lt operator)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan3() {
  // NOTE(review): month "15" in the literals looks invalid for xs:date —
  // kept verbatim from the QT3 suite; the expected outcome here is false.
  final XQuery query = new XQuery(
      "fn:not((xs:date(\"2005-15-25Z\") lt xs:date(\"2005-15-26Z\")))", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than-4
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function that
* return true and used together with fn:not (le operator)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan4() {
  // Equal dates satisfy 'le', so fn:not(...) is false.
  final XQuery query = new XQuery(
      "fn:not(xs:date(\"2005-04-02Z\") le xs:date(\"2005-04-02Z\"))", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than-5
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function that
* return false and used together with fn:not (lt operator)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan5() {
  // December is not before November of the same year; fn:not(...) is true.
  final XQuery query = new XQuery(
      "fn:not(xs:date(\"2000-12-25Z\") lt xs:date(\"2000-11-25Z\"))", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-6
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function that
* return false and used together with fn:not(le operator)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan6() {
  // The 25th is not 'le' the 23rd; fn:not(...) is true.
  final XQuery query = new XQuery(
      "fn:not(xs:date(\"2005-10-25Z\") le xs:date(\"2005-10-23Z\"))", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-7
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "and" expression (lt operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan7() {
  // 'lt' comparisons combined with 'and'; the first branch is false.
  final XQuery query = new XQuery(
      "(xs:date(\"2000-01-01Z\") lt xs:date(\"2000-01-01Z\")) and (xs:date(\"2001-02-02Z\") lt xs:date(\"2001-03-02Z\"))",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than-8
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "and" expression (le operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan8() {
  // 'le' comparisons combined with 'and'; both branches are true.
  final XQuery query = new XQuery(
      "(xs:date(\"2000-01-25Z\") le xs:date(\"2000-10-26Z\")) and (xs:date(\"1975-10-26Z\") le xs:date(\"1975-10-28Z\"))",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than-9
* Written By: Carmelo Montanez
* Date: June 13, 2005
* Purpose: Evaluates The "date-less-than" function used
* together with "or" expression (lt operator).
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan9() {
  // 'lt' comparisons combined with 'or'; the first branch is true.
  final XQuery query = new XQuery(
      "(xs:date(\"2000-10-26Z\") lt xs:date(\"2000-10-28Z\")) or (xs:date(\"1976-10-28Z\") lt xs:date(\"1976-10-28Z\"))",
      ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-1
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(lower bound)
* $arg2 = xs:date(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args1() {
  // lower bound 'lt' lower bound -> false.
  final XQuery query = new XQuery(
      "xs:date(\"1970-01-01Z\") lt xs:date(\"1970-01-01Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-10
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(lower bound)
* $arg2 = xs:date(upper bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args10() {
  // lower bound 'ge' upper bound -> false.
  final XQuery query = new XQuery(
      "xs:date(\"1970-01-01Z\") ge xs:date(\"2030-12-31Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-2
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(mid range)
* $arg2 = xs:date(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args2() {
  // mid range 'lt' lower bound -> false.
  final XQuery query = new XQuery(
      "xs:date(\"1983-11-17Z\") lt xs:date(\"1970-01-01Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-3
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(upper bound)
* $arg2 = xs:date(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args3() {
  // upper bound 'lt' lower bound -> false.
  final XQuery query = new XQuery(
      "xs:date(\"2030-12-31Z\") lt xs:date(\"1970-01-01Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-4
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(lower bound)
* $arg2 = xs:date(mid range)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args4() {
  // lower bound 'lt' mid range -> true.
  final XQuery query = new XQuery(
      "xs:date(\"1970-01-01Z\") lt xs:date(\"1983-11-17Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-5
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(lower bound)
* $arg2 = xs:date(upper bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args5() {
  // lower bound 'lt' upper bound -> true.
  final XQuery query = new XQuery(
      "xs:date(\"1970-01-01Z\") lt xs:date(\"2030-12-31Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-6
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(lower bound)
* $arg2 = xs:date(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args6() {
  // lower bound 'ge' lower bound -> true.
  final XQuery query = new XQuery(
      "xs:date(\"1970-01-01Z\") ge xs:date(\"1970-01-01Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-7
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(mid range)
* $arg2 = xs:date(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args7() {
  // mid range 'ge' lower bound -> true.
  final XQuery query = new XQuery(
      "xs:date(\"1983-11-17Z\") ge xs:date(\"1970-01-01Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-8
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(upper bound)
* $arg2 = xs:date(lower bound)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args8() {
  // upper bound 'ge' lower bound -> true.
  final XQuery query = new XQuery(
      "xs:date(\"2030-12-31Z\") ge xs:date(\"1970-01-01Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(true));
}
/**
*
* *******************************************************
* Test: op-date-less-than2args-9
* Written By: Carmelo Montanez
* Date: Tue Apr 12 16:29:07 GMT-05:00 2005
* Purpose: Evaluates The "op:date-less-than" operator
* with the arguments set as follows:
* $arg1 = xs:date(lower bound)
* $arg2 = xs:date(mid range)
* *******************************************************
* .
*/
@org.junit.Test
public void opDateLessThan2args9() {
  // lower bound 'ge' mid range -> false.
  final XQuery query = new XQuery(
      "xs:date(\"1970-01-01Z\") ge xs:date(\"1983-11-17Z\")", ctx);
  try {
    result = new QT3Result(query.value());
  } catch (final Throwable err) {
    result = new QT3Result(err);
  } finally {
    query.close();
  }
  test(assertBoolean(false));
}
}
| |
package org.i3xx.step.clockmongo.service.impl;
/*
* #%L
* NordApp OfficeBase :: clockmongo
* %%
* Copyright (C) 2014 - 2015 I.D.S. DialogSysteme GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.ArrayList;
import java.util.List;
import org.i3xx.step.clock.service.model.ClockPersistenceService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
/**
 * MongoDB-backed implementation of {@link ClockPersistenceService}.
 *
 * <p>Each namespace maps to one collection in the {@code naCLOCKSTORE} database;
 * each document in a collection holds a {@code symbol} key and its time
 * {@code statement}. Not thread-safe beyond what the Mongo driver guarantees;
 * {@link #startUp()} must be called before any data access.
 */
public class ClockPersistenceServiceImpl implements ClockPersistenceService {

    // Logger for THIS class; the original logged under the interface's name,
    // which hides the concrete implementation in log output.
    private static final Logger logger = LoggerFactory.getLogger(ClockPersistenceServiceImpl.class);

    /** The name of the intern database */
    private static final String DB_NAME = "naCLOCKSTORE";

    /**
     * Index direction for the 'symbol' index. In MongoDB, 1 = ascending and
     * -1 = descending; the original value (-1) contradicted both the constant's
     * name and its comment, so it is corrected to ascending here.
     */
    private static final int ASC = 1;

    // Connection parameters; configurable via setHost/setPort before startUp().
    private String host;
    private int port;

    // Driver state; non-null only between startUp() and shutDown().
    private MongoClient mongo;
    private DB db;

    /** Creates the service with default connection settings (localhost:27017). */
    public ClockPersistenceServiceImpl() {
        host = "localhost";
        port = 27017;
        mongo = null;
        db = null;
    }

    /**
     * Startup the service: connects to MongoDB and opens the clock store database.
     *
     * @throws Exception if the client cannot be created
     */
    public void startUp() throws Exception {
        logger.debug("Starts the clock store service 'ClockPersistenceServiceImpl'");
        mongo = new MongoClient(host, port);
        db = mongo.getDB(DB_NAME);
    }

    /**
     * Shutdown the service and release the client connection.
     * Safe to call when the service was never started (no NPE).
     *
     * @throws Exception declared for interface compatibility
     */
    public void shutDown() throws Exception {
        logger.debug("Shutdown the clock store service 'ClockPersistenceServiceImpl'");
        if (mongo != null) {
            mongo.close();
            mongo = null;
        }
        db = null;
    }

    /**
     * Returns all symbols stored in the given namespace.
     *
     * @param nspc The namespace
     * @return The list of symbols as an array (possibly containing nulls for
     *         documents without a 'symbol' field, matching original behavior)
     */
    public String[] getSymbols(String nspc) {
        logger.trace("Searches all symbols nspc:{}", nspc);
        DBCollection col = ensureCollection(nspc, null);
        List<String> list = new ArrayList<String>();
        DBCursor cursor = col.find();
        try {
            while (cursor.hasNext()) {
                list.add((String) cursor.next().get("symbol"));
            }
        } finally {
            // A DBCursor holds a server-side cursor; the original leaked it.
            cursor.close();
        }
        return list.toArray(new String[list.size()]);
    }

    /**
     * Adds (or overwrites) the statement mapped to a symbol in a namespace.
     *
     * @param nspc The namespace
     * @param stmt The time statement
     * @param symbol The symbol to add
     */
    public void addMapping(String nspc, String stmt, String symbol) {
        logger.trace("Adds the object nspc:{} symbol:{} stmt:{}", nspc, symbol, stmt);
        DBCollection col = ensureCollection(nspc, symbol);
        DBObject dbo = ensureObject(col, symbol);
        dbo.put("statement", stmt);
        col.update(symbolQuery(symbol), dbo, false, false);
    }

    /**
     * Removes the mapping of a symbol from a namespace.
     *
     * @param nspc The namespace
     * @param symbol The symbol to remove
     */
    public void removeMapping(String nspc, String symbol) {
        logger.trace("Removes the object nspc:{}, symbol:{}", nspc, symbol);
        DBCollection col = ensureCollection(nspc, symbol);
        col.remove(symbolQuery(symbol));
    }

    /**
     * Returns the statement mapped to a symbol, or null if no mapping exists.
     *
     * @param nspc The namespace
     * @param symbol The symbol
     * @return The time statement, or null
     */
    public String getMapping(String nspc, String symbol) {
        logger.trace("Retrieves the object nspc:{}, symbol:{}", nspc, symbol);
        DBCollection col = ensureCollection(nspc, symbol);
        DBObject dbo = col.findOne(symbolQuery(symbol));
        if (dbo == null) {
            return null;
        }
        return (String) dbo.get("statement");
    }

    //
    // Internal helpers
    //

    /**
     * Returns the collection for a namespace, creating the 'symbol' index the
     * first time the collection is seen.
     *
     * @param nspc The namespace (used as collection name)
     * @param symbol The symbol (currently unused; kept for signature stability)
     * @return The DBCollection
     */
    private DBCollection ensureCollection(String nspc, String symbol) {
        if (db.collectionExists(nspc)) {
            return db.getCollection(nspc);
        }
        // First access: getCollection implicitly creates it; add the index once.
        DBCollection col = db.getCollection(nspc);
        col.createIndex(new BasicDBObject("symbol", ASC));
        return col;
    }

    /**
     * Returns the document for a symbol, inserting a fresh skeleton document
     * (empty statement, creation timestamp) if none exists yet.
     *
     * @param col The collection to get the object from
     * @param symbol The symbol (key)
     * @return The DBObject
     */
    private DBObject ensureObject(DBCollection col, String symbol) {
        DBObject query = new BasicDBObject("symbol", symbol);
        DBObject dbo = col.findOne(query);
        if (dbo == null) {
            String nspc = col.getName();
            dbo = new BasicDBObject();
            dbo.put("nspc", nspc);
            dbo.put("symbol", symbol);
            // Long.valueOf instead of the deprecated 'new Long(...)' constructor.
            dbo.put("born", Long.valueOf(System.currentTimeMillis()));
            dbo.put("statement", "");
            col.insert(dbo);
        }
        return dbo;
    }

    /**
     * Returns the query to get an object from the collection.
     *
     * @param symbol The symbol
     * @return The DBObject
     */
    private DBObject symbolQuery(String symbol) {
        return new BasicDBObject("symbol", symbol);
    }

    //
    // Parameter settings
    //

    /**
     * @return the host
     */
    public String getHost() {
        return host;
    }

    /**
     * @param host the host to set
     */
    public void setHost(String host) {
        this.host = host;
    }

    /**
     * @return the port
     */
    public int getPort() {
        return port;
    }

    /**
     * @param port the port to set
     */
    public void setPort(int port) {
        this.port = port;
    }
}
| |
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.test.web.client.match;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.springframework.http.HttpMethod;
import org.springframework.mock.http.client.MockClientHttpRequest;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.hamcrest.Matchers.containsString;
/**
* Unit tests for {@link MockRestRequestMatchers}.
*
* @author Craig Walls
* @author Rossen Stoyanchev
* @author Sam Brannen
*/
public class MockRestRequestMatchersTests {
/** Shared mock request; each test mutates it (URI, method, headers) before applying a matcher. */
private final MockClientHttpRequest request = new MockClientHttpRequest();
@Test
public void requestTo() throws Exception {
	// A matcher built from the exact URI string accepts an identical request URI.
	String url = "http://www.foo.example/bar";
	this.request.setURI(new URI(url));
	MockRestRequestMatchers.requestTo(url).match(this.request);
}
@Test // SPR-15819
public void requestToUriTemplate() throws Exception {
	// Expanding {bar} with "bar" must reproduce the actual request URI.
	this.request.setURI(new URI("http://www.foo.example/bar"));
	MockRestRequestMatchers.requestToUriTemplate("http://www.foo.example/{bar}", "bar").match(this.request);
}
@Test
public void requestToNoMatch() throws Exception {
	// A different expected URI makes the matcher throw an AssertionError.
	this.request.setURI(new URI("http://www.foo.example/bar"));
	assertThatThrownBy(() ->
			MockRestRequestMatchers.requestTo("http://www.foo.example/wrong").match(this.request))
		.isInstanceOf(AssertionError.class);
}
@Test
public void requestToContains() throws Exception {
	// A Hamcrest substring matcher may be used in place of an exact URI.
	this.request.setURI(new URI("http://www.foo.example/bar"));
	MockRestRequestMatchers.requestTo(containsString("bar")).match(this.request);
}
@Test
public void method() throws Exception {
	// Matching HTTP method succeeds without throwing.
	this.request.setMethod(HttpMethod.GET);
	MockRestRequestMatchers.method(HttpMethod.GET).match(this.request);
}
@Test
public void methodNoMatch() throws Exception {
	// A method mismatch fails with a message naming expected and actual.
	this.request.setMethod(HttpMethod.POST);
	assertThatThrownBy(() -> MockRestRequestMatchers.method(HttpMethod.GET).match(this.request))
		.isInstanceOf(AssertionError.class)
		.hasMessageContaining("expected:<GET> but was:<POST>");
}
@Test
public void header() throws Exception {
	// All expected values of a multi-valued header must be present.
	this.request.getHeaders().put("foo", Arrays.asList("bar", "baz"));
	MockRestRequestMatchers.header("foo", "bar", "baz").match(this.request);
}
@Test
public void headerDoesNotExist() throws Exception {
	// Absent headers — null, empty, or unknown names — all satisfy the matcher.
	MockRestRequestMatchers.headerDoesNotExist(null).match(this.request);
	MockRestRequestMatchers.headerDoesNotExist("").match(this.request);
	MockRestRequestMatchers.headerDoesNotExist("foo").match(this.request);

	// Once the header exists, the matcher must fail and report its values.
	List<String> values = Arrays.asList("bar", "baz");
	this.request.getHeaders().put("foo", values);
	assertThatThrownBy(() -> MockRestRequestMatchers.headerDoesNotExist("foo").match(this.request))
		.isInstanceOf(AssertionError.class)
		.hasMessage("Expected header <foo> not to exist, but it exists with values: " + values);
}
@Test
public void headerMissing() throws Exception {
assertThatThrownBy(() -> MockRestRequestMatchers.header("foo", "bar").match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("was null");
}
@Test
public void headerMissingValue() throws Exception {
this.request.getHeaders().put("foo", Arrays.asList("bar", "baz"));
assertThatThrownBy(() -> MockRestRequestMatchers.header("foo", "bad").match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("expected:<bad> but was:<bar>");
}
@Test
public void headerContains() throws Exception {
this.request.getHeaders().put("foo", Arrays.asList("bar", "baz"));
MockRestRequestMatchers.header("foo", containsString("ba")).match(this.request);
}
@Test
public void headerContainsWithMissingHeader() throws Exception {
assertThatThrownBy(() -> MockRestRequestMatchers.header("foo", containsString("baz")).match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("but was null");
}
@Test
public void headerContainsWithMissingValue() throws Exception {
this.request.getHeaders().put("foo", Arrays.asList("bar", "baz"));
assertThatThrownBy(() -> MockRestRequestMatchers.header("foo", containsString("bx")).match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("was \"bar\"");
}
@Test
public void headers() throws Exception {
this.request.getHeaders().put("foo", Arrays.asList("bar", "baz"));
MockRestRequestMatchers.header("foo", "bar", "baz").match(this.request);
}
@Test
public void headersWithMissingHeader() throws Exception {
assertThatThrownBy(() -> MockRestRequestMatchers.header("foo", "bar").match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("but was null");
}
@Test
public void headersWithMissingValue() throws Exception {
this.request.getHeaders().put("foo", Collections.singletonList("bar"));
assertThatThrownBy(() -> MockRestRequestMatchers.header("foo", "bar", "baz").match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("to have at least <2> values");
}
@Test
public void queryParam() throws Exception {
this.request.setURI(new URI("http://www.foo.example/a?foo=bar&foo=baz"));
MockRestRequestMatchers.queryParam("foo", "bar", "baz").match(this.request);
}
@Test
public void queryParamMissing() throws Exception {
this.request.setURI(new URI("http://www.foo.example/a"));
assertThatThrownBy(() -> MockRestRequestMatchers.queryParam("foo", "bar").match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("but was null");
}
@Test
public void queryParamMissingValue() throws Exception {
this.request.setURI(new URI("http://www.foo.example/a?foo=bar&foo=baz"));
assertThatThrownBy(() -> MockRestRequestMatchers.queryParam("foo", "bad").match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("expected:<bad> but was:<bar>");
}
@Test
public void queryParamContains() throws Exception {
this.request.setURI(new URI("http://www.foo.example/a?foo=bar&foo=baz"));
MockRestRequestMatchers.queryParam("foo", containsString("ba")).match(this.request);
}
@Test
public void queryParamContainsWithMissingValue() throws Exception {
this.request.setURI(new URI("http://www.foo.example/a?foo=bar&foo=baz"));
assertThatThrownBy(() -> MockRestRequestMatchers.queryParam("foo", containsString("bx")).match(this.request))
.isInstanceOf(AssertionError.class)
.hasMessageContaining("was \"bar\"");
}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.EmptyTargetConfiguration;
import com.facebook.buck.core.model.FlavorDomain;
import com.facebook.buck.core.model.targetgraph.TargetGraph;
import com.facebook.buck.core.model.targetgraph.TargetGraphAndBuildTargets;
import com.facebook.buck.core.model.targetgraph.TargetGraphFactory;
import com.facebook.buck.core.model.targetgraph.TargetNode;
import com.facebook.buck.core.model.targetgraph.impl.TargetNodes;
import com.facebook.buck.core.parser.buildtargetparser.ParsingUnconfiguredBuildTargetFactory;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatformUtils;
import com.facebook.buck.cxx.toolchain.StaticUnresolvedCxxPlatform;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.rules.macros.CcFlagsMacro;
import com.facebook.buck.rules.macros.CcMacro;
import com.facebook.buck.rules.macros.CppFlagsMacro;
import com.facebook.buck.rules.macros.CxxFlagsMacro;
import com.facebook.buck.rules.macros.CxxMacro;
import com.facebook.buck.rules.macros.CxxppFlagsMacro;
import com.facebook.buck.rules.macros.LdMacro;
import com.facebook.buck.rules.macros.LdflagsSharedFilterMacro;
import com.facebook.buck.rules.macros.LdflagsSharedMacro;
import com.facebook.buck.rules.macros.LdflagsStaticFilterMacro;
import com.facebook.buck.rules.macros.LdflagsStaticPicFilterMacro;
import com.facebook.buck.rules.macros.LocationMacro;
import com.facebook.buck.rules.macros.Macro;
import com.facebook.buck.rules.macros.MacroContainer;
import com.facebook.buck.rules.macros.StringWithMacrosUtils;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.testutil.OptionalMatchers;
import com.facebook.buck.util.Optionals;
import com.facebook.buck.util.RichStream;
import com.facebook.buck.versions.NaiveVersionSelector;
import com.facebook.buck.versions.ParallelVersionedTargetGraphBuilder;
import com.facebook.buck.versions.VersionPropagatorBuilder;
import com.facebook.buck.versions.VersionedAliasBuilder;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Optional;
import java.util.function.BiFunction;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.hamcrest.Matchers;
import org.junit.Test;
/**
 * Tests for cxx_genrule macro handling: tool/flag macro expansion, ldflags
 * filtering, location macros, cacheability, and target translation in
 * versioned target graphs.
 */
public class CxxGenruleDescriptionTest {

  // Thread count passed to the versioned target graph builder in these tests.
  private static final int NUMBER_OF_THREADS = 1;

  /**
   * Each tool macro (ld/cc/cxx) used in a cmd must cause the rule's implicit
   * deps to include the default platform's parse-time deps.
   */
  @Test
  public void toolPlatformParseTimeDeps() {
    for (Macro macro : ImmutableList.of(LdMacro.of(), CcMacro.of(), CxxMacro.of())) {
      CxxGenruleBuilder builder =
          new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:rule#default"))
              .setCmd(StringWithMacrosUtils.format("%s", macro))
              .setOut("foo");
      assertThat(
          ImmutableSet.copyOf(builder.findImplicitDeps()),
          Matchers.equalTo(
              ImmutableSet.copyOf(
                  CxxPlatformUtils.DEFAULT_UNRESOLVED_PLATFORM.getParseTimeDeps(
                      EmptyTargetConfiguration.INSTANCE))));
    }
  }

  /**
   * Filtered ldflags macros with a pattern matching only //:a must expand to
   * //:a's exported flags ("-a") and exclude its dep //:b's flags ("-b").
   */
  @Test
  public void ldFlagsFilter() {
    // Exercise each of the three filterable ldflags macro variants.
    for (BiFunction<Optional<Pattern>, ImmutableList<BuildTarget>, Macro> macro :
        ImmutableList.<BiFunction<Optional<Pattern>, ImmutableList<BuildTarget>, Macro>>of(
            LdflagsSharedFilterMacro::of,
            LdflagsStaticFilterMacro::of,
            LdflagsStaticPicFilterMacro::of)) {
      // //:a exports "-a" and depends on //:b, which exports "-b".
      CxxLibraryBuilder bBuilder =
          new CxxLibraryBuilder(BuildTargetFactory.newInstance("//:b"))
              .setExportedLinkerFlags(ImmutableList.of(StringWithMacrosUtils.format("-b")));
      CxxLibraryBuilder aBuilder =
          new CxxLibraryBuilder(BuildTargetFactory.newInstance("//:a"))
              .setExportedDeps(ImmutableSortedSet.of(bBuilder.getTarget()))
              .setExportedLinkerFlags(ImmutableList.of(StringWithMacrosUtils.format("-a")));
      CxxGenruleBuilder builder =
          new CxxGenruleBuilder(
                  BuildTargetFactory.newInstance(
                      "//:rule#" + CxxPlatformUtils.DEFAULT_PLATFORM_FLAVOR))
              .setOut("out")
              .setCmd(
                  StringWithMacrosUtils.format(
                      "%s",
                      macro.apply(
                          Optional.of(Pattern.compile("//:a")),
                          ImmutableList.of(aBuilder.getTarget()))));
      TargetGraph targetGraph =
          TargetGraphFactory.newInstance(bBuilder.build(), aBuilder.build(), builder.build());
      ActionGraphBuilder graphBuilder = new TestActionGraphBuilder(targetGraph);
      SourcePathResolver pathResolver =
          DefaultSourcePathResolver.from(new SourcePathRuleFinder(graphBuilder));
      // Build the libraries before the genrule so its macro expansion can
      // resolve them.
      bBuilder.build(graphBuilder);
      aBuilder.build(graphBuilder);
      Genrule genrule = (Genrule) builder.build(graphBuilder);
      assertThat(
          Joiner.on(' ')
              .join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
          Matchers.containsString("-a"));
      assertThat(
          Joiner.on(' ')
              .join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
          Matchers.not(Matchers.containsString("-b")));
    }
  }

  /**
   * Zero-arg cppflags/cxxppflags macros must expand to the platform's
   * preprocessor flags.
   */
  @Test
  public void cppflagsNoArgs() {
    CxxPlatform cxxPlatform =
        CxxPlatformUtils.DEFAULT_PLATFORM.withCppflags("-cppflag").withCxxppflags("-cxxppflag");
    CxxGenruleBuilder builder =
        new CxxGenruleBuilder(
                BuildTargetFactory.newInstance("//:rule#" + cxxPlatform.getFlavor()),
                new FlavorDomain<>(
                    "C/C++ Platform",
                    ImmutableMap.of(
                        cxxPlatform.getFlavor(), new StaticUnresolvedCxxPlatform(cxxPlatform))))
            .setOut("out")
            .setCmd(
                StringWithMacrosUtils.format(
                    "%s %s",
                    CppFlagsMacro.of(Optional.empty(), ImmutableList.of()),
                    CxxppFlagsMacro.of(Optional.empty(), ImmutableList.of())));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
    ActionGraphBuilder graphBuilder = new TestActionGraphBuilder(targetGraph);
    SourcePathResolver pathResolver =
        DefaultSourcePathResolver.from(new SourcePathRuleFinder(graphBuilder));
    Genrule genrule = (Genrule) builder.build(graphBuilder);
    assertThat(
        Joiner.on(' ').join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
        Matchers.containsString("-cppflag -cxxppflag"));
  }

  /**
   * Zero-arg ccflags/cxxflags macros must expand to the platform's assembler
   * and compiler flags.
   */
  @Test
  public void cflagsNoArgs() {
    CxxPlatform cxxPlatform =
        CxxPlatformUtils.DEFAULT_PLATFORM
            .withAsflags("-asflag")
            .withCflags("-cflag")
            .withCxxflags("-cxxflag");
    CxxGenruleBuilder builder =
        new CxxGenruleBuilder(
                BuildTargetFactory.newInstance("//:rule#" + cxxPlatform.getFlavor()),
                new FlavorDomain<>(
                    "C/C++ Platform",
                    ImmutableMap.of(
                        cxxPlatform.getFlavor(), new StaticUnresolvedCxxPlatform(cxxPlatform))))
            .setOut("out")
            .setCmd(StringWithMacrosUtils.format("%s %s", CcFlagsMacro.of(), CxxFlagsMacro.of()));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
    ActionGraphBuilder graphBuilder = new TestActionGraphBuilder(targetGraph);
    SourcePathResolver pathResolver =
        DefaultSourcePathResolver.from(new SourcePathRuleFinder(graphBuilder));
    Genrule genrule = (Genrule) builder.build(graphBuilder);
    for (String expected : ImmutableList.of("-asflag", "-cflag", "-cxxflag")) {
      assertThat(
          Joiner.on(' ')
              .join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
          Matchers.containsString(expected));
    }
  }

  /**
   * A macro reference to a versioned alias (//:versioned -> //:dep) must be
   * rewritten to the concrete target in the versioned graph.
   */
  @Test
  public void versionedTargetReferenceIsTranslatedInVersionedGraph() throws Exception {
    VersionPropagatorBuilder dep = new VersionPropagatorBuilder("//:dep");
    VersionedAliasBuilder versionedDep =
        new VersionedAliasBuilder("//:versioned").setVersions("1.0", "//:dep");
    CxxGenruleBuilder genruleBuilder =
        new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:genrule"))
            .setCmd(
                StringWithMacrosUtils.format(
                    "%s",
                    LdflagsSharedMacro.of(
                        Optional.empty(), ImmutableList.of(versionedDep.getTarget()))))
            .setOut("foo");
    TargetGraph graph =
        TargetGraphFactory.newInstance(dep.build(), versionedDep.build(), genruleBuilder.build());
    TargetGraphAndBuildTargets transformed =
        ParallelVersionedTargetGraphBuilder.transform(
            new NaiveVersionSelector(),
            TargetGraphAndBuildTargets.of(graph, ImmutableSet.of(genruleBuilder.getTarget())),
            NUMBER_OF_THREADS,
            new DefaultTypeCoercerFactory(),
            new ParsingUnconfiguredBuildTargetFactory(),
            20);
    CxxGenruleDescriptionArg arg =
        extractArg(
            transformed.getTargetGraph().get(genruleBuilder.getTarget()),
            CxxGenruleDescriptionArg.class);
    // The alias should have been replaced by its 1.0 version, //:dep.
    assertThat(
        arg.getCmd(),
        OptionalMatchers.present(
            Matchers.equalTo(
                StringWithMacrosUtils.format(
                    "%s",
                    LdflagsSharedMacro.of(Optional.empty(), ImmutableList.of(dep.getTarget()))))));
  }

  /**
   * A macro reference to a version propagator whose dep is versioned must be
   * rewritten to the propagator's versioned flavor (//:dep#v...).
   */
  @Test
  public void versionPropagatorTargetReferenceIsTranslatedInVersionedGraph() throws Exception {
    VersionPropagatorBuilder transitiveDep = new VersionPropagatorBuilder("//:transitive_dep");
    VersionedAliasBuilder versionedDep =
        new VersionedAliasBuilder("//:versioned").setVersions("1.0", "//:transitive_dep");
    VersionPropagatorBuilder dep = new VersionPropagatorBuilder("//:dep").setDeps("//:versioned");
    CxxGenruleBuilder genruleBuilder =
        new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:genrule"))
            .setCmd(
                StringWithMacrosUtils.format(
                    "%s",
                    LdflagsSharedMacro.of(Optional.empty(), ImmutableList.of(dep.getTarget()))))
            .setOut("foo");
    TargetGraph graph =
        TargetGraphFactory.newInstance(
            transitiveDep.build(), versionedDep.build(), dep.build(), genruleBuilder.build());
    TargetGraphAndBuildTargets transformed =
        ParallelVersionedTargetGraphBuilder.transform(
            new NaiveVersionSelector(),
            TargetGraphAndBuildTargets.of(graph, ImmutableSet.of(genruleBuilder.getTarget())),
            NUMBER_OF_THREADS,
            new DefaultTypeCoercerFactory(),
            new ParsingUnconfiguredBuildTargetFactory(),
            20);
    CxxGenruleDescriptionArg arg =
        extractArg(
            transformed.getTargetGraph().get(genruleBuilder.getTarget()),
            CxxGenruleDescriptionArg.class);
    // Collect the macro's targets and check the dep got a version-hash flavor.
    assertThat(
        RichStream.from(arg.getCmd().orElseThrow(AssertionError::new).getMacros())
            .map(MacroContainer::getMacro)
            .filter(LdflagsSharedMacro.class)
            .flatMap(m -> m.getTargets().stream())
            .map(BuildTarget::getFullyQualifiedName)
            .collect(Collectors.toList()),
        Matchers.contains(Matchers.matchesPattern(Pattern.quote("//:dep#v") + "[a-zA-Z0-9]*")));
  }

  /**
   * A $(location //:dep) macro referring to another cxx_genrule must expand to
   * the absolute path of that genrule's output for the chosen platform.
   */
  @Test
  public void cxxGenruleInLocationMacro() {
    CxxGenruleBuilder depBuilder =
        new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:dep")).setOut("out");
    CxxGenruleBuilder builder =
        new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setCmd(StringWithMacrosUtils.format("%s", LocationMacro.of(depBuilder.getTarget())))
            .setOut("out");
    TargetGraph targetGraph = TargetGraphFactory.newInstance(depBuilder.build(), builder.build());
    ActionGraphBuilder graphBuilder = new TestActionGraphBuilder(targetGraph);
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
    SourcePathResolver pathResolver =
        DefaultSourcePathResolver.from(new SourcePathRuleFinder(graphBuilder));
    CxxGenrule dep = (CxxGenrule) graphBuilder.requireRule(depBuilder.getTarget());
    CxxGenrule rule = (CxxGenrule) graphBuilder.requireRule(builder.getTarget());
    // Resolve the platform-specific underlying genrule for the outer rule.
    Genrule genrule =
        (Genrule)
            ruleFinder
                .getRule(rule.getGenrule(CxxPlatformUtils.DEFAULT_PLATFORM, graphBuilder))
                .orElseThrow(AssertionError::new);
    assertThat(
        Arg.stringify(Optionals.toStream(genrule.getCmd()).toOnceIterable(), pathResolver),
        Matchers.contains(
            pathResolver
                .getAbsolutePath(dep.getGenrule(CxxPlatformUtils.DEFAULT_PLATFORM, graphBuilder))
                .toString()));
  }

  /**
   * setCacheable(false) on a cxx_genrule must propagate to the underlying
   * platform-specific genrule.
   */
  @Test
  public void isCacheable() {
    CxxGenruleBuilder builder =
        new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setOut("out")
            .setCmd(StringWithMacrosUtils.format("touch $OUT"))
            .setCacheable(false);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
    ActionGraphBuilder graphBuilder = new TestActionGraphBuilder(targetGraph);
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
    CxxGenrule rule = (CxxGenrule) graphBuilder.requireRule(builder.getTarget());
    Genrule genrule =
        (Genrule)
            ruleFinder
                .getRule(rule.getGenrule(CxxPlatformUtils.DEFAULT_PLATFORM, graphBuilder))
                .orElseThrow(AssertionError::new);
    assertFalse(genrule.isCacheable());
  }

  /**
   * Casts a target node's constructor arg to {@code clazz}, failing the test
   * with a descriptive AssertionError if the arg has a different type.
   */
  private static <U> U extractArg(TargetNode<?> node, Class<U> clazz) {
    return TargetNodes.castArg(node, clazz)
        .orElseThrow(
            () ->
                new AssertionError(
                    String.format(
                        "%s: expected constructor arg to be of type %s (was %s)",
                        node, clazz, node.getConstructorArg().getClass())))
        .getConstructorArg();
  }
}
| |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2019 Guardsquare NV
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard;
import java.io.File;
import java.net.*;
/**
* This class checks whether the output is up to date.
*
* @author Eric Lafortune
*/
/**
 * This class checks whether the output is up to date.
 *
 * <p>The output counts as up to date when the newest input modification time
 * is not later than the oldest output modification time, over the configured
 * program jars, library jars, and auxiliary input/output files.
 *
 * @author Eric Lafortune
 */
public class UpToDateChecker
{
    private final Configuration configuration;


    /**
     * Creates a new UpToDateChecker with the given configuration.
     */
    public UpToDateChecker(Configuration configuration)
    {
        this.configuration = configuration;
    }


    /**
     * Returns whether the output is up to date, based on the modification times
     * of the input jars, output jars, and library jars (or directories).
     */
    public boolean check()
    {
        try
        {
            ModificationTimeChecker checker = new ModificationTimeChecker();

            checker.updateInputModificationTime(configuration.lastModified);

            ClassPath programJars = configuration.programJars;
            ClassPath libraryJars = configuration.libraryJars;

            // Check the dates of the program jars, if any.
            if (programJars != null)
            {
                for (int index = 0; index < programJars.size(); index++)
                {
                    // Update the input and output modification times.
                    ClassPathEntry classPathEntry = programJars.get(index);

                    checker.updateModificationTime(classPathEntry.getFile(),
                                                   classPathEntry.isOutput());
                }
            }

            // Check the dates of the library jars, if any.
            if (libraryJars != null)
            {
                for (int index = 0; index < libraryJars.size(); index++)
                {
                    // Update the input modification time.
                    ClassPathEntry classPathEntry = libraryJars.get(index);

                    checker.updateModificationTime(classPathEntry.getFile(),
                                                   false);
                }
            }

            // Check the dates of the auxiliary input files.
            checker.updateInputModificationTime(configuration.applyMapping);
            checker.updateInputModificationTime(configuration.obfuscationDictionary);
            checker.updateInputModificationTime(configuration.classObfuscationDictionary);
            checker.updateInputModificationTime(configuration.packageObfuscationDictionary);

            // Check the dates of the auxiliary output files.
            checker.updateOutputModificationTime(configuration.printSeeds);
            checker.updateOutputModificationTime(configuration.printUsage);
            checker.updateOutputModificationTime(configuration.printMapping);
            checker.updateOutputModificationTime(configuration.printConfiguration);
            checker.updateOutputModificationTime(configuration.dump);
        }
        catch (IllegalStateException e)
        {
            // The output is outdated.
            return false;
        }

        System.out.println("The output seems up to date");

        return true;
    }


    /**
     * This class maintains the modification times of input and output.
     * The methods throw an IllegalStateException if the output appears
     * outdated.
     */
    private static class ModificationTimeChecker {

        // Newest input time seen so far and oldest output time seen so far;
        // the output is outdated as soon as the former exceeds the latter.
        private long inputModificationTime  = Long.MIN_VALUE;
        private long outputModificationTime = Long.MAX_VALUE;


        /**
         * Updates the input modification time based on the given file or
         * directory (recursively). Only "file" URLs are considered; other
         * protocols and malformed URIs are ignored.
         */
        public void updateInputModificationTime(URL url)
        {
            if (url != null &&
                url.getProtocol().equals("file"))
            {
                try
                {
                    updateModificationTime(new File(url.toURI()), false);
                }
                catch (URISyntaxException ignore) {}
            }
        }


        /**
         * Updates the input modification time based on the given file or
         * directory (recursively).
         */
        public void updateInputModificationTime(File file)
        {
            if (file != null)
            {
                updateModificationTime(file, false);
            }
        }


        /**
         * Updates the output modification time based on the given file or
         * directory (recursively).
         */
        public void updateOutputModificationTime(File file)
        {
            if (file != null && file.getName().length() > 0)
            {
                updateModificationTime(file, true);
            }
        }


        /**
         * Updates the specified modification time based on the given file or
         * directory (recursively).
         */
        public void updateModificationTime(File file, boolean isOutput)
        {
            // Is it a directory?
            if (file.isDirectory())
            {
                // Ignore the directory's modification time; just recurse on
                // its files.
                File[] files = file.listFiles();

                // listFiles() returns null on an I/O error or when the
                // directory can't be read; treat that like an empty
                // directory instead of crashing with a NullPointerException.
                if (files == null)
                {
                    files = new File[0];
                }

                // Still, an empty output directory is probably a sign that it
                // is not up to date.
                if (files.length == 0 && isOutput)
                {
                    updateOutputModificationTime(Long.MIN_VALUE);
                }

                for (int index = 0; index < files.length; index++)
                {
                    updateModificationTime(files[index], isOutput);
                }
            }
            else
            {
                // Update with the file's modification time.
                updateModificationTime(file.lastModified(), isOutput);
            }
        }


        /**
         * Updates the specified modification time.
         */
        public void updateModificationTime(long time, boolean isOutput)
        {
            if (isOutput)
            {
                updateOutputModificationTime(time);
            }
            else
            {
                updateInputModificationTime(time);
            }
        }


        /**
         * Updates the input modification time.
         *
         * @throws IllegalStateException if the output is now outdated.
         */
        public void updateInputModificationTime(long time)
        {
            if (inputModificationTime < time)
            {
                inputModificationTime = time;

                checkModificationTimes();
            }
        }


        /**
         * Updates the output modification time.
         *
         * @throws IllegalStateException if the output is now outdated.
         */
        public void updateOutputModificationTime(long time)
        {
            if (outputModificationTime > time)
            {
                outputModificationTime = time;

                checkModificationTimes();
            }
        }


        // Throws if any input is newer than the oldest output.
        private void checkModificationTimes()
        {
            if (inputModificationTime > outputModificationTime)
            {
                throw new IllegalStateException("The output is outdated");
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutorService;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
/**
* An internal class that delegates to an {@link HConnection} instance.
* A convenience to override when customizing method implementations.
*
*
* @see ConnectionUtils#createShortCircuitHConnection(HConnection, ServerName,
* AdminService.BlockingInterface, ClientService.BlockingInterface) for case where we make
* Connections skip RPC if request is to local server.
*/
@InterfaceAudience.Private
@SuppressWarnings("deprecation")
//NOTE: DO NOT make this class public. It was made package-private on purpose.
abstract class ConnectionAdapter implements ClusterConnection {
private final ClusterConnection wrappedConnection;
public ConnectionAdapter(Connection c) {
wrappedConnection = (ClusterConnection)c;
}
@Override
public void abort(String why, Throwable e) {
wrappedConnection.abort(why, e);
}
@Override
public boolean isAborted() {
return wrappedConnection.isAborted();
}
@Override
public void close() throws IOException {
wrappedConnection.close();
}
@Override
public Configuration getConfiguration() {
return wrappedConnection.getConfiguration();
}
@Override
public HTableInterface getTable(String tableName) throws IOException {
return wrappedConnection.getTable(tableName);
}
@Override
public HTableInterface getTable(byte[] tableName) throws IOException {
return wrappedConnection.getTable(tableName);
}
@Override
public HTableInterface getTable(TableName tableName) throws IOException {
return wrappedConnection.getTable(tableName);
}
@Override
public HTableInterface getTable(String tableName, ExecutorService pool)
throws IOException {
return wrappedConnection.getTable(tableName, pool);
}
@Override
public HTableInterface getTable(byte[] tableName, ExecutorService pool)
throws IOException {
return wrappedConnection.getTable(tableName, pool);
}
@Override
public HTableInterface getTable(TableName tableName, ExecutorService pool)
throws IOException {
return wrappedConnection.getTable(tableName, pool);
}
@Override
public RegionLocator getRegionLocator(TableName tableName) throws IOException {
return wrappedConnection.getRegionLocator(tableName);
}
@Override
public Admin getAdmin() throws IOException {
return wrappedConnection.getAdmin();
}
@Override
public boolean isMasterRunning() throws MasterNotRunningException,
ZooKeeperConnectionException {
return wrappedConnection.isMasterRunning();
}
@Override
public boolean isTableEnabled(TableName tableName) throws IOException {
return wrappedConnection.isTableEnabled(tableName);
}
@Override
public boolean isTableEnabled(byte[] tableName) throws IOException {
return wrappedConnection.isTableEnabled(tableName);
}
@Override
public boolean isTableDisabled(TableName tableName) throws IOException {
return wrappedConnection.isTableDisabled(tableName);
}
@Override
public boolean isTableDisabled(byte[] tableName) throws IOException {
return wrappedConnection.isTableDisabled(tableName);
}
@Override
public boolean isTableAvailable(TableName tableName) throws IOException {
return wrappedConnection.isTableAvailable(tableName);
}
@Override
public boolean isTableAvailable(byte[] tableName) throws IOException {
return wrappedConnection.isTableAvailable(tableName);
}
@Override
public boolean isTableAvailable(TableName tableName, byte[][] splitKeys)
throws IOException {
return wrappedConnection.isTableAvailable(tableName, splitKeys);
}
@Override
public boolean isTableAvailable(byte[] tableName, byte[][] splitKeys)
throws IOException {
return wrappedConnection.isTableAvailable(tableName, splitKeys);
}
@Override
public TableState getTableState(TableName tableName) throws IOException {
return wrappedConnection.getTableState(tableName);
}
@Override
public HTableDescriptor[] listTables() throws IOException {
return wrappedConnection.listTables();
}
@Override
public String[] getTableNames() throws IOException {
return wrappedConnection.getTableNames();
}
@Override
public TableName[] listTableNames() throws IOException {
return wrappedConnection.listTableNames();
}
@Override
public HTableDescriptor getHTableDescriptor(TableName tableName)
throws IOException {
return wrappedConnection.getHTableDescriptor(tableName);
}
@Override
public HTableDescriptor getHTableDescriptor(byte[] tableName)
throws IOException {
return wrappedConnection.getHTableDescriptor(tableName);
}
@Override
public HRegionLocation locateRegion(TableName tableName, byte[] row)
throws IOException {
return wrappedConnection.locateRegion(tableName, row);
}
@Override
public HRegionLocation locateRegion(byte[] tableName, byte[] row)
throws IOException {
return wrappedConnection.locateRegion(tableName, row);
}
@Override
public RegionLocations locateRegion(TableName tableName, byte[] row, boolean useCache,
boolean retry) throws IOException {
return wrappedConnection.locateRegion(tableName, row, useCache, retry);
}
@Override
public void clearRegionCache() {
wrappedConnection.clearRegionCache();
}
@Override
public void clearRegionCache(TableName tableName) {
wrappedConnection.clearRegionCache(tableName);
}
@Override
public void clearRegionCache(byte[] tableName) {
wrappedConnection.clearRegionCache(tableName);
}
@Override
public void deleteCachedRegionLocation(HRegionLocation location) {
wrappedConnection.deleteCachedRegionLocation(location);
}
@Override
public HRegionLocation relocateRegion(TableName tableName, byte[] row)
throws IOException {
return wrappedConnection.relocateRegion(tableName, row);
}
@Override
public HRegionLocation relocateRegion(byte[] tableName, byte[] row)
throws IOException {
return wrappedConnection.relocateRegion(tableName, row);
}
@Override
public void updateCachedLocations(TableName tableName, byte[] rowkey,
Object exception, HRegionLocation source) {
wrappedConnection.updateCachedLocations(tableName, rowkey, exception, source);
}
@Override
public void updateCachedLocations(TableName tableName, byte[] regionName, byte[] rowkey,
Object exception, ServerName source) {
wrappedConnection.updateCachedLocations(tableName, regionName, rowkey, exception, source);
}
@Override
public void updateCachedLocations(byte[] tableName, byte[] rowkey,
Object exception, HRegionLocation source) {
wrappedConnection.updateCachedLocations(tableName, rowkey, exception, source);
}
@Override
public HRegionLocation locateRegion(byte[] regionName) throws IOException {
return wrappedConnection.locateRegion(regionName);
}
@Override
public List<HRegionLocation> locateRegions(TableName tableName)
throws IOException {
return wrappedConnection.locateRegions(tableName);
}
@Override
public List<HRegionLocation> locateRegions(byte[] tableName)
throws IOException {
return wrappedConnection.locateRegions(tableName);
}
@Override
public List<HRegionLocation> locateRegions(TableName tableName,
boolean useCache, boolean offlined) throws IOException {
return wrappedConnection.locateRegions(tableName, useCache, offlined);
}
@Override
public List<HRegionLocation> locateRegions(byte[] tableName,
boolean useCache, boolean offlined) throws IOException {
return wrappedConnection.locateRegions(tableName, useCache, offlined);
}
@Override
public RegionLocations locateRegion(TableName tableName, byte[] row, boolean useCache,
boolean retry, int replicaId) throws IOException {
return wrappedConnection.locateRegion(tableName, row, useCache, retry, replicaId);
}
@Override
public RegionLocations relocateRegion(TableName tableName, byte[] row, int replicaId)
throws IOException {
return wrappedConnection.relocateRegion(tableName, row, replicaId);
}
// RPC stub accessors: all delegate to the wrapped connection, which owns the
// actual stub creation and lifecycle.
@Override
public MasterService.BlockingInterface getMaster() throws IOException {
return wrappedConnection.getMaster();
}
// Admin-service stub for a specific region server; delegates.
@Override
public AdminService.BlockingInterface getAdmin(
ServerName serverName) throws IOException {
return wrappedConnection.getAdmin(serverName);
}
// Client-service stub for a specific region server; delegates.
@Override
public ClientService.BlockingInterface getClient(
ServerName serverName) throws IOException {
return wrappedConnection.getClient(serverName);
}
// Admin stub variant with a getMaster flag; delegates.
@Override
public AdminService.BlockingInterface getAdmin(
ServerName serverName, boolean getMaster) throws IOException {
return wrappedConnection.getAdmin(serverName, getMaster);
}
// Row-to-region lookup with optional cache reload; delegates.
@Override
public HRegionLocation getRegionLocation(TableName tableName, byte[] row,
boolean reload) throws IOException {
return wrappedConnection.getRegionLocation(tableName, row, reload);
}
// Deprecated byte[]-table-name form; delegates.
@Override
public HRegionLocation getRegionLocation(byte[] tableName, byte[] row,
boolean reload) throws IOException {
return wrappedConnection.getRegionLocation(tableName, row, reload);
}
// Batch execution entry points: results are written into the caller-supplied
// `results` array by the wrapped connection; this class only forwards.
@Override
public void processBatch(List<? extends Row> actions, TableName tableName,
ExecutorService pool, Object[] results) throws IOException,
InterruptedException {
wrappedConnection.processBatch(actions, tableName, pool, results);
}
// Deprecated byte[]-table-name form; delegates.
@Override
public void processBatch(List<? extends Row> actions, byte[] tableName,
ExecutorService pool, Object[] results) throws IOException,
InterruptedException {
wrappedConnection.processBatch(actions, tableName, pool, results);
}
// Batch variant invoking a per-result callback; delegates.
@Override
public <R> void processBatchCallback(List<? extends Row> list,
TableName tableName, ExecutorService pool, Object[] results,
Callback<R> callback) throws IOException, InterruptedException {
wrappedConnection.processBatchCallback(list, tableName, pool, results, callback);
}
// Deprecated byte[]-table-name form of the callback variant; delegates.
@Override
public <R> void processBatchCallback(List<? extends Row> list,
byte[] tableName, ExecutorService pool, Object[] results,
Callback<R> callback) throws IOException, InterruptedException {
wrappedConnection.processBatchCallback(list, tableName, pool, results, callback);
}
// Remaining interface surface: prefetch toggles, descriptor lookups, and
// internal-state getters. All are one-line delegations to wrappedConnection.
@Override
public void setRegionCachePrefetch(TableName tableName, boolean enable) {
wrappedConnection.setRegionCachePrefetch(tableName, enable);
}
@Override
public void setRegionCachePrefetch(byte[] tableName, boolean enable) {
wrappedConnection.setRegionCachePrefetch(tableName, enable);
}
@Override
public boolean getRegionCachePrefetch(TableName tableName) {
return wrappedConnection.getRegionCachePrefetch(tableName);
}
@Override
public boolean getRegionCachePrefetch(byte[] tableName) {
return wrappedConnection.getRegionCachePrefetch(tableName);
}
// Current number of region servers, per the wrapped connection.
@Override
public int getCurrentNrHRS() throws IOException {
return wrappedConnection.getCurrentNrHRS();
}
@Override
public HTableDescriptor[] getHTableDescriptorsByTableName(
List<TableName> tableNames) throws IOException {
return wrappedConnection.getHTableDescriptorsByTableName(tableNames);
}
// Deprecated String-name form; delegates.
@Override
public HTableDescriptor[] getHTableDescriptors(List<String> tableNames)
throws IOException {
return wrappedConnection.getHTableDescriptors(tableNames);
}
// Reflects the wrapped connection's closed state; closing semantics live there.
@Override
public boolean isClosed() {
return wrappedConnection.isClosed();
}
@Override
public void clearCaches(ServerName sn) {
wrappedConnection.clearCaches(sn);
}
@Override
public MasterKeepAliveConnection getKeepAliveMasterService()
throws MasterNotRunningException {
return wrappedConnection.getKeepAliveMasterService();
}
@Override
public boolean isDeadServer(ServerName serverName) {
return wrappedConnection.isDeadServer(serverName);
}
@Override
public NonceGenerator getNonceGenerator() {
return wrappedConnection.getNonceGenerator();
}
@Override
public AsyncProcess getAsyncProcess() {
return wrappedConnection.getAsyncProcess();
}
@Override
public RpcRetryingCallerFactory getNewRpcRetryingCallerFactory(Configuration conf) {
return wrappedConnection.getNewRpcRetryingCallerFactory(conf);
}
@Override
public boolean isManaged() {
return wrappedConnection.isManaged();
}
@Override
public ServerStatisticTracker getStatisticsTracker() {
return wrappedConnection.getStatisticsTracker();
}
@Override
public ClientBackoffPolicy getBackoffPolicy() {
return wrappedConnection.getBackoffPolicy();
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.CreateSnapshotRequestMarshaller;
/**
 * <p>
 * Contains the parameters for CreateSnapshot.
 * </p>
 */
public class CreateSnapshotRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable, DryRunSupportedRequest<CreateSnapshotRequest> {

    /** The ID of the EBS volume. */
    private String volumeId;

    /** A description for the snapshot. */
    private String description;

    /**
     * Default constructor for CreateSnapshotRequest object. Callers should use
     * the setter or fluent setter (with...) methods to initialize the object
     * after creating it.
     */
    public CreateSnapshotRequest() {
    }

    /**
     * Constructs a new CreateSnapshotRequest object. Callers should use the
     * setter or fluent setter (with...) methods to initialize any additional
     * object members.
     *
     * @param volumeId
     *        The ID of the EBS volume.
     * @param description
     *        A description for the snapshot.
     */
    public CreateSnapshotRequest(String volumeId, String description) {
        setVolumeId(volumeId);
        setDescription(description);
    }

    /**
     * Sets the ID of the EBS volume.
     *
     * @param volumeId
     *        The ID of the EBS volume.
     */
    public void setVolumeId(String volumeId) {
        this.volumeId = volumeId;
    }

    /**
     * @return The ID of the EBS volume.
     */
    public String getVolumeId() {
        return this.volumeId;
    }

    /**
     * Fluent setter for the EBS volume ID.
     *
     * @param volumeId
     *        The ID of the EBS volume.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateSnapshotRequest withVolumeId(String volumeId) {
        setVolumeId(volumeId);
        return this;
    }

    /**
     * Sets the description for the snapshot.
     *
     * @param description
     *        A description for the snapshot.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return A description for the snapshot.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent setter for the snapshot description.
     *
     * @param description
     *        A description for the snapshot.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateSnapshotRequest withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled
     * request configured with additional parameters to enable operation
     * dry-run.
     */
    @Override
    public Request<CreateSnapshotRequest> getDryRunRequest() {
        Request<CreateSnapshotRequest> request = new CreateSnapshotRequestMarshaller()
                .marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format (including the trailing comma when only volumeId is
        // set) is kept identical to the original generated code.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getVolumeId() != null)
            sb.append("VolumeId: " + getVolumeId() + ",");
        if (getDescription() != null)
            sb.append("Description: " + getDescription());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof rejects null, matching the original's behavior.
        if (!(obj instanceof CreateSnapshotRequest))
            return false;
        CreateSnapshotRequest other = (CreateSnapshotRequest) obj;
        // Objects.equals replaces the hand-rolled null-XOR pattern with
        // identical semantics.
        return Objects.equals(getVolumeId(), other.getVolumeId())
                && Objects.equals(getDescription(), other.getDescription());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation as the original
        // manual computation, so the produced value is unchanged.
        return Objects.hash(getVolumeId(), getDescription());
    }

    @Override
    public CreateSnapshotRequest clone() {
        return (CreateSnapshotRequest) super.clone();
    }
}
| |
package com.cabatuan.breastfriend;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.support.v4.app.NavUtils;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import java.util.HashMap;
import java.util.Locale;
/**
* Created by cobalt on 10/19/15.
*/
public class RiskFactorsActivity extends AppCompatActivity implements TextToSpeech.OnInitListener{
public final static String TAG = "RiskFactorsActivity";
private ViewPager viewPager = null;
private MyPagerAdapter myPagerAdapter = null;
private TextToSpeech tts = null;
private boolean ttsLoaded = false;
private String[] infotext;
private int endPage = 0; // track end of viewpager
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_risk_factors);
// Extract String info array for slides
infotext = getResources().getStringArray(R.array.riskinfotext);
<<<<<<< HEAD
//Log.d(TAG, "infotext.length = " + infotext.length);
=======
Log.d(TAG, "infotext.length = " + infotext.length);
>>>>>>> 35416f1914e6ec10b7c14a90236881adfd7e4a99
// TextToSpeech engine initialization + View pager initialization
tts = new TextToSpeech(this /* context */, this /* listener */);
}
@Override
public void onInit(int status) {
if(status == TextToSpeech.SUCCESS) {
int temp = tts.setLanguage(Locale.US);
if (temp == TextToSpeech.LANG_MISSING_DATA ||
temp == TextToSpeech.LANG_NOT_SUPPORTED) {
<<<<<<< HEAD
//Log.e(TAG, "Language is not available.");
=======
Log.e(TAG, "Language is not available.");
>>>>>>> 35416f1914e6ec10b7c14a90236881adfd7e4a99
ttsLoaded = false;
}
else {
ttsLoaded = true;
tts.setSpeechRate(0.8f);
// Initialize view pager
initializePager();
}
}
else{
Toast.makeText(this, "TTS Initialization failed", Toast.LENGTH_LONG).show();
}
}
public void initializePager(){
viewPager = (ViewPager)findViewById(R.id.riskviewpager);
myPagerAdapter = new MyPagerAdapter();
viewPager.setAdapter(myPagerAdapter);
viewPager.setPageTransformer(true, new ZoomOutPageTransformer());
viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
readMessage(infotext[position], "page" + position);
if (position == myPagerAdapter.getCount()-1){
++endPage;
}
if (endPage>2) {
endPage = 0;
String message = getString(R.string.title_risk_factors) + " completed!";
showToast(message);
NavUtils.navigateUpTo(RiskFactorsActivity.this, new Intent(RiskFactorsActivity.this, MainActivity.class));
}
}
private void showToast(String message) {
// Inflate the Layout
LayoutInflater inflater = getLayoutInflater();
View layout = inflater.inflate(R.layout.mytoast,
(ViewGroup) findViewById(R.id.custom_toast_layout));
// Retrieve the ImageView and TextView
ImageView iv = (ImageView) layout.findViewById(R.id.toastImageView);
TextView text = (TextView) layout.findViewById(R.id.textToShow);
// Set the image
iv.setImageResource(R.mipmap.ic_alert);
// Set the Text to show in TextView
text.setText(message);
text.setBackgroundColor(Color.BLACK);
final Toast toast = new Toast(getApplicationContext());
toast.setGravity(Gravity.CENTER_VERTICAL, 0, 0);
toast.setDuration(Toast.LENGTH_SHORT);
toast.setView(layout);
toast.show();
}
@Override
public void onPageSelected(int position) {
readMessage(infotext[position], "page" + position);
invalidateOptionsMenu();
}
@Override
public void onPageScrollStateChanged(int state) {
}
});
}
public void readMessage(String message, String utteranceId) {
HashMap<String, String> params = new HashMap<>();
if (ttsLoaded) {
params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utteranceId);
tts.speak(message, TextToSpeech.QUEUE_FLUSH, params);
}
}
@Override
public void onPause() {
if (tts != null) {
tts.stop();
}
super.onPause();
}
@Override
public void onStop() {
if (tts != null) {
tts.stop();
}
super.onStop();
}
@Override
public void onDestroy() {
if (tts != null) {
tts.stop();
tts.shutdown();
}
super.onDestroy();
}
@Override
public void onBackPressed() {
if (tts != null) {
tts.stop();
tts.shutdown();
}
// Go back to MainActivity
NavUtils.navigateUpTo(this, new Intent(this, MainActivity.class));
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.basic_facts_menu, menu);
if (viewPager == null){ // Solves null pointer exception
initializePager();
}
<<<<<<< HEAD
//Log.d(TAG, "myPagerAdapter.getCount() = " + myPagerAdapter.getCount());
=======
Log.d(TAG, "myPagerAdapter.getCount() = " + myPagerAdapter.getCount());
>>>>>>> 35416f1914e6ec10b7c14a90236881adfd7e4a99
menu.findItem(R.id.action_previous).setEnabled(viewPager.getCurrentItem() > 0);
// Add either a "next" or "finish" button to the action bar, depending on which page
// is currently selected.
MenuItem item = menu.add(Menu.NONE, R.id.action_next, Menu.NONE,
(viewPager.getCurrentItem() == myPagerAdapter.getCount() - 1)
? R.string.action_finish
: R.string.action_next);
item.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM | MenuItem.SHOW_AS_ACTION_WITH_TEXT);
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
<<<<<<< HEAD
//Log.d(TAG,"home was pressed");
=======
Log.d(TAG,"home was pressed");
>>>>>>> 35416f1914e6ec10b7c14a90236881adfd7e4a99
// Navigate "up" the demo structure to the launchpad activity.
// See http://developer.android.com/design/patterns/navigation.html for more.
NavUtils.navigateUpTo(this, new Intent(this, MainActivity.class));
return true;
case R.id.action_previous:
<<<<<<< HEAD
//Log.d(TAG,"action_previous was pressed");
=======
Log.d(TAG,"action_previous was pressed");
>>>>>>> 35416f1914e6ec10b7c14a90236881adfd7e4a99
// Go to the previous step in the wizard. If there is no previous step,
// setCurrentItem will do nothing.
viewPager.setCurrentItem(viewPager.getCurrentItem() - 1);
return true;
case R.id.action_next:
<<<<<<< HEAD
//Log.d(TAG,"action_next was pressed");
=======
Log.d(TAG,"action_next was pressed");
>>>>>>> 35416f1914e6ec10b7c14a90236881adfd7e4a99
// Advance to the next step in the wizard. If there is no next step,
// go back to MainActivity.
if (viewPager.getCurrentItem() == myPagerAdapter.getCount() - 1)
NavUtils.navigateUpTo(this, new Intent(this, MainActivity.class));
else
viewPager.setCurrentItem(viewPager.getCurrentItem() + 1);
return true;
}
return super.onOptionsItemSelected(item);
}
private class MyPagerAdapter extends PagerAdapter {
private final int NUMBER_OF_PAGES = infotext.length;
private int pos;
private int[] res = {
R.drawable.woman,
R.drawable.oldlady,
R.drawable.family,
R.drawable.pills,
R.drawable.gene,
R.drawable.drink,
R.drawable.mammogram,
R.drawable.radioactive,
R.drawable.nosmoking};
private int[] backgroundcolor = {
0xFFFFFFFF,
0xFFFFFFFF,
0xFFFFFFFF,
0xFFFFFFFF,
0xFF000000,
0xFF000000,
0xFF101010,
0xFF000000,
0xFF000000};
@Override
public int getCount() {
return NUMBER_OF_PAGES;
}
public int getPosition() {
return pos;
}
@Override
public boolean isViewFromObject(View view, Object object) {
return view == object;
}
@Override
public Object instantiateItem(ViewGroup container, int position) {
pos = position;
TextView textView = new TextView(RiskFactorsActivity.this);
textView.setTextColor(getResources().getColor(R.color.pink));
textView.setTextSize(24);
textView.setGravity(Gravity.CENTER);
textView.setText(String.valueOf(position + 1) + ". " + infotext[position]);
ImageView imageView = new ImageView(RiskFactorsActivity.this);
imageView.setImageResource(res[position]);
ViewGroup.LayoutParams imageParams = new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
imageView.setLayoutParams(imageParams);
LinearLayout layout = new LinearLayout(RiskFactorsActivity.this);
layout.setOrientation(LinearLayout.VERTICAL);
layout.setGravity(Gravity.CENTER_VERTICAL);
ViewGroup.LayoutParams layoutParams = new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
layout.setBackgroundColor(backgroundcolor[position]);
layout.setLayoutParams(layoutParams);
layout.addView(textView);
layout.addView(imageView);
final int page = position;
// Listen for clicks
layout.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View v) {
readMessage(infotext[page],"page" + page);
}});
container.addView(layout);
return layout;
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
container.removeView((LinearLayout)object);
}
}
}
| |
// This is a generated file. Not intended for manual editing.
package com.tang.intellij.lua.psi;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.PsiElement;
import com.intellij.lang.ASTNode;
import com.tang.intellij.lua.lang.LuaParserDefinitionKt;
import com.tang.intellij.lua.psi.impl.*;
public interface LuaTypes {
IElementType ARGS = LuaParserDefinitionKt.createType("ARGS");
IElementType ASSIGN_STAT = LuaParserDefinitionKt.createType("ASSIGN_STAT");
IElementType ATTRIBUTE = LuaParserDefinitionKt.createType("ATTRIBUTE");
IElementType BINARY_EXPR = LuaParserDefinitionKt.createType("BINARY_EXPR");
IElementType BINARY_OP = LuaParserDefinitionKt.createType("BINARY_OP");
IElementType BLOCK = LuaParserDefinitionKt.createType("BLOCK");
IElementType BREAK_STAT = LuaParserDefinitionKt.createType("BREAK_STAT");
IElementType CALL_EXPR = LuaParserDefinitionKt.createType("CALL_EXPR");
IElementType CLASS_METHOD_DEF = LuaParserDefinitionKt.createType("CLASS_METHOD_DEF");
IElementType CLASS_METHOD_NAME = LuaParserDefinitionKt.createType("CLASS_METHOD_NAME");
IElementType CLOSURE_EXPR = LuaParserDefinitionKt.createType("CLOSURE_EXPR");
IElementType DO_STAT = LuaParserDefinitionKt.createType("DO_STAT");
IElementType EMPTY_STAT = LuaParserDefinitionKt.createType("EMPTY_STAT");
IElementType EXPR = LuaParserDefinitionKt.createType("EXPR");
IElementType EXPR_LIST = LuaParserDefinitionKt.createType("EXPR_LIST");
IElementType EXPR_STAT = LuaParserDefinitionKt.createType("EXPR_STAT");
IElementType FOR_A_STAT = LuaParserDefinitionKt.createType("FOR_A_STAT");
IElementType FOR_B_STAT = LuaParserDefinitionKt.createType("FOR_B_STAT");
IElementType FUNC_BODY = LuaParserDefinitionKt.createType("FUNC_BODY");
IElementType FUNC_DEF = LuaParserDefinitionKt.createType("FUNC_DEF");
IElementType GOTO_STAT = LuaParserDefinitionKt.createType("GOTO_STAT");
IElementType IF_STAT = LuaParserDefinitionKt.createType("IF_STAT");
IElementType INDEX_EXPR = LuaParserDefinitionKt.createType("INDEX_EXPR");
IElementType LABEL_STAT = LuaParserDefinitionKt.createType("LABEL_STAT");
IElementType LIST_ARGS = LuaParserDefinitionKt.createType("LIST_ARGS");
IElementType LITERAL_EXPR = LuaParserDefinitionKt.createType("LITERAL_EXPR");
IElementType LOCAL_DEF = LuaParserDefinitionKt.createType("LOCAL_DEF");
IElementType LOCAL_FUNC_DEF = LuaParserDefinitionKt.createType("LOCAL_FUNC_DEF");
IElementType NAME_DEF = LuaParserDefinitionKt.createType("NAME_DEF");
IElementType NAME_EXPR = LuaParserDefinitionKt.createType("NAME_EXPR");
IElementType NAME_LIST = LuaParserDefinitionKt.createType("NAME_LIST");
IElementType PARAM_NAME_DEF = LuaParserDefinitionKt.createType("PARAM_NAME_DEF");
IElementType PAREN_EXPR = LuaParserDefinitionKt.createType("PAREN_EXPR");
IElementType REPEAT_STAT = LuaParserDefinitionKt.createType("REPEAT_STAT");
IElementType RETURN_STAT = LuaParserDefinitionKt.createType("RETURN_STAT");
IElementType SHEBANG_LINE = LuaParserDefinitionKt.createType("SHEBANG_LINE");
IElementType SINGLE_ARG = LuaParserDefinitionKt.createType("SINGLE_ARG");
IElementType TABLE_EXPR = LuaParserDefinitionKt.createType("TABLE_EXPR");
IElementType TABLE_FIELD = LuaParserDefinitionKt.createType("TABLE_FIELD");
IElementType TABLE_FIELD_SEP = LuaParserDefinitionKt.createType("TABLE_FIELD_SEP");
IElementType UNARY_EXPR = LuaParserDefinitionKt.createType("UNARY_EXPR");
IElementType UNARY_OP = LuaParserDefinitionKt.createType("UNARY_OP");
IElementType VAR_LIST = LuaParserDefinitionKt.createType("VAR_LIST");
IElementType WHILE_STAT = LuaParserDefinitionKt.createType("WHILE_STAT");
IElementType AND = LuaParserDefinitionKt.createToken("and");
IElementType ASSIGN = LuaParserDefinitionKt.createToken("=");
IElementType BIT_AND = LuaParserDefinitionKt.createToken("&");
IElementType BIT_LTLT = LuaParserDefinitionKt.createToken("<<");
IElementType BIT_OR = LuaParserDefinitionKt.createToken("|");
IElementType BIT_RTRT = LuaParserDefinitionKt.createToken(">>");
IElementType BIT_TILDE = LuaParserDefinitionKt.createToken("~");
IElementType BLOCK_COMMENT = LuaParserDefinitionKt.createToken("BLOCK_COMMENT");
IElementType BREAK = LuaParserDefinitionKt.createToken("break");
IElementType COLON = LuaParserDefinitionKt.createToken(":");
IElementType COMMA = LuaParserDefinitionKt.createToken(",");
IElementType CONCAT = LuaParserDefinitionKt.createToken("..");
IElementType DIV = LuaParserDefinitionKt.createToken("/");
IElementType DO = LuaParserDefinitionKt.createToken("do");
IElementType DOC_COMMENT = LuaParserDefinitionKt.createToken("DOC_COMMENT");
IElementType DOT = LuaParserDefinitionKt.createToken(".");
IElementType DOUBLE_COLON = LuaParserDefinitionKt.createToken("::");
IElementType DOUBLE_DIV = LuaParserDefinitionKt.createToken("//");
IElementType ELLIPSIS = LuaParserDefinitionKt.createToken("...");
IElementType ELSE = LuaParserDefinitionKt.createToken("else");
IElementType ELSEIF = LuaParserDefinitionKt.createToken("elseif");
IElementType END = LuaParserDefinitionKt.createToken("end");
IElementType ENDREGION = LuaParserDefinitionKt.createToken("ENDREGION");
IElementType EQ = LuaParserDefinitionKt.createToken("==");
IElementType EXP = LuaParserDefinitionKt.createToken("^");
IElementType FALSE = LuaParserDefinitionKt.createToken("false");
IElementType FOR = LuaParserDefinitionKt.createToken("for");
IElementType FUNCTION = LuaParserDefinitionKt.createToken("function");
IElementType GE = LuaParserDefinitionKt.createToken(">=");
IElementType GETN = LuaParserDefinitionKt.createToken("#");
IElementType GOTO = LuaParserDefinitionKt.createToken("goto");
IElementType GT = LuaParserDefinitionKt.createToken(">");
IElementType ID = LuaParserDefinitionKt.createToken("ID");
IElementType IF = LuaParserDefinitionKt.createToken("if");
IElementType IN = LuaParserDefinitionKt.createToken("in");
IElementType LBRACK = LuaParserDefinitionKt.createToken("[");
IElementType LCURLY = LuaParserDefinitionKt.createToken("{");
IElementType LE = LuaParserDefinitionKt.createToken("<=");
IElementType LOCAL = LuaParserDefinitionKt.createToken("local");
IElementType LPAREN = LuaParserDefinitionKt.createToken("(");
IElementType LT = LuaParserDefinitionKt.createToken("<");
IElementType MINUS = LuaParserDefinitionKt.createToken("-");
IElementType MOD = LuaParserDefinitionKt.createToken("%");
IElementType MULT = LuaParserDefinitionKt.createToken("*");
IElementType NE = LuaParserDefinitionKt.createToken("~=");
IElementType NIL = LuaParserDefinitionKt.createToken("nil");
IElementType NOT = LuaParserDefinitionKt.createToken("not");
IElementType NUMBER = LuaParserDefinitionKt.createToken("NUMBER");
IElementType OR = LuaParserDefinitionKt.createToken("or");
IElementType PLUS = LuaParserDefinitionKt.createToken("+");
IElementType RBRACK = LuaParserDefinitionKt.createToken("]");
IElementType RCURLY = LuaParserDefinitionKt.createToken("}");
IElementType REGION = LuaParserDefinitionKt.createToken("REGION");
IElementType REPEAT = LuaParserDefinitionKt.createToken("repeat");
IElementType RETURN = LuaParserDefinitionKt.createToken("return");
IElementType RPAREN = LuaParserDefinitionKt.createToken(")");
IElementType SEMI = LuaParserDefinitionKt.createToken(";");
IElementType SHEBANG = LuaParserDefinitionKt.createToken("#!");
IElementType SHEBANG_CONTENT = LuaParserDefinitionKt.createToken("SHEBANG_CONTENT");
IElementType SHORT_COMMENT = LuaParserDefinitionKt.createToken("SHORT_COMMENT");
IElementType STRING = LuaParserDefinitionKt.createToken("STRING");
IElementType THEN = LuaParserDefinitionKt.createToken("then");
IElementType TRUE = LuaParserDefinitionKt.createToken("true");
IElementType UNTIL = LuaParserDefinitionKt.createToken("until");
IElementType WHILE = LuaParserDefinitionKt.createToken("while");
class Factory {
public static PsiElement createElement(ASTNode node) {
IElementType type = node.getElementType();
if (type == ASSIGN_STAT) {
return new LuaAssignStatImpl(node);
}
else if (type == ATTRIBUTE) {
return new LuaAttributeImpl(node);
}
else if (type == BINARY_EXPR) {
return new LuaBinaryExprImpl(node);
}
else if (type == BINARY_OP) {
return new LuaBinaryOpImpl(node);
}
else if (type == BLOCK) {
return new LuaBlockImpl(node);
}
else if (type == BREAK_STAT) {
return new LuaBreakStatImpl(node);
}
else if (type == CALL_EXPR) {
return new LuaCallExprImpl(node);
}
else if (type == CLASS_METHOD_DEF) {
return new LuaClassMethodDefImpl(node);
}
else if (type == CLASS_METHOD_NAME) {
return new LuaClassMethodNameImpl(node);
}
else if (type == CLOSURE_EXPR) {
return new LuaClosureExprImpl(node);
}
else if (type == DO_STAT) {
return new LuaDoStatImpl(node);
}
else if (type == EMPTY_STAT) {
return new LuaEmptyStatImpl(node);
}
else if (type == EXPR) {
return new LuaExprImpl(node);
}
else if (type == EXPR_LIST) {
return new LuaExprListImpl(node);
}
else if (type == EXPR_STAT) {
return new LuaExprStatImpl(node);
}
else if (type == FOR_A_STAT) {
return new LuaForAStatImpl(node);
}
else if (type == FOR_B_STAT) {
return new LuaForBStatImpl(node);
}
else if (type == FUNC_BODY) {
return new LuaFuncBodyImpl(node);
}
else if (type == FUNC_DEF) {
return new LuaFuncDefImpl(node);
}
else if (type == GOTO_STAT) {
return new LuaGotoStatImpl(node);
}
else if (type == IF_STAT) {
return new LuaIfStatImpl(node);
}
else if (type == INDEX_EXPR) {
return new LuaIndexExprImpl(node);
}
else if (type == LABEL_STAT) {
return new LuaLabelStatImpl(node);
}
else if (type == LIST_ARGS) {
return new LuaListArgsImpl(node);
}
else if (type == LITERAL_EXPR) {
return new LuaLiteralExprImpl(node);
}
else if (type == LOCAL_DEF) {
return new LuaLocalDefImpl(node);
}
else if (type == LOCAL_FUNC_DEF) {
return new LuaLocalFuncDefImpl(node);
}
else if (type == NAME_DEF) {
return new LuaNameDefImpl(node);
}
else if (type == NAME_EXPR) {
return new LuaNameExprImpl(node);
}
else if (type == NAME_LIST) {
return new LuaNameListImpl(node);
}
else if (type == PARAM_NAME_DEF) {
return new LuaParamNameDefImpl(node);
}
else if (type == PAREN_EXPR) {
return new LuaParenExprImpl(node);
}
else if (type == REPEAT_STAT) {
return new LuaRepeatStatImpl(node);
}
else if (type == RETURN_STAT) {
return new LuaReturnStatImpl(node);
}
else if (type == SHEBANG_LINE) {
return new LuaShebangLineImpl(node);
}
else if (type == SINGLE_ARG) {
return new LuaSingleArgImpl(node);
}
else if (type == TABLE_EXPR) {
return new LuaTableExprImpl(node);
}
else if (type == TABLE_FIELD) {
return new LuaTableFieldImpl(node);
}
else if (type == TABLE_FIELD_SEP) {
return new LuaTableFieldSepImpl(node);
}
else if (type == UNARY_EXPR) {
return new LuaUnaryExprImpl(node);
}
else if (type == UNARY_OP) {
return new LuaUnaryOpImpl(node);
}
else if (type == VAR_LIST) {
return new LuaVarListImpl(node);
}
else if (type == WHILE_STAT) {
return new LuaWhileStatImpl(node);
}
throw new AssertionError("Unknown element type: " + type);
}
}
}
| |
package com.github.texxel.data;
import com.github.texxel.data.exceptions.DataSerializationException;
import com.github.texxel.data.exceptions.InvalidDataException;
import com.github.texxel.data.exceptions.MissingDataException;
import com.github.texxel.data.exceptions.WrongTypeException;
import com.github.texxel.data.serializers.ArrayConverter;
import java.util.*;
/**
* A helper class for conversion between Data and Primitive data
*/
class PrimitiveConverter {
// Maps each primitive type to its boxed wrapper; read() uses this to
// normalise a primitive `expected` class before isAssignableFrom checks.
private static Map<Class, Class> wrappedTypes = new HashMap<>();
static {
wrappedTypes.put(boolean.class, Boolean.class);
wrappedTypes.put(byte.class, Byte.class);
wrappedTypes.put(short.class, Short.class);
wrappedTypes.put(char.class, Character.class);
wrappedTypes.put(int.class, Integer.class);
wrappedTypes.put(long.class, Long.class);
wrappedTypes.put(float.class, Float.class);
wrappedTypes.put(double.class, Double.class);
}
// Serialises `obj` into `data` under `key`. Nulls and Strings are written
// directly; any other object is serialised once via its registered
// DataConverter and subsequently written as a "__#<id>" reference string, so
// shared objects are stored exactly once. When the same object is met again
// at a shallower depth, its already-written contents are physically moved to
// the shallower location and the old slot becomes the reference.
// NOTE(review): `id` comes from System.identityHashCode, which is not
// guaranteed unique — two distinct live objects can collide and would then
// alias each other's data. Confirm whether an identity-keyed map is needed.
public static void write (String key, Object obj, DataOut data) {
PData pData = data.pData;
if (obj == null) {
pData.setNull(key);
return;
}
if (obj instanceof String) {
data.write(key, (String)obj);
return;
}
Class clazz = obj.getClass();
long id = System.identityHashCode(obj);
String idKey = "__#" + id;
// check if the data has already been written
DataOut previous = data.global.get(id);
if (previous != null) {
if ( previous.depth <= data.depth ) {
// write a reference to the other location
pData.set(key, idKey);
// add a reference to the top level
if (!data.root.contains(idKey)) {
PData section = data.root.createSection(idKey);
for (String path : previous.path) {
section = section.createSection(path);
}
}
} else {
// move the contents to over here
PData oldPContainer = data.containers.get(id);
PData newPContainer = data.pData;
PData objPData = previous.pData;
//rewire the primitive containers
oldPContainer.set(previous.path[previous.path.length - 1], idKey);
newPContainer.data.put(key, objPData);
// correct the data container
DataOut moved = new DataOut(data.getPath(key), objPData, data.global, data.containers, data.root, data.depth+1);
data.global.put(id, moved);
// correct the top level reference
PData section = data.root.createSection(idKey);
for (String path : moved.path) {
section = section.createSection(path);
}
}
return;
}
// check the object type can be serialized
DataConverter converter = ConverterRegistry.find(clazz);
if (converter == null)
throw new InvalidDataException("No converter registered for '" + clazz.getName() + "'");
// write the data here
DataOut objData = data.createSection(key);
data.global.put(id, objData);
data.containers.put(id, data.pData);
objData.write("__classname", clazz.getName());
objData.write("__id", idKey);
try {
converter.serialize(obj, objData);
} catch (Throwable thr) {
throw new DataSerializationException("Failed to serialize '" + clazz.getName() + "'", thr);
}
}
/**
 * Reads the value stored under {@code key} and converts it to {@code expected}.
 *
 * <p>Primitive {@code expected} classes are mapped to their wrappers. Stored
 * numbers are narrowed/widened to whatever numeric wrapper the caller asked for.
 * Strings of the form {@code "__#<id>"} are object references and are resolved
 * via {@code getObjectReference}. Nested sections are restored through their
 * registered converter (or returned as a raw {@code DataIn} when they carry no
 * {@code __classname} marker), and are cached per key so repeated reads return
 * the same instance.
 *
 * @param key      key to read
 * @param expected type the caller wants back
 * @param data     source section
 * @return the converted value, or null if the stored value is null
 * @throws MissingDataException if nothing is mapped to {@code key}
 * @throws WrongTypeException   if the stored value cannot satisfy {@code expected}
 */
public static <T> T read(String key, Class<T> expected, DataIn data) {
    PData pData = data.pData;
    PData.Type type = pData.getType(key);
    if (type == null)
        throw new MissingDataException("No value mapped to '" + key + "'");
    // A primitive class can never be returned from Object-based storage;
    // compare against the wrapper instead.
    if (expected.isPrimitive())
        expected = wrappedTypes.get(expected);
    switch (type) {
        case NULL:
            return null;
        case BOOLEAN:
            if (expected.isAssignableFrom(Boolean.class))
                return (T) (Boolean) pData.getBoolean(key);
            throw badType(expected, Boolean.class, key);
        case LONG:
            // Integral storage: convert to whichever numeric wrapper was requested.
            if (expected.isAssignableFrom(Long.class))
                return (T) (Long) pData.getLong(key);
            if (expected.isAssignableFrom(Integer.class))
                return (T) (Integer) (int) pData.getLong(key);
            if (expected.isAssignableFrom(Float.class))
                return (T) (Float) (float) pData.getLong(key);
            if (expected.isAssignableFrom(Double.class))
                return (T) (Double) (double) pData.getLong(key);
            throw badType(expected, Long.class, key);
        case DOUBLE:
            if (expected.isAssignableFrom(Double.class))
                return (T) (Double) pData.getDouble(key);
            if (expected.isAssignableFrom(Float.class))
                return (T) (Float) (float) pData.getDouble(key);
            throw badType(expected, Double.class, key);
        case STRING:
            String contents = pData.getString(key);
            if (contents.startsWith("__#")) {
                // "__#<id>" strings are object references written by write().
                return getObjectReference(contents, data, expected);
            }
            if (expected.isAssignableFrom(String.class))
                return (T) contents;
            throw badType(expected, String.class, key);
        case DATA:
            // Nested section: handled below.
            break;
        default:
            throw new RuntimeException("Unknown type " + type);
    }
    // Every non-DATA case above either returned or threw, so only DATA reaches
    // this point. (The former re-checks of NULL/STRING/DATA here were
    // unreachable dead code and have been removed.)
    Object cached = data.cache.get(key);
    if (cached != null) {
        return (T) cached;
    }
    T value;
    PData pDataSec = pData.getSection(key);
    DataIn dataSec = new DataIn(pDataSec, data.references, data.root);
    if (!pDataSec.contains("__classname")) {
        // No class marker: this is just a plain nested bundle, only meaningful
        // to the caller as a DataIn.
        if (expected.isAssignableFrom(DataIn.class))
            value = (T) dataSec;
        else
            throw badType(expected, DataIn.class, key);
    } else {
        // Serialized object: reuse a previously restored instance when one exists.
        String id = pDataSec.getString("__id");
        Object maybe = data.references.get(id);
        if (maybe != null) {
            Class actual = maybe.getClass();
            if (expected.isAssignableFrom(actual))
                return (T) maybe;
            else
                throw new WrongTypeException(expected, actual);
        }
        value = createObject(dataSec, expected);
    }
    // Cache per key so repeated reads of the same key yield the same instance.
    data.cache.put(key, value);
    return (T) value;
}
/**
 * Builds (but does not throw) the standard type-mismatch exception.
 *
 * <p>Previously this method threw internally while also declaring a
 * {@code RuntimeException} return type, which made every
 * {@code throw badType(...)} call site misleading — the declared return value
 * was never produced. Returning the exception keeps the call sites honest and
 * the compiler's reachability analysis accurate; the thrown message is unchanged.
 *
 * @param expected type the caller asked for
 * @param actual   type actually found in the data
 * @param key      key at which the mismatch occurred
 * @return a WrongTypeException describing the mismatch, for the caller to throw
 */
private static RuntimeException badType( Class expected, Class actual, String key ) {
    return new WrongTypeException("Expected " + expected.getName() + " but found " + actual + " in key " + key);
}
/**
 * Resolves a {@code "__#<id>"} reference string (written by {@code write}) to
 * the referenced object, restoring it on demand.
 *
 * @param id       the reference key, including the "__#" prefix
 * @param expected type the caller wants back
 * @return the referenced object (may be null if a null was recorded for it)
 * @throws WrongTypeException if the referenced object is not assignable to {@code expected}
 */
private static <T> T getObjectReference(String id, DataIn data, Class<T> expected) {
    // Fast path: already restored. containsKey is checked separately because a
    // mapping to null is a legitimate recorded result.
    if (data.references.containsKey(id)) {
        Object value = data.references.get(id);
        if (value == null)
            return null;
        Class actual = value.getClass();
        if (expected.isAssignableFrom(actual))
            return (T)value;
        else
            throw badType(expected, actual, id);
    }
    // Not restored yet. The root stores, under the reference key, a chain of
    // nested sections spelling out the path to the section that owns the
    // object's data (see the createSection loops on the write side). Walk that
    // chain while descending the real data tree in parallel.
    PData path = data.root.getSection(id);
    Set<String> keys = path.keys();
    PData objPData = data.root;
    while (!keys.isEmpty()) {
        // NOTE(review): assumes each node of the path chain has exactly one
        // child key; iterator().next() silently picks an arbitrary one
        // otherwise — TODO confirm the writer never adds siblings here.
        String key = keys.iterator().next();
        path = path.getSection(key);
        objPData = objPData.getSection(key);
        keys = path.keys();
    }
    DataIn objData = new DataIn(objPData, data.references, data.root);
    return createObject(objData, expected);
}
/**
 * Restores a serialized object from {@code data}.
 *
 * <p>Looks up the class named by the section's {@code __classname} entry,
 * verifies it satisfies {@code expected}, instantiates it through the
 * registered converter, records it in the shared reference map under its
 * {@code __id} (so cyclic references resolve during initialisation), and only
 * then runs the converter's initialise step.
 *
 * @param data     section holding the serialized object
 * @param expected type the caller wants back
 * @return the restored object
 * @throws InvalidDataException if the class cannot be loaded or has no converter
 * @throws WrongTypeException   if the stored class is not assignable to {@code expected}
 */
private static <T> T createObject(DataIn data, Class<T> expected) {
    final String className = data.pData.getString("__classname");
    final Class actual;
    try {
        actual = Class.forName(className);
    } catch (ClassNotFoundException e) {
        throw new InvalidDataException("No class named '" + className + "'", e);
    }
    if (!expected.isAssignableFrom(actual)) {
        throw new WrongTypeException(expected, actual);
    }
    DataConverter<T> converter = ConverterRegistry.find(actual);
    if (converter == null) {
        throw new InvalidDataException("No converter registered for " + actual);
    }
    // Create first, register second, initialise last: registering before
    // initialise lets self-referential structures find this instance.
    T instance = converter.create(data, (Class<T>) actual);
    data.references.put(data.pData.getString("__id"), instance);
    converter.initialise(data, instance);
    return instance;
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.smartPointers;
import com.intellij.lang.LanguageUtil;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.ProperTextRange;
import com.intellij.openapi.util.Segment;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.FreeThreadedFileViewProvider;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.psi.impl.source.tree.ForeignLeafPsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
/**
 * Default {@link SmartPointerEx} implementation: delegates element restoration
 * to a {@link SmartPointerElementInfo} strategy object chosen at creation time,
 * and keeps a soft/weak cache of the last restored element.
 */
class SmartPsiElementPointerImpl<E extends PsiElement> implements SmartPointerEx<E> {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.smartPointers.SmartPsiElementPointerImpl");

  // Soft (or, in batch mode, weak) reference to the last restored element; see cacheElement().
  private Reference<E> myElement;
  // Strategy that knows how to restore the element after PSI/document changes.
  private final SmartPointerElementInfo myElementInfo;
  // Exact class of the original element; restoration yielding a different class is rejected.
  private final Class<? extends PsiElement> myElementClass;
  // Reference count: 0 == disposed, Byte.MAX_VALUE == saturated (see incrementAndGetReferenceCount).
  private byte myReferenceCount = 1;

  SmartPsiElementPointerImpl(@NotNull Project project, @NotNull E element, @Nullable PsiFile containingFile) {
    this(element, createElementInfo(project, element, containingFile), element.getClass());
  }

  SmartPsiElementPointerImpl(@NotNull E element,
                             @NotNull SmartPointerElementInfo elementInfo,
                             @NotNull Class<? extends PsiElement> elementClass) {
    // Creation touches PSI, so a read action must be held.
    ApplicationManager.getApplication().assertReadAccessAllowed();
    myElementClass = elementClass;
    myElementInfo = elementInfo;
    cacheElement(element);
  }

  // Equality is delegated to the element infos (plus cached elements when both
  // are present); see pointsToTheSameElementAs().
  @Override
  public boolean equals(Object obj) {
    return obj instanceof SmartPsiElementPointer && pointsToTheSameElementAs(this, (SmartPsiElementPointer)obj);
  }

  @Override
  public int hashCode() {
    return myElementInfo.elementHashCode();
  }

  @Override
  @NotNull
  public Project getProject() {
    return myElementInfo.getProject();
  }

  /**
   * Returns the pointed-to element, restoring (and re-caching) it when the
   * cached one is missing or no longer valid; null if it cannot be restored.
   */
  @Override
  @Nullable
  public E getElement() {
    E element = getCachedElement();
    if (element == null || !element.isValid()) {
      element = doRestoreElement();
      cacheElement(element);
    }
    return element;
  }

  // Restores via the element info; rejects results of a different class or
  // results that are already invalid.
  @Nullable
  E doRestoreElement() {
    //noinspection unchecked
    E element = (E)myElementInfo.restoreElement();
    if (element != null && (!element.getClass().equals(myElementClass) || !element.isValid())) {
      return null;
    }
    return element;
  }

  // Batch file processing uses weak references to avoid retaining many elements;
  // otherwise a soft reference keeps the element around while memory allows.
  void cacheElement(@Nullable E element) {
    myElement = element == null ? null :
    PsiManagerEx.getInstanceEx(getProject()).isBatchFilesProcessingMode() ? new WeakReference<E>(element) :
    new SoftReference<E>(element);
  }

  @Override
  public E getCachedElement() {
    return com.intellij.reference.SoftReference.dereference(myElement);
  }

  /**
   * Returns the containing file: first by asking the element info, then via the
   * synchronized document, and finally by restoring the element itself.
   */
  @Override
  public PsiFile getContainingFile() {
    PsiFile file = getElementInfo().restoreFile();
    if (file != null) {
      return file;
    }
    final Document doc = myElementInfo.getDocumentToSynchronize();
    if (doc == null) {
      final E resolved = getElement();
      return resolved == null ? null : resolved.getContainingFile();
    }
    return PsiDocumentManager.getInstance(getProject()).getPsiFile(doc);
  }

  @Override
  public VirtualFile getVirtualFile() {
    return myElementInfo.getVirtualFile();
  }

  @Override
  public Segment getRange() {
    return myElementInfo.getRange();
  }

  @Nullable
  @Override
  public Segment getPsiRange() {
    return myElementInfo.getPsiRange();
  }

  // In unit-test mode the freshly created info is immediately round-tripped to
  // catch pointers that can never be restored.
  @NotNull
  private static <E extends PsiElement> SmartPointerElementInfo createElementInfo(@NotNull Project project,
                                                                                  @NotNull E element,
                                                                                  PsiFile containingFile) {
    SmartPointerElementInfo elementInfo = doCreateElementInfo(project, element, containingFile);
    if (ApplicationManager.getApplication().isUnitTestMode() && !element.equals(elementInfo.restoreElement())) {
      // likely cause: PSI having isPhysical==true, but which can't be restored by containing file and range. To fix, make isPhysical return false
      LOG.error("Cannot restore " + element + " of " + element.getClass() + " from " + elementInfo);
    }
    return elementInfo;
  }

  /**
   * Picks the restoration strategy for the element: directories, non-physical or
   * compiled elements (stub reference when possible, hard reference otherwise),
   * injected fragments, anchor-based infos, whole files, range-less elements,
   * and finally the default range-based {@link SelfElementInfo}.
   */
  @NotNull
  private static <E extends PsiElement> SmartPointerElementInfo doCreateElementInfo(@NotNull Project project,
                                                                                    @NotNull E element,
                                                                                    PsiFile containingFile) {
    if (element instanceof PsiDirectory) {
      return new DirElementInfo((PsiDirectory)element);
    }
    if (element instanceof PsiCompiledElement || containingFile == null || !containingFile.isPhysical() || !element.isPhysical()) {
      if (element instanceof StubBasedPsiElement && element instanceof PsiCompiledElement) {
        if (element instanceof PsiFile) {
          return new FileElementInfo((PsiFile)element);
        }
        PsiAnchor.StubIndexReference stubReference = PsiAnchor.createStubReference(element, containingFile);
        if (stubReference != null) {
          return new ClsElementInfo(stubReference);
        }
      }
      // Fallback: keep a hard reference — the element cannot be re-located by range.
      return new HardElementInfo(project, element);
    }
    FileViewProvider viewProvider = containingFile.getViewProvider();
    if (viewProvider instanceof FreeThreadedFileViewProvider) {
      // Element lives in injected PSI: anchor it through a pointer to the host element.
      PsiLanguageInjectionHost hostContext = InjectedLanguageManager.getInstance(containingFile.getProject()).getInjectionHost(containingFile);
      TextRange elementRange = element.getTextRange();
      if (hostContext != null && elementRange != null) {
        SmartPsiElementPointer<PsiLanguageInjectionHost> hostPointer = SmartPointerManager.getInstance(project).createSmartPsiElementPointer(hostContext);
        return new InjectedSelfElementInfo(project, element, elementRange, containingFile, hostPointer);
      }
    }
    SmartPointerElementInfo info = AnchorElementInfoFactory.createElementInfo(element, containingFile);
    if (info != null) {
      return info;
    }
    if (element instanceof PsiFile) {
      return new FileElementInfo((PsiFile)element);
    }
    TextRange elementRange = element.getTextRange();
    if (elementRange == null) {
      return new HardElementInfo(project, element);
    }
    if (elementRange.isEmpty() && PsiTreeUtil.findChildOfType(element, ForeignLeafPsiElement.class) != null) {
      // PSI built on C-style macro expansions. It has empty ranges, no text, but complicated structure. It can't be reliably
      // restored by just one offset in a file, so hold it on a hard reference
      return new HardElementInfo(project, element);
    }
    ProperTextRange proper = ProperTextRange.create(elementRange);
    return new SelfElementInfo(project, proper, AnchorTypeInfo.obtainInfo(element, LanguageUtil.getRootLanguage(element)), containingFile, false);
  }

  @NotNull
  SmartPointerElementInfo getElementInfo() {
    return myElementInfo;
  }

  /**
   * Cheap identity comparison: element infos decide equality; cached elements
   * are only compared when both sides actually have one. Non-Impl pointers fall
   * back to comparing restored elements.
   */
  static boolean pointsToTheSameElementAs(@NotNull SmartPsiElementPointer pointer1, @NotNull SmartPsiElementPointer pointer2) {
    if (pointer1 == pointer2) return true;
    if (pointer1 instanceof SmartPsiElementPointerImpl && pointer2 instanceof SmartPsiElementPointerImpl) {
      SmartPsiElementPointerImpl impl1 = (SmartPsiElementPointerImpl)pointer1;
      SmartPsiElementPointerImpl impl2 = (SmartPsiElementPointerImpl)pointer2;
      SmartPointerElementInfo elementInfo1 = impl1.getElementInfo();
      SmartPointerElementInfo elementInfo2 = impl2.getElementInfo();
      if (!elementInfo1.pointsToTheSameElementAs(elementInfo2)) return false;
      PsiElement cachedElement1 = impl1.getCachedElement();
      PsiElement cachedElement2 = impl2.getCachedElement();
      return cachedElement1 == null || cachedElement2 == null || Comparing.equal(cachedElement1, cachedElement2);
    }
    return Comparing.equal(pointer1.getElement(), pointer2.getElement());
  }

  // Adjusts the reference count by delta, saturating at Byte.MAX_VALUE and
  // refusing to resurrect a disposed (count == 0) pointer.
  synchronized int incrementAndGetReferenceCount(int delta) {
    if (myReferenceCount == Byte.MAX_VALUE) return Byte.MAX_VALUE; // saturated
    if (myReferenceCount == 0) return 0; // disposed, not to be reused again
    return myReferenceCount += delta;
  }

  @Override
  public String toString() {
    return myElementInfo.toString();
  }
}
| |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.dash.offline;
import android.net.Uri;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.extractor.ChunkIndex;
import com.google.android.exoplayer2.offline.DownloadException;
import com.google.android.exoplayer2.offline.SegmentDownloader;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.dash.DashSegmentIndex;
import com.google.android.exoplayer2.source.dash.DashUtil;
import com.google.android.exoplayer2.source.dash.DashWrappingSegmentIndex;
import com.google.android.exoplayer2.source.dash.manifest.AdaptationSet;
import com.google.android.exoplayer2.source.dash.manifest.DashManifest;
import com.google.android.exoplayer2.source.dash.manifest.DashManifestParser;
import com.google.android.exoplayer2.source.dash.manifest.Period;
import com.google.android.exoplayer2.source.dash.manifest.RangedUri;
import com.google.android.exoplayer2.source.dash.manifest.Representation;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.ParsingLoadable.Parser;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
/**
* A downloader for DASH streams.
*
* <p>Example usage:
*
* <pre>{@code
* SimpleCache cache = new SimpleCache(downloadFolder, new NoOpCacheEvictor(), databaseProvider);
* CacheDataSource.Factory cacheDataSourceFactory =
* new CacheDataSource.Factory()
* .setCache(cache)
* .setUpstreamDataSourceFactory(new DefaultHttpDataSourceFactory(userAgent));
* // Create a downloader for the first representation of the first adaptation set of the first
* // period.
* DashDownloader dashDownloader =
* new DashDownloader(
* new MediaItem.Builder()
* .setUri(manifestUrl)
* .setStreamKeys(Collections.singletonList(new StreamKey(0, 0, 0)))
* .build(),
* cacheDataSourceFactory);
* // Perform the download.
* dashDownloader.download(progressListener);
* // Use the downloaded data for playback.
* DashMediaSource mediaSource =
* new DashMediaSource.Factory(cacheDataSourceFactory).createMediaSource(mediaItem);
* }</pre>
*/
public final class DashDownloader extends SegmentDownloader<DashManifest> {

  /** @deprecated Use {@link #DashDownloader(MediaItem, CacheDataSource.Factory)} instead. */
  @SuppressWarnings("deprecation")
  @Deprecated
  public DashDownloader(
      Uri manifestUri, List<StreamKey> streamKeys, CacheDataSource.Factory cacheDataSourceFactory) {
    this(manifestUri, streamKeys, cacheDataSourceFactory, Runnable::run);
  }

  /**
   * Creates a new instance that executes requests directly on the calling thread.
   *
   * @param mediaItem The {@link MediaItem} to be downloaded.
   * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the
   *     download will be written.
   */
  public DashDownloader(MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory) {
    this(mediaItem, cacheDataSourceFactory, Runnable::run);
  }

  /**
   * @deprecated Use {@link #DashDownloader(MediaItem, CacheDataSource.Factory, Executor)} instead.
   */
  @Deprecated
  public DashDownloader(
      Uri manifestUri,
      List<StreamKey> streamKeys,
      CacheDataSource.Factory cacheDataSourceFactory,
      Executor executor) {
    this(
        new MediaItem.Builder().setUri(manifestUri).setStreamKeys(streamKeys).build(),
        cacheDataSourceFactory,
        executor);
  }

  /**
   * Creates a new instance that parses manifests with a default {@link DashManifestParser}.
   *
   * @param mediaItem The {@link MediaItem} to be downloaded.
   * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the
   *     download will be written.
   * @param executor An {@link Executor} used to make requests for the media being downloaded.
   *     Providing an {@link Executor} that uses multiple threads will speed up the download by
   *     allowing parts of it to be executed in parallel.
   */
  public DashDownloader(
      MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory, Executor executor) {
    this(mediaItem, new DashManifestParser(), cacheDataSourceFactory, executor);
  }

  /**
   * Creates a new instance.
   *
   * @param mediaItem The {@link MediaItem} to be downloaded.
   * @param manifestParser A parser for DASH manifests.
   * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the
   *     download will be written.
   * @param executor An {@link Executor} used to make requests for the media being downloaded.
   *     Providing an {@link Executor} that uses multiple threads will speed up the download by
   *     allowing parts of it to be executed in parallel.
   */
  public DashDownloader(
      MediaItem mediaItem,
      Parser<DashManifest> manifestParser,
      CacheDataSource.Factory cacheDataSourceFactory,
      Executor executor) {
    super(mediaItem, manifestParser, cacheDataSourceFactory, executor);
  }

  @Override
  protected List<Segment> getSegments(
      DataSource dataSource, DashManifest manifest, boolean allowIncompleteList)
      throws IOException {
    // Collect segments across every adaptation set of every period.
    ArrayList<Segment> segments = new ArrayList<>();
    int periodCount = manifest.getPeriodCount();
    for (int periodIndex = 0; periodIndex < periodCount; periodIndex++) {
      Period period = manifest.getPeriod(periodIndex);
      long periodStartUs = C.msToUs(period.startMs);
      long periodDurationUs = manifest.getPeriodDurationUs(periodIndex);
      for (AdaptationSet adaptationSet : period.adaptationSets) {
        addSegmentsForAdaptationSet(
            dataSource,
            adaptationSet,
            periodStartUs,
            periodDurationUs,
            allowIncompleteList,
            segments);
      }
    }
    return segments;
  }

  /**
   * Appends the segments of every representation in {@code adaptationSet} to {@code out},
   * including initialization and index segments where present.
   */
  private static void addSegmentsForAdaptationSet(
      DataSource dataSource,
      AdaptationSet adaptationSet,
      long periodStartUs,
      long periodDurationUs,
      boolean allowIncompleteList,
      ArrayList<Segment> out)
      throws IOException {
    for (Representation representation : adaptationSet.representations) {
      DashSegmentIndex segmentIndex;
      try {
        segmentIndex = getSegmentIndex(dataSource, adaptationSet.type, representation);
        if (segmentIndex == null) {
          // Loading succeeded but there was no index.
          throw new DownloadException("Missing segment index");
        }
      } catch (IOException e) {
        if (!allowIncompleteList) {
          throw e;
        }
        // Generating an incomplete segment list is allowed. Advance to the next representation.
        continue;
      }
      int segmentCount = segmentIndex.getSegmentCount(periodDurationUs);
      if (segmentCount == DashSegmentIndex.INDEX_UNBOUNDED) {
        throw new DownloadException("Unbounded segment index");
      }
      String baseUrl = representation.baseUrl;
      RangedUri initializationUri = representation.getInitializationUri();
      if (initializationUri != null) {
        addSegment(periodStartUs, baseUrl, initializationUri, out);
      }
      RangedUri indexUri = representation.getIndexUri();
      if (indexUri != null) {
        addSegment(periodStartUs, baseUrl, indexUri, out);
      }
      long firstSegmentNum = segmentIndex.getFirstSegmentNum();
      long lastSegmentNum = firstSegmentNum + segmentCount - 1;
      for (long segmentNum = firstSegmentNum; segmentNum <= lastSegmentNum; segmentNum++) {
        addSegment(
            periodStartUs + segmentIndex.getTimeUs(segmentNum),
            baseUrl,
            segmentIndex.getSegmentUrl(segmentNum),
            out);
      }
    }
  }

  /** Resolves {@code rangedUri} against {@code baseUrl} and appends the resulting segment. */
  private static void addSegment(
      long startTimeUs, String baseUrl, RangedUri rangedUri, ArrayList<Segment> out) {
    DataSpec dataSpec =
        new DataSpec(rangedUri.resolveUri(baseUrl), rangedUri.start, rangedUri.length);
    out.add(new Segment(startTimeUs, dataSpec));
  }

  /**
   * Returns the representation's own index if it has one, else loads and wraps the chunk index;
   * null if neither is available.
   */
  private static @Nullable DashSegmentIndex getSegmentIndex(
      DataSource dataSource, int trackType, Representation representation) throws IOException {
    DashSegmentIndex index = representation.getIndex();
    if (index != null) {
      return index;
    }
    ChunkIndex chunkIndex = DashUtil.loadChunkIndex(dataSource, trackType, representation);
    if (chunkIndex == null) {
      return null;
    }
    return new DashWrappingSegmentIndex(chunkIndex, representation.presentationTimeOffsetUs);
  }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.siddhi.core.query.function;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
import org.wso2.siddhi.core.ExecutionPlanRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.query.output.callback.QueryCallback;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.core.util.EventPrinter;
/**
 * Tests for the instanceOf&lt;Type&gt; Siddhi functions. Each test sends one event whose
 * checked attribute has the declared type and one whose attribute does not, and
 * asserts the function reports true then false.
 *
 * <p>Fixed in this revision: lowercase {@code l} long-literal suffixes replaced with
 * {@code L} (easily misread as {@code 1}), inconsistent fully-qualified
 * {@code junit.framework.Assert} calls unified with the imported {@code Assert}, and
 * the six duplicated test bodies factored into a shared helper.
 */
public class InstanceOfFunctionTestCase {
    static final Logger log = Logger.getLogger(InstanceOfFunctionTestCase.class);

    /** Common sensor stream definition shared by every test. */
    private static final String SENSOR_EVENT_STREAM =
            "@config(async = 'true') define stream sensorEventStream (timestamp long, " +
            "isPowerSaverEnabled bool, sensorId int , sensorName string, longitude double, latitude double, " +
            "humidity float, sensorValue double);";

    // Number of events received by the current test's callback.
    private int count;
    // Whether the current test's callback received at least one event.
    private boolean eventArrived;

    @Before
    public void init() {
        count = 0;
        eventArrived = false;
    }

    @Test
    public void testInstanceOfLongFunctionExtensionTestCase() throws InterruptedException {
        log.info("testInstanceOfLongFunctionExtension TestCase");
        String query = ("@info(name = 'query1') " +
                "from sensorEventStream " +
                "select sensorName ,instanceOfLong(timestamp) as valid, timestamp " +
                "insert into outputStream;");
        runInstanceOfTest(query,
                new Object[]{19900813115534L, false, 601, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                // timestamp attribute is an int here, not a long
                new Object[]{1990, false, 602, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                100);
    }

    @Test
    public void testInstanceOfBooleanFunctionExtensionTestCase() throws InterruptedException {
        log.info("testInstanceOfBooleanFunctionExtension TestCase");
        String query = ("@info(name = 'query1') " +
                "from sensorEventStream " +
                "select sensorName ,instanceOfBoolean(isPowerSaverEnabled) as valid, isPowerSaverEnabled " +
                "insert into outputStream;");
        runInstanceOfTest(query,
                new Object[]{19900813115534L, false, 601, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                // isPowerSaverEnabled attribute is a String here, not a boolean
                new Object[]{19900813115534L, "notAvailable", 602, "temperature", 90.34344, 20.44345, 2.3f,
                        20.44345},
                100);
    }

    @Test
    public void testInstanceOfIntegerFunctionExtensionTestCase() throws InterruptedException {
        log.info("testInstanceOfIntegerFunctionExtension TestCase");
        String query = ("@info(name = 'query1') " +
                "from sensorEventStream " +
                "select sensorName ,instanceOfInteger(sensorId) as valid, sensorId " +
                "insert into outputStream;");
        runInstanceOfTest(query,
                new Object[]{19900813115534L, false, 601, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                // sensorId attribute is a double here, not an int
                new Object[]{19900813115534L, true, 60232434.657, "temperature", 90.34344, 20.44345, 2.3f,
                        20.44345},
                100);
    }

    @Test
    public void testInstanceOfStringFunctionExtensionTestCase() throws InterruptedException {
        log.info("testInstanceOfStringFunctionExtension TestCase");
        String query = ("@info(name = 'query1') " +
                "from sensorEventStream " +
                "select sensorName ,instanceOfString(sensorName) as valid " +
                "insert into outputStream;");
        runInstanceOfTest(query,
                new Object[]{19900813115534L, false, 601, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                // sensorName attribute is a double here, not a String
                new Object[]{19900813115534L, true, 602, 90.34344, 90.34344, 20.44345, 2.3f, 20.44345},
                1000);
    }

    @Test
    public void testInstanceOfDoubleFunctionExtensionTestCase() throws InterruptedException {
        log.info("testInstanceOfDoubleFunctionExtension TestCase");
        String query = ("@info(name = 'query1') " +
                "from sensorEventStream " +
                "select sensorName ,instanceOfDouble(longitude) as valid, longitude " +
                "insert into outputStream;");
        runInstanceOfTest(query,
                new Object[]{19900813115534L, false, 601, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                // longitude attribute is a String here, not a double
                new Object[]{19900813115534L, true, 602, "temperature", "90.3434", 20.44345, 2.3f, 20.44345},
                100);
    }

    @Test
    public void testInstanceOfFloatFunctionExtensionTestCase() throws InterruptedException {
        log.info("testInstanceOfFloatFunctionExtension TestCase");
        String query = ("@info(name = 'query1') " +
                "from sensorEventStream " +
                "select sensorName ,instanceOfFloat(humidity) as valid, longitude " +
                "insert into outputStream;");
        runInstanceOfTest(query,
                new Object[]{19900813115534L, false, 601, "temperature", 90.34344, 20.44345, 2.3f, 20.44345},
                // humidity attribute is a double here, not a float
                new Object[]{19900813115534L, true, 602, "temperature", 90.34344, 20.44345, 2.3, 20.44345},
                100);
    }

    /**
     * Runs {@code query} (named 'query1') against the sensor stream, sends
     * {@code matchingEvent} followed by {@code nonMatchingEvent}, waits
     * {@code waitMillis} for asynchronous delivery, and asserts the instanceOf
     * function returned true for the first event and false for the second.
     */
    private void runInstanceOfTest(String query, Object[] matchingEvent, Object[] nonMatchingEvent,
                                   long waitMillis) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        ExecutionPlanRuntime executionPlanRuntime =
                siddhiManager.createExecutionPlanRuntime(SENSOR_EVENT_STREAM + query);
        executionPlanRuntime.addCallback("query1", new QueryCallback() {
            @Override
            public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) {
                EventPrinter.print(timeStamp, inEvents, removeEvents);
                for (Event inEvent : inEvents) {
                    count++;
                    if (count == 1) {
                        // First event: the checked attribute has the declared type.
                        Assert.assertEquals(true, inEvent.getData(1));
                    }
                    if (count == 2) {
                        // Second event: the checked attribute has a different type.
                        Assert.assertEquals(false, inEvent.getData(1));
                    }
                    eventArrived = true;
                }
            }
        });
        InputHandler inputHandler = executionPlanRuntime.getInputHandler("sensorEventStream");
        executionPlanRuntime.start();
        inputHandler.send(matchingEvent);
        inputHandler.send(nonMatchingEvent);
        // NOTE(review): sleep-based synchronization is inherently racy; kept to
        // preserve the original tests' timing behavior.
        Thread.sleep(waitMillis);
        Assert.assertEquals(2, count);
        Assert.assertTrue(eventArrived);
        executionPlanRuntime.shutdown();
    }
}
| |
package com.bitdubai.fermat_pip_addon.layer.identity.device_user.developer.bitdubai.version_1;
import com.bitdubai.fermat_api.Addon;
import com.bitdubai.fermat_api.Service;
import com.bitdubai.fermat_api.layer._1_definition.enums.Addons;
import com.bitdubai.fermat_api.layer._1_definition.enums.DeviceDirectory;
import com.bitdubai.fermat_api.layer._1_definition.enums.PlatformFileName;
import com.bitdubai.fermat_api.layer._1_definition.enums.ServiceStatus;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPlatformDatabaseSystem;
import com.bitdubai.fermat_api.layer.osa_android.database_system.PlatformDatabaseSystem;
import com.bitdubai.fermat_api.layer._2_os.file_system.exceptions.CantCreateFileException;
import com.bitdubai.fermat_api.layer._3_platform_service.error_manager.DealsWithErrors;
import com.bitdubai.fermat_api.layer._3_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_api.layer._3_platform_service.error_manager.UnexpectedAddonsExceptionSeverity;
import com.bitdubai.fermat_api.layer._3_platform_service.event_manager.DealsWithEvents;
import com.bitdubai.fermat_api.layer._3_platform_service.event_manager.EventManager;
import com.bitdubai.fermat_api.layer._3_platform_service.event_manager.EventHandler;
import com.bitdubai.fermat_api.layer._3_platform_service.event_manager.EventListener;
import com.bitdubai.fermat_api.layer._2_os.file_system.*;
import com.bitdubai.fermat_api.layer._2_os.file_system.exceptions.CantPersistFileException;
import com.bitdubai.fermat_api.layer._2_os.file_system.exceptions.FileNotFoundException;
import com.bitdubai.fermat_api.layer._5_user.device_user.exceptions.CantCreateDeviceUserException;
import com.bitdubai.fermat_api.layer._5_user.device_user.exceptions.CantLoadDeviceUserException;
import com.bitdubai.fermat_api.layer._5_user.device_user.DeviceUser;
import com.bitdubai.fermat_api.layer._5_user.device_user.DeviceUserManager;
import com.bitdubai.fermat_api.layer._5_user.device_user.exceptions.LoginFailedException;
import com.bitdubai.fermat_api.layer._5_user.User;
import com.bitdubai.fermat_pip_addon.layer.identity.device_user.developer.bitdubai.version_1.structure.PlatformDeviceUser;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
* Created by ciencias on 22.01.15.
*/
/**
* The User Manager knows the users managed by the current device.
*
* It is responsible for login in users to the current device.
*/
public class DeviceUserUserAddonRoot implements Addon, DealsWithErrors, DealsWithEvents,DealsWithPlatformDatabaseSystem, DealsWithPlatformFileSystem, DeviceUserManager, Service {
/**
* DealsWithErrors Interface member variables.
*/
ErrorManager errorManager;
/**
* DealWithEvents Interface member variables.
*/
EventManager eventManager;
/**
* DealsWithPluginDatabaseSystem interface implementation.
*/
@Override
public void setPlatformDatabaseSystem(PlatformDatabaseSystem platformDatabaseSystem) {
this.platformDatabaseSystem = platformDatabaseSystem;
}
/**
* DealsWithPlatformDatabaseSystem Interface member variables.
*/
PlatformDatabaseSystem platformDatabaseSystem;
/**
* DealsWithPlatformFileSystem Interface member variables.
*/
PlatformFileSystem platformFileSystem;
/**
* DeviceUserManager Interface member variables.
*/
DeviceUser mLoggedInDeviceUser;
/**
* Service Interface member variables.
*/
ServiceStatus serviceStatus = ServiceStatus.CREATED;
List<EventListener> listenersAdded = new ArrayList();
/**
*DealWithErrors Interface implementation.
*/
@Override
public void setErrorManager(ErrorManager errorManager) {
this.errorManager = errorManager;
}
/**
* DealWithEvents Interface implementation.
*/
@Override
public void setEventManager(EventManager eventManager) {
this.eventManager = eventManager;
}
/**
* DealsWithPlatformFileSystem Interface implementation.
*/
@Override
public void setPlatformFileSystem(PlatformFileSystem platformFileSystem) {
this.platformFileSystem = platformFileSystem;
}
/**
* DeviceUserManager Interface implementation.
*/
@Override
public DeviceUser getLoggedInUser() {
return mLoggedInDeviceUser;
}
@Override
public DeviceUser createUser() throws CantCreateDeviceUserException {
try
{
DeviceUser deviceUser = new PlatformDeviceUser();
((DealsWithPlatformFileSystem) deviceUser).setPlatformFileSystem(this.platformFileSystem);
((DealsWithErrors) deviceUser).setErrorManager(this.errorManager);
deviceUser.createUser();
return deviceUser;
}
catch (CantCreateDeviceUserException cantCreateDeviceUserException)
{
/**
* This is bad, the only thing I can do is to throw the exception again.
*/
errorManager.reportUnexpectedAddonsException(Addons.DEVICE_USER, UnexpectedAddonsExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_ADDONS, cantCreateDeviceUserException);
throw cantCreateDeviceUserException;
}
}
@Override
public void loadUser(UUID id) throws CantLoadDeviceUserException {
try
{
DeviceUser deviceUser = new PlatformDeviceUser();
((DealsWithPlatformFileSystem) deviceUser).setPlatformFileSystem(this.platformFileSystem);
((DealsWithErrors) deviceUser).setErrorManager(this.errorManager);
deviceUser.loadUser(id);
mLoggedInDeviceUser = deviceUser;
}
catch (CantLoadDeviceUserException cantLoadDeviceUserException)
{
/**
* This is bad, the only thing I can do is to throw the exception again.
*/
errorManager.reportUnexpectedAddonsException(Addons.DEVICE_USER, UnexpectedAddonsExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_ADDONS, cantLoadDeviceUserException);
throw cantLoadDeviceUserException;
}
}
/**
* DeviceUserManager Interface implementation.
*/
@Override
public User getUser(UUID id){
return null;
}
/**
* Service Interface implementation.
*/
@Override
public void start() {
/**
* Now I will recover the last state, If there was a user logged in before closing the APP the last time, I will
* re-loggin it,
*/
/**
* If there is no last state file, I assume this is the first time the platform is running on this device.
* Under this situation I will do the following;
*
* 1) Create a new User with no password.
* 2) Auto login that user.
* 3) Save the last state for future use.
*/
/**
* I will initialize the handling of platform events.
*/
EventListener eventListener;
EventHandler eventHandler;
// eventListener = eventManager.getNewListener(EventType.DEVICE_USER_CREATED);
// eventHandler = new UserCreatedEventHandler();
// ((UserCreatedEventHandler) eventHandler).setWalletManager(this);
// eventListener.setEventHandler(eventHandler);
// eventManager.addListener(eventListener);
// listenersAdded.add(eventListener);
this.serviceStatus = ServiceStatus.STARTED;
}
@Override
public void pause() {
this.serviceStatus = ServiceStatus.PAUSED;
}
@Override
public void resume() {
this.serviceStatus = ServiceStatus.STARTED;
}
@Override
public void stop() {
/**
* I will remove all the event listeners registered with the event manager.
*/
for (EventListener eventListener : listenersAdded) {
eventManager.removeListener(eventListener);
}
listenersAdded.clear();
this.serviceStatus = ServiceStatus.STOPPED;
}
@Override
public ServiceStatus getStatus() {
return this.serviceStatus;
}
private void recoverLastState () throws CantCreateDeviceUserException {
try {
PlatformTextFile platformStateFile = null;
try{
platformStateFile = this.platformFileSystem.getFile(
DeviceDirectory.PLATFORM.getName(),
PlatformFileName.LAST_STATE.getFileName(),
FilePrivacy.PRIVATE, FileLifeSpan.PERMANENT
);
}catch(CantCreateFileException cantCreateFileException)
{
/**
* This really should never happen. But if it does...
*/
System.err.println("CantCreateFileException: " + cantCreateFileException.getMessage());
cantCreateFileException.printStackTrace();
// throw new CantStartPlatformException(); TODO: Luis checkear esto
}
try {
platformStateFile.persistToMedia();
}
catch (CantPersistFileException cantLoadFileException) {
/**
* This really should never happen. But if it does...
*/
System.err.println("CantLoadFileException: " + cantLoadFileException.getMessage());
cantLoadFileException.printStackTrace();
// throw new CantStartPlatformException(); TODO: Luis checkear esto
}
UUID userId = UUID.fromString(platformStateFile.getContent());
// Luis TODO: de aca tiene que sacar no solo el usuario sino tambien el modulo donde estuvo por ultima vez
try
{
((DeviceUserManager) this).loadUser(userId);
}
catch (CantLoadDeviceUserException cantLoadDeviceUserException)
{
/**
* This really should never happen. But if it does...
*/
System.err.println("CantLoadUserException: " + cantLoadDeviceUserException.getMessage());
cantLoadDeviceUserException.printStackTrace();
// throw new CantStartPlatformException(); TODO: Luis checkear esto
}
}
catch (FileNotFoundException fileNotFoundException)
{
/**
* If there is no last state file, I assume this is the first time the platform is running on this device.
* Under this situation I will do the following;
*
* 1) Create a new User with no password.
* 2) Auto login that user.
* 3) Save the last state of the platform.
*/
DeviceUser newDeviceUser = this.createUser();
try {
newDeviceUser.login("");
// Luis TODO; como se conecta esto con el communication layer que usa el usuario logeado del Platform Context?
} catch (LoginFailedException exception) {
/**
* This really should never happen. But if it does...
*/
System.err.println("LoginFailedException or CantCreateUserException: " + exception.getMessage());
exception.printStackTrace();
// throw new CantStartPlatformException(); TODO: Luis checkear esto
}
PlatformTextFile platformStateFile = null;
try{
platformStateFile = this.platformFileSystem.createFile(
DeviceDirectory.PLATFORM.getName(),
PlatformFileName.LAST_STATE.getFileName(),
FilePrivacy.PRIVATE, FileLifeSpan.PERMANENT
);
}catch(CantCreateFileException cantCreateFileException)
{
/**
* This really should never happen. But if it does...
*/
System.err.println("cantCreateFileException: " + cantCreateFileException.getMessage());
cantCreateFileException.printStackTrace();
// throw new CantStartPlatformException(); TODO: Luis checkear esto
}
String content = newDeviceUser.getId().toString(); //+ ";" + Modules.WALLET_RUNTIME.getModuleName();
platformStateFile.setContent(content);
try {
platformStateFile.persistToMedia();
} catch (CantPersistFileException cantPersistFileException) {
/**
* This really should never happen. But if it does...
*/
System.err.println("Cant persist com.bitdubai.platform state to media: " + cantPersistFileException.getMessage());
cantPersistFileException.printStackTrace();
// throw new CantStartPlatformException(); TODO: Luis checkear esto
}
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hslf.model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.List;
import org.apache.poi.POIDataSamples;
import org.apache.poi.hslf.model.textproperties.CharFlagsTextProp;
import org.apache.poi.hslf.record.Environment;
import org.apache.poi.hslf.record.TextHeaderAtom;
import org.apache.poi.hslf.usermodel.*;
import org.junit.Test;
/**
 * Tests for SlideMaster
 *
 * @author Yegor Kozlov
 */
public final class TestSlideMaster {
    // Fixture locator is never reassigned — make it final.
    private static final POIDataSamples _slTests = POIDataSamples.getSlideShowInstance();

    /**
     * The reference ppt has two masters.
     * Check we can read their attributes.
     */
    @Test
    public void testSlideMaster() throws Exception {
        HSLFSlideShow ppt = new HSLFSlideShow(_slTests.openResourceAsStream("slide_master.ppt"));
        Environment env = ppt.getDocumentRecord().getEnvironment();
        List<HSLFSlideMaster> master = ppt.getSlideMasters();
        assertEquals(2, master.size());
        // character attributes
        assertEquals(40, master.get(0).getStyleAttribute(TextHeaderAtom.TITLE_TYPE, 0, "font.size", true).getValue());
        assertEquals(48, master.get(1).getStyleAttribute(TextHeaderAtom.TITLE_TYPE, 0, "font.size", true).getValue());
        int font1 = master.get(0).getStyleAttribute(TextHeaderAtom.TITLE_TYPE, 0, "font.index", true).getValue();
        int font2 = master.get(1).getStyleAttribute(TextHeaderAtom.TITLE_TYPE, 0, "font.index", true).getValue();
        assertEquals("Arial", env.getFontCollection().getFontWithId(font1));
        assertEquals("Georgia", env.getFontCollection().getFontWithId(font2));
        // assertTrue/assertFalse instead of assertEquals(true/false, ...) for clearer failure messages
        CharFlagsTextProp prop1 = (CharFlagsTextProp) master.get(0).getStyleAttribute(TextHeaderAtom.TITLE_TYPE, 0, "char_flags", true);
        assertFalse(prop1.getSubValue(CharFlagsTextProp.BOLD_IDX));
        assertFalse(prop1.getSubValue(CharFlagsTextProp.ITALIC_IDX));
        assertTrue(prop1.getSubValue(CharFlagsTextProp.UNDERLINE_IDX));
        CharFlagsTextProp prop2 = (CharFlagsTextProp) master.get(1).getStyleAttribute(TextHeaderAtom.TITLE_TYPE, 0, "char_flags", true);
        assertFalse(prop2.getSubValue(CharFlagsTextProp.BOLD_IDX));
        assertTrue(prop2.getSubValue(CharFlagsTextProp.ITALIC_IDX));
        assertFalse(prop2.getSubValue(CharFlagsTextProp.UNDERLINE_IDX));
        // now paragraph attributes
        assertEquals(0x266B, master.get(0).getStyleAttribute(TextHeaderAtom.BODY_TYPE, 0, "bullet.char", false).getValue());
        assertEquals(0x2022, master.get(1).getStyleAttribute(TextHeaderAtom.BODY_TYPE, 0, "bullet.char", false).getValue());
        int b1 = master.get(0).getStyleAttribute(TextHeaderAtom.BODY_TYPE, 0, "bullet.font", false).getValue();
        int b2 = master.get(1).getStyleAttribute(TextHeaderAtom.BODY_TYPE, 0, "bullet.font", false).getValue();
        assertEquals("Arial", env.getFontCollection().getFontWithId(b1));
        assertEquals("Georgia", env.getFontCollection().getFontWithId(b2));
    }

    /**
     * Test we can read default text attributes for a title master sheet
     */
    @Test
    public void testTitleMasterTextAttributes() throws Exception {
        HSLFSlideShow ppt = new HSLFSlideShow(_slTests.openResourceAsStream("slide_master.ppt"));
        List<HSLFTitleMaster> master = ppt.getTitleMasters();
        assertEquals(1, master.size());
        assertEquals(32, master.get(0).getStyleAttribute(TextHeaderAtom.CENTER_TITLE_TYPE, 0, "font.size", true).getValue());
        CharFlagsTextProp prop1 = (CharFlagsTextProp) master.get(0).getStyleAttribute(TextHeaderAtom.CENTER_TITLE_TYPE, 0, "char_flags", true);
        assertTrue(prop1.getSubValue(CharFlagsTextProp.BOLD_IDX));
        assertFalse(prop1.getSubValue(CharFlagsTextProp.ITALIC_IDX));
        assertTrue(prop1.getSubValue(CharFlagsTextProp.UNDERLINE_IDX));
        assertEquals(20, master.get(0).getStyleAttribute(TextHeaderAtom.CENTRE_BODY_TYPE, 0, "font.size", true).getValue());
        CharFlagsTextProp prop2 = (CharFlagsTextProp) master.get(0).getStyleAttribute(TextHeaderAtom.CENTRE_BODY_TYPE, 0, "char_flags", true);
        assertTrue(prop2.getSubValue(CharFlagsTextProp.BOLD_IDX));
        assertFalse(prop2.getSubValue(CharFlagsTextProp.ITALIC_IDX));
        assertFalse(prop2.getSubValue(CharFlagsTextProp.UNDERLINE_IDX));
    }

    /**
     * Slide 3 has title layout and follows the TitleMaster. Verify that.
     */
    @Test
    public void testTitleMaster() throws Exception {
        HSLFSlideShow ppt = new HSLFSlideShow(_slTests.openResourceAsStream("slide_master.ppt"));
        HSLFSlide slide = ppt.getSlides().get(2);
        HSLFMasterSheet masterSheet = slide.getMasterSheet();
        assertTrue(masterSheet instanceof HSLFTitleMaster);
        for (List<HSLFTextParagraph> txt : slide.getTextParagraphs()) {
            HSLFTextRun rt = txt.get(0).getTextRuns().get(0);
            switch (txt.get(0).getRunType()) {
                case TextHeaderAtom.CENTER_TITLE_TYPE:
                    assertEquals("Arial", rt.getFontFamily());
                    assertEquals(32, rt.getFontSize(), 0);
                    assertTrue(rt.isBold());
                    assertTrue(rt.isUnderlined());
                    break;
                case TextHeaderAtom.CENTRE_BODY_TYPE:
                    assertEquals("Courier New", rt.getFontFamily());
                    assertEquals(20, rt.getFontSize(), 0);
                    assertTrue(rt.isBold());
                    assertFalse(rt.isUnderlined());
                    break;
            }
        }
    }

    /**
     * If a style attribute is not set ensure it is read from the master
     */
    @Test
    public void testMasterAttributes() throws Exception {
        HSLFSlideShow ppt = new HSLFSlideShow(_slTests.openResourceAsStream("slide_master.ppt"));
        List<HSLFSlide> slide = ppt.getSlides();
        assertEquals(3, slide.size());
        for (List<HSLFTextParagraph> tparas : slide.get(0).getTextParagraphs()) {
            HSLFTextParagraph tpara = tparas.get(0);
            if (tpara.getRunType() == TextHeaderAtom.TITLE_TYPE) {
                HSLFTextRun rt = tpara.getTextRuns().get(0);
                assertEquals(40, rt.getFontSize(), 0);
                assertTrue(rt.isUnderlined());
                assertEquals("Arial", rt.getFontFamily());
            } else if (tpara.getRunType() == TextHeaderAtom.BODY_TYPE) {
                HSLFTextRun rt = tpara.getTextRuns().get(0);
                assertEquals(0, tpara.getIndentLevel());
                assertEquals(32, rt.getFontSize(), 0);
                assertEquals("Arial", rt.getFontFamily());
                tpara = tparas.get(1);
                rt = tpara.getTextRuns().get(0);
                assertEquals(1, tpara.getIndentLevel());
                assertEquals(28, rt.getFontSize(), 0);
                assertEquals("Arial", rt.getFontFamily());
            }
        }
        for (List<HSLFTextParagraph> tparas : slide.get(1).getTextParagraphs()) {
            HSLFTextParagraph tpara = tparas.get(0);
            if (tpara.getRunType() == TextHeaderAtom.TITLE_TYPE) {
                HSLFTextRun rt = tpara.getTextRuns().get(0);
                assertEquals(48, rt.getFontSize(), 0);
                assertTrue(rt.isItalic());
                assertEquals("Georgia", rt.getFontFamily());
            } else if (tpara.getRunType() == TextHeaderAtom.BODY_TYPE) {
                HSLFTextRun rt;
                rt = tpara.getTextRuns().get(0);
                assertEquals(0, tpara.getIndentLevel());
                assertEquals(32, rt.getFontSize(), 0);
                assertEquals("Courier New", rt.getFontFamily());
            }
        }
    }

    /**
     * Check we can dynamically assign a slide master to a slide.
     */
    @Test
    public void testChangeSlideMaster() throws Exception {
        HSLFSlideShow ppt = new HSLFSlideShow(_slTests.openResourceAsStream("slide_master.ppt"));
        List<HSLFSlideMaster> master = ppt.getSlideMasters();
        List<HSLFSlide> slide = ppt.getSlides();
        int sheetNo;
        // each slide uses its own master
        assertEquals(slide.get(0).getMasterSheet()._getSheetNumber(), master.get(0)._getSheetNumber());
        assertEquals(slide.get(1).getMasterSheet()._getSheetNumber(), master.get(1)._getSheetNumber());
        // all slides use the first master slide
        sheetNo = master.get(0)._getSheetNumber();
        for (HSLFSlide s : slide) {
            s.setMasterSheet(master.get(0));
        }
        // round-trip through a write/read cycle and verify the assignment survived
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ppt.write(out);
        out.close();
        ppt = new HSLFSlideShow(new HSLFSlideShowImpl(new ByteArrayInputStream(out.toByteArray())));
        master = ppt.getSlideMasters();
        slide = ppt.getSlides();
        for (HSLFSlide s : slide) {
            assertEquals(sheetNo, s.getMasterSheet()._getSheetNumber());
        }
    }

    /**
     * Verify we can read attributes for different indentation levels.
     * (typical for the "bulleted body" placeholder)
     */
    @Test
    public void testIndentation() throws Exception {
        HSLFSlideShow ppt = new HSLFSlideShow(_slTests.openResourceAsStream("slide_master.ppt"));
        HSLFSlide slide = ppt.getSlides().get(0);
        for (List<HSLFTextParagraph> tparas : slide.getTextParagraphs()) {
            HSLFTextParagraph tpara = tparas.get(0);
            if (tpara.getRunType() == TextHeaderAtom.TITLE_TYPE) {
                HSLFTextRun rt = tpara.getTextRuns().get(0);
                assertEquals(40, rt.getFontSize(), 0);
                assertTrue(rt.isUnderlined());
                assertEquals("Arial", rt.getFontFamily());
            } else if (tpara.getRunType() == TextHeaderAtom.BODY_TYPE) {
                // expected font size per indent level 0..2
                int[] indents = { 32, 28, 24 };
                for (HSLFTextRun rt : tpara.getTextRuns()) {
                    int indent = tpara.getIndentLevel();
                    assertEquals(indents[indent], rt.getFontSize(), 0);
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* IDREF.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: SNAPSHOT Built on : Dec 21, 2007 (04:03:30 LKT)
*/
package org.apache.axis2.databinding.types.soapencoding;
/**
 * IDREF bean class.
 *
 * Auto-generated Axis2 ADB (Axis Data Binding) bean wrapping a single
 * {@code org.apache.axis2.databinding.types.IDRef} value for the SOAP encoding
 * namespace. Generated code — left byte-for-byte intact; comments only added.
 */
public class IDREF
        implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
       name = IDREF
       Namespace URI = http://schemas.xmlsoap.org/soap/encoding/
       Namespace Prefix = ns1
    */

    // Returns the conventional "SOAP-ENC" prefix for the SOAP-encoding namespace,
    // otherwise a process-unique prefix from BeanUtil.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://schemas.xmlsoap.org/soap/encoding/")){
            return "SOAP-ENC";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for IDREF
     */
    protected org.apache.axis2.databinding.types.IDRef localIDREF;

    /**
     * Auto generated getter method
     * @return org.apache.axis2.databinding.types.IDRef
     */
    public org.apache.axis2.databinding.types.IDRef getIDREF(){
        return localIDREF;
    }

    /**
     * Auto generated setter method
     * @param param IDREF
     */
    public void setIDREF(org.apache.axis2.databinding.types.IDRef param){
        this.localIDREF=param;
    }

    // NOTE(review): throws NullPointerException when localIDREF is null.
    public java.lang.String toString(){
        return localIDREF.toString();
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        // Some StAX implementations throw IllegalArgumentException for unknown
        // properties; treat that as "not MTOM aware".
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Builds an OMElement whose serialization is deferred to this bean's
     * serialize() method (lazy, pull-style serialization).
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        IDREF.this.serialize(parentQName,factory,xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    // Convenience overload: serializes without writing an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Writes this bean as an element named by parentQName, optionally decorated
    // with an xsi:type="...:IDREF" attribute. Throws ADBException if the value is null.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        java.lang.String prefix = null;
        java.lang.String namespace = null;
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                // Namespace already bound on the writer — reuse its prefix.
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            // No namespace: plain unqualified element.
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://schemas.xmlsoap.org/soap/encoding/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":IDREF",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "IDREF",
                        xmlWriter);
            }
        }
        if (localIDREF==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("IDREF cannot be null!!");
        }else{
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localIDREF));
        }
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute, registering the QName's
     * namespace prefix first so the serialized value ("prefix:localPart") resolves.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as character data, binding any
    // needed namespace prefixes before emitting the text.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until we find a prefix not already bound to some namespace.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();
        elementList.add(org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT);
        if (localIDREF != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localIDREF));
        } else {
            throw new org.apache.axis2.databinding.ADBException("IDREF cannot be null!!");
        }
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{

        // Builds an IDREF from its lexical string form. The namespaceURI parameter
        // is accepted for generator symmetry but not used by this simple type.
        public static IDREF fromString(java.lang.String value,
                                       java.lang.String namespaceURI){
            IDREF returnValue = new IDREF();
            returnValue.setIDREF(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToIDREF(value));
            return returnValue;
        }

        // Resolves an optional "prefix:" in the content against the reader's
        // namespace context, then delegates to fromString(value, namespaceURI).
        public static IDREF fromString(javax.xml.stream.XMLStreamReader xmlStreamReader,
                                       java.lang.String content) {
            if (content.indexOf(":") > -1){
                java.lang.String prefix = content.substring(0,content.indexOf(":"));
                java.lang.String namespaceUri = xmlStreamReader.getNamespaceContext().getNamespaceURI(prefix);
                return IDREF.Factory.fromString(content,namespaceUri);
            } else {
                return IDREF.Factory.fromString(content,"");
            }
        }

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static IDREF parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            IDREF object =
                    new IDREF();
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                // Advance to the first structural event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                // An xsi:type naming a different type dispatches to the ExtensionMapper
                // so subtypes deserialize through their own parse().
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"IDREF".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (IDREF)org.apache.axis2.databinding.types.soapencoding.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                while(!reader.isEndElement()) {
                    if (reader.isStartElement() || reader.hasText()){
                        // NOTE(review): generated code — the duplicated condition makes the
                        // inner else branch unreachable; kept verbatim.
                        if (reader.isStartElement() || reader.hasText()){
                            java.lang.String content = reader.getElementText();
                            object.setIDREF(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToIDREF(content));
                        } // End of if for expected property start element
                        else{
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }
                    } else {
                        reader.next();
                    }
                } // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2021 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.metainject;
import com.google.common.annotations.VisibleForTesting;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.injection.bean.BeanInjectionInfo;
import org.pentaho.di.core.injection.bean.BeanInjector;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaBase;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.trans.RowProducer;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.TransStoppedListener;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.RowAdapter;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInjectionMetaEntry;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInjectionInterface;
import org.pentaho.di.trans.step.StepMetaInterface;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * ETL Metadata Injection step: injects metadata read from input steps into a template
 * transformation, optionally executes the modified transformation, and can stream rows
 * through it. (Previous javadoc described a CSV-reading step and was a copy-paste leftover.)
 *
 * @author Matt
 * @since 2007-07-05
 */
public class MetaInject extends BaseStep implements StepInterface {
  private static Class<?> PKG = MetaInject.class; // for i18n purposes, needed by Translator2!!
  //Added for PDI-17530
  // Serializes repository writes of generated KTRs across concurrently running MetaInject steps.
  private static final Lock repoSaveLock = new ReentrantLock();
  // Step configuration; assigned from the interfaces passed into init()/processRow().
  private MetaInjectMeta meta;
  // Runtime state (template TransMeta, injection maps, streaming flags); assigned alongside meta.
  private MetaInjectData data;

  /**
   * Standard Kettle step constructor; delegates entirely to {@link BaseStep}.
   */
  public MetaInject(
    StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }
  /**
   * Reads all injection metadata rows from the input steps into memory, injects them into the
   * template transformation's step metadata (new-style, constants, then old-style), optionally
   * executes the modified transformation (streaming rows through it when configured), and
   * finally writes the injected KTR to a target file when requested.
   *
   * <p>Everything happens in a single call: the method always returns {@code false} to signal
   * that this step is done after one pass.</p>
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (MetaInjectMeta) smi;
    data = (MetaInjectData) sdi;
    // Read the data from all input steps and keep it in memory...
    // Skip the step from which we stream data. Keep that available for runtime action.
    //
    data.rowMap = new HashMap<String, List<RowMetaAndData>>();
    for ( String prevStepName : getTransMeta().getPrevStepNames( getStepMeta() ) ) {
      // Don't read from the streaming source step
      //
      if ( !data.streaming || !prevStepName.equalsIgnoreCase( data.streamingSourceStepname ) ) {
        List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();
        RowSet rowSet = findInputRowSet( prevStepName );
        Object[] row = getRowFrom( rowSet );
        while ( row != null ) {
          RowMetaAndData rd = new RowMetaAndData();
          rd.setRowMeta( rowSet.getRowMeta() );
          rd.setData( row );
          list.add( rd );
          row = getRowFrom( rowSet );
        }
        if ( !list.isEmpty() ) {
          data.rowMap.put( prevStepName, list );
        }
      }
    }
    List<StepMeta> steps = data.transMeta.getSteps();
    // New-style (annotation-based) injection first...
    for ( Map.Entry<String, StepMetaInterface> en : data.stepInjectionMetasMap.entrySet() ) {
      newInjection( en.getKey(), en.getValue() );
    }
    /*
     * constants injection should be executed after steps, because if constant should be inserted into target with array
     * in path, constants should be inserted into all arrays items
     */
    for ( Map.Entry<String, StepMetaInterface> en : data.stepInjectionMetasMap.entrySet() ) {
      newInjectionConstants( en.getKey(), en.getValue() );
    }
    for ( Map.Entry<String, StepMetaInterface> en : data.stepInjectionMetasMap.entrySet() ) {
      en.getValue().searchInfoAndTargetSteps( steps );
    }
    // Fall back to the old injection interface for target steps without new-style support.
    for ( String targetStepName : data.stepInjectionMap.keySet() ) {
      if ( !data.stepInjectionMetasMap.containsKey( targetStepName ) ) {
        oldInjection( targetStepName );
        StepMeta targetStep = StepMeta.findStep( steps, targetStepName );
        if ( targetStep != null ) {
          targetStep.getStepMetaInterface().searchInfoAndTargetSteps( steps );
        }
      }
    }
    if ( !meta.isNoExecution() ) {
      // Now we can execute this modified transformation metadata.
      //
      final Trans injectTrans = createInjectTrans();
      injectTrans.setParentTrans( getTrans() );
      injectTrans.setMetaStore( getMetaStore() );
      if ( getTrans().getParentJob() != null ) {
        injectTrans.setParentJob( getTrans().getParentJob() ); // See PDI-13224
      }
      // Stop the child transformation when the parent is stopped.
      getTrans().addTransStoppedListener( new TransStoppedListener() {
        public void transStopped( Trans parentTrans ) {
          injectTrans.stopAll();
        }
      } );
      injectTrans.prepareExecution( null );
      // See if we need to stream some data over...
      //
      RowProducer rowProducer = null;
      if ( data.streaming ) {
        rowProducer = injectTrans.addRowProducer( data.streamingTargetStepname, 0 );
      }
      // Finally, add the mapping transformation to the active sub-transformations
      // map in the parent transformation
      //
      getTrans().addActiveSubTransformation( getStepname(), injectTrans );
      if ( !Utils.isEmpty( meta.getSourceStepName() ) ) {
        StepInterface stepInterface = injectTrans.getStepInterface( meta.getSourceStepName(), 0 );
        if ( stepInterface == null ) {
          throw new KettleException( "Unable to find step '" + meta.getSourceStepName() + "' to read from." );
        }
        stepInterface.addRowListener( new RowAdapter() {
          @Override
          public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
            // Just pass along the data as output of this step...
            //
            MetaInject.this.putRow( rowMeta, row );
          }
        } );
      }
      injectTrans.startThreads();
      if ( data.streaming ) {
        // Deplete all the rows from the parent transformation into the modified transformation
        //
        RowSet rowSet = findInputRowSet( data.streamingSourceStepname );
        if ( rowSet == null ) {
          throw new KettleException( "Unable to find step '" + data.streamingSourceStepname + "' to stream data from" );
        }
        Object[] row = getRowFrom( rowSet );
        while ( row != null && !isStopped() ) {
          rowProducer.putRow( rowSet.getRowMeta(), row );
          row = getRowFrom( rowSet );
        }
        rowProducer.finished();
      }
      // Wait until the child transformation finished processing...
      //
      while ( !injectTrans.isFinished() && !injectTrans.isStopped() && !isStopped() ) {
        copyResult( injectTrans );
        // Wait a little bit.
        try {
          Thread.sleep( 50 );
        } catch ( Exception e ) {
          // Ignore errors
          // NOTE(review): this also swallows InterruptedException without re-asserting the
          // interrupt status - consider Thread.currentThread().interrupt() here.
        }
      }
      copyResult( injectTrans );
      waitUntilFinished( injectTrans );
    }
    // let the transformation complete it's execution to allow for any customizations to MDI to happen in the init methods of steps
    if ( log.isDetailed() ) {
      logDetailed( "XML of transformation after injection: " + data.transMeta.getXML() );
    }
    String targetFile = environmentSubstitute( meta.getTargetFile() );
    if ( !Utils.isEmpty( targetFile ) ) {
      writeInjectedKtr( targetFile );
    }
    // All done!
    setOutputDone();
    return false;
  }
  /**
   * Blocks until the injected transformation has completed. Package-local so tests can stub it.
   */
  void waitUntilFinished( Trans injectTrans ) {
    injectTrans.waitUntilFinished();
  }
  /**
   * Creates the runtime Trans for the injected template. Package-local so tests can stub it.
   */
  Trans createInjectTrans() {
    return new Trans( data.transMeta, this );
  }
private boolean shouldWriteToFilesystem() {
boolean forceWriteInFilesystem = ValueMetaBase.convertStringToBoolean(
Const.NVL( getVariable( Const.KETTLE_COMPATIBILITY_MDI_INJECTED_FILE_ALWAYS_IN_FILESYSTEM ), "N" ) );
return getRepository() == null || forceWriteInFilesystem;
}
@Override
public Repository getRepository() {
//Repository may be null if executing remotely in Pentaho Server
Repository repository = super.getRepository();
return repository != null ? repository : getTransMeta().getRepository();
}
@VisibleForTesting
void writeInjectedKtr( String targetFilPath ) throws KettleException {
if ( shouldWriteToFilesystem() ) {
writeInjectedKtrToFs( targetFilPath );
} else {
writeInjectedKtrToRepo( targetFilPath );
}
}
/**
* Writes the generated meta injection transformation to the file system.
* @param targetFilePath the filesystem path to which to save the generated injection ktr
* @throws KettleException
*/
@VisibleForTesting
void writeInjectedKtrToFs( String targetFilePath ) throws KettleException {
OutputStream os = null;
try {
//don't clear all of the clone's data before copying from the source object
TransMeta generatedTransMeta = (TransMeta) data.transMeta.realClone( false );
File injectedKtrFile = new File( targetFilePath );
if ( injectedKtrFile == null ) {
throw new IOException();
} else {
String transName = injectedKtrFile.getName().replace( ".ktr", "" );
generatedTransMeta.setName( transName ); // set transname on injectedtrans to be same as filename w/o extension
}
os = KettleVFS.getOutputStream( targetFilePath, false );
os.write( XMLHandler.getXMLHeader().getBytes( Const.XML_ENCODING ) );
os.write( generatedTransMeta.getXML().getBytes( Const.XML_ENCODING ) );
} catch ( IOException e ) {
throw new KettleException( "Unable to write target file (ktr after injection) to file '"
+ targetFilePath + "'", e );
} finally {
if ( os != null ) {
try {
os.close();
} catch ( Exception e ) {
throw new KettleException( e );
}
}
}
}
/**
* Writes the generated meta injection transformation to the repository. It is assumed that the repository
* exists (user is connected).
* @param targetFilePath the repo path to which to save the generated injection ktr
* @throws KettleException
*/
@VisibleForTesting
void writeInjectedKtrToRepo( final String targetFilePath ) throws KettleException {
try {
repoSaveLock.lock();
// clone the transMeta associated with the data, this is the generated meta injection transformation
// don't clear all of the clone's data before copying from the source object
final TransMeta generatedTrans = (TransMeta) data.transMeta.realClone( false );
// the targetFilePath holds the absolute repo path that is the requested destination of this generated
// transformation, extract the file name (no extension) and the containing directory and adjust the generated
// transformation properties accordingly
List<String> targetPath = new ArrayList( Arrays.asList( Const.splitPath( targetFilePath,
RepositoryDirectory.DIRECTORY_SEPARATOR ) ) );
final String fileName = targetPath.get( targetPath.size() - 1 ).replace( ".ktr", "" );
generatedTrans.setName( fileName );
// remove the last targetPath element, so we're left with the target directory path
targetPath.remove( targetPath.size() - 1 );
if ( targetPath.size() > 0 ) {
final String dirPath = String.join( RepositoryDirectory.DIRECTORY_SEPARATOR, targetPath );
RepositoryDirectoryInterface directory = getRepository().findDirectory( dirPath );
// if the directory does not exist, try to create it
if ( directory == null ) {
directory = getRepository().createRepositoryDirectory( new RepositoryDirectory( null, "/" ), dirPath );
}
generatedTrans.setRepositoryDirectory( directory );
} else {
// if the directory is null, set it to the directory of the cloned template ktr
if ( log.isDebug() ) {
log.logDebug( "The target injection ktr file path provided by the user is not a valid fully qualified "
+ "repository path - will store the generated ktr in the same directory as the template ktr: ",
data.transMeta.getRepositoryDirectory() );
}
generatedTrans.setRepositoryDirectory( data.transMeta.getRepositoryDirectory() );
}
// set the objectId, in case the injected transformation already exists in the repo, so that is is updated in
// the repository - the objectId will remain null, if the transformation is begin generated for the first time,
// in which a new ktr will be created in the repo
generatedTrans.setObjectId( getRepository().getTransformationID( fileName, generatedTrans.getRepositoryDirectory() ) );
getRepository().save( generatedTrans, null, null, true );
} finally {
repoSaveLock.unlock();
}
}
/**
* Inject values from steps.
*/
private void newInjection( String targetStep, StepMetaInterface targetStepMeta ) throws KettleException {
if ( log.isDetailed() ) {
logDetailed( "Handing step '" + targetStep + "' injection!" );
}
BeanInjectionInfo injectionInfo = new BeanInjectionInfo( targetStepMeta.getClass() );
BeanInjector injector = new BeanInjector( injectionInfo );
// Collect all the metadata for this target step...
//
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
boolean wasInjection = false;
for ( TargetStepAttribute target : targetMap.keySet() ) {
SourceStepField source = targetMap.get( target );
if ( target.getStepname().equalsIgnoreCase( targetStep ) ) {
// This is the step to collect data for...
// We also know which step to read the data from. (source)
//
if ( source.getStepname() != null ) {
// from specified steo
List<RowMetaAndData> rows = data.rowMap.get( source.getStepname() );
if ( rows != null && !rows.isEmpty() ) {
// Which metadata key is this referencing? Find the attribute key in the metadata entries...
//
if ( injector.hasProperty( targetStepMeta, target.getAttributeKey() ) ) {
// target step has specified key
boolean skip = false;
for ( RowMetaAndData r : rows ) {
if ( r.getRowMeta().indexOfValue( source.getField() ) < 0 ) {
logError( BaseMessages.getString( PKG, "MetaInject.SourceFieldIsNotDefined.Message", source
.getField(), getTransMeta().getName() ) );
// source step doesn't contain specified field
skip = true;
}
}
if ( !skip ) {
// specified field exist - need to inject
injector.setProperty( targetStepMeta, target.getAttributeKey(), rows, source.getField() );
wasInjection = true;
}
} else {
// target step doesn't have specified key - just report but don't fail like in 6.0 (BACKLOG-6753)
logError( BaseMessages.getString( PKG, "MetaInject.TargetKeyIsNotDefined.Message", target
.getAttributeKey(), getTransMeta().getName() ) );
}
}
}
}
}
if ( wasInjection ) {
injector.runPostInjectionProcessing( targetStepMeta );
}
}
/**
* Inject constant values.
*/
private void newInjectionConstants( String targetStep, StepMetaInterface targetStepMeta ) throws KettleException {
if ( log.isDetailed() ) {
logDetailed( "Handing step '" + targetStep + "' constants injection!" );
}
BeanInjectionInfo injectionInfo = new BeanInjectionInfo( targetStepMeta.getClass() );
BeanInjector injector = new BeanInjector( injectionInfo );
// Collect all the metadata for this target step...
boolean wasInjection = false;
for ( Map.Entry<TargetStepAttribute, SourceStepField> entry : meta.getTargetSourceMapping().entrySet() ) {
TargetStepAttribute target = entry.getKey();
SourceStepField source = entry.getValue();
if ( target.getStepname().equalsIgnoreCase( targetStep ) ) {
// This is the step to collect data for...
// We also know which step to read the data from. (source)
//
if ( source.getStepname() == null ) {
// inject constant
if ( injector.hasProperty( targetStepMeta, target.getAttributeKey() ) ) {
// target step has specified key
injector.setProperty( targetStepMeta, target.getAttributeKey(), null, source.getField() );
wasInjection = true;
} else {
// target step doesn't have specified key - just report but don't fail like in 6.0 (BACKLOG-6753)
logError( BaseMessages.getString( PKG, "MetaInject.TargetKeyIsNotDefined.Message", target.getAttributeKey(),
getTransMeta().getName() ) );
}
}
}
}
// NOTE: case when only 1 field out of group is supplied constant, need to populate other fields
if ( wasInjection ) {
injector.runPostInjectionProcessing( targetStepMeta );
}
}
  /**
   * Injects metadata into a target step through the legacy StepMetaInjectionInterface.
   * Top-level (non-detail) attributes are set from the first source row; detail (grid)
   * attributes are expanded into an X-rows-by-Y-attributes entry grid first, then filled
   * cell by cell from the source rows.
   *
   * @param targetStep name of the step in the template transformation to inject into
   * @throws KettleException when building or applying the injection entries fails
   */
  private void oldInjection( String targetStep ) throws KettleException {
    if ( log.isDetailed() ) {
      logDetailed( "Handing step '" + targetStep + "' injection!" );
    }
    // This is the injection interface:
    //
    StepMetaInjectionInterface injectionInterface = data.stepInjectionMap.get( targetStep );
    // This is the injection description:
    //
    List<StepInjectionMetaEntry> metadataEntries = injectionInterface.getStepInjectionMetadataEntries();
    // Create a new list of metadata injection entries...
    //
    List<StepInjectionMetaEntry> inject = new ArrayList<StepInjectionMetaEntry>();
    // Collect all the metadata for this target step...
    //
    Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
    for ( TargetStepAttribute target : targetMap.keySet() ) {
      SourceStepField source = targetMap.get( target );
      if ( target.getStepname().equalsIgnoreCase( targetStep ) ) {
        // This is the step to collect data for...
        // We also know which step to read the data from. (source)
        //
        List<RowMetaAndData> rows = data.rowMap.get( source.getStepname() );
        if ( rows != null && rows.size() > 0 ) {
          // Which metadata key is this referencing? Find the attribute key in the metadata entries...
          //
          StepInjectionMetaEntry entry = findMetaEntry( metadataEntries, target.getAttributeKey() );
          if ( entry != null ) {
            if ( !target.isDetail() ) {
              // Simple top-level attribute: set it from the first source row.
              setEntryValueIfFieldExists( entry, rows.get( 0 ), source );
              inject.add( entry );
            } else {
              // We are going to pass this entry N times for N target mappings
              // As such, we have to see if it's already in the injection list...
              //
              StepInjectionMetaEntry metaEntries = findMetaEntry( inject, entry.getKey() );
              if ( metaEntries == null ) {
                StepInjectionMetaEntry rootEntry = findDetailRootEntry( metadataEntries, entry );
                // Inject an empty copy
                //
                metaEntries = rootEntry.clone();
                metaEntries.setDetails( new ArrayList<StepInjectionMetaEntry>() );
                inject.add( metaEntries );
                // We also need to pre-populate the whole grid: X rows by Y attributes
                //
                StepInjectionMetaEntry metaEntry = rootEntry.getDetails().get( 0 );
                for ( int i = 0; i < rows.size(); i++ ) {
                  StepInjectionMetaEntry metaCopy = metaEntry.clone();
                  metaEntries.getDetails().add( metaCopy );
                  metaCopy.setDetails( new ArrayList<StepInjectionMetaEntry>() );
                  for ( StepInjectionMetaEntry me : metaEntry.getDetails() ) {
                    StepInjectionMetaEntry meCopy = me.clone();
                    metaCopy.getDetails().add( meCopy );
                  }
                }
                // From now on we can simply refer to the correct X,Y coordinate.
              } else {
                StepInjectionMetaEntry rootEntry = findDetailRootEntry( inject, metaEntries );
                metaEntries = rootEntry;
              }
              // Fill the pre-populated grid cell by cell from the source rows.
              for ( int i = 0; i < rows.size(); i++ ) {
                RowMetaAndData row = rows.get( i );
                try {
                  List<StepInjectionMetaEntry> rowEntries = metaEntries.getDetails().get( i ).getDetails();
                  for ( StepInjectionMetaEntry rowEntry : rowEntries ) {
                    // We have to look up the sources for these targets again in the target-2-source mapping
                    // That is because we only want handle this as few times as possible...
                    //
                    SourceStepField detailSource = findDetailSource( targetMap, targetStep, rowEntry.getKey() );
                    if ( detailSource != null ) {
                      setEntryValueIfFieldExists( rowEntry, row, detailSource );
                    } else {
                      if ( log.isDetailed() ) {
                        logDetailed( "No detail source found for key: " + rowEntry.getKey() + " and target step: "
                          + targetStep );
                      }
                    }
                  }
                } catch ( Exception e ) {
                  throw new KettleException( "Unexpected error occurred while injecting metadata", e );
                }
              }
              if ( log.isDetailed() ) {
                logDetailed( "injected entry: " + entry );
              }
            }
            // End of TopLevel/Detail if block
          } else {
            if ( log.isDetailed() ) {
              logDetailed( "entry not found: " + target.getAttributeKey() );
            }
          }
        } else {
          if ( log.isDetailed() ) {
            logDetailed( "No rows found for source step: " + source.getStepname() );
          }
        }
      }
    }
    // Inject the metadata into the step!
    //
    injectionInterface.injectStepMetadataEntries( inject );
  }
private void copyResult( Trans trans ) {
Result result = trans.getResult();
setLinesInput( result.getNrLinesInput() );
setLinesOutput( result.getNrLinesOutput() );
setLinesRead( result.getNrLinesRead() );
setLinesWritten( result.getNrLinesWritten() );
setLinesUpdated( result.getNrLinesUpdated() );
setLinesRejected( result.getNrLinesRejected() );
setErrors( result.getNrErrors() );
}
private StepInjectionMetaEntry findDetailRootEntry( List<StepInjectionMetaEntry> metadataEntries,
StepInjectionMetaEntry entry ) {
for ( StepInjectionMetaEntry rowsEntry : metadataEntries ) {
for ( StepInjectionMetaEntry rowEntry : rowsEntry.getDetails() ) {
for ( StepInjectionMetaEntry detailEntry : rowEntry.getDetails() ) {
if ( detailEntry.equals( entry ) ) {
return rowsEntry;
}
}
}
}
return null;
}
private SourceStepField findDetailSource( Map<TargetStepAttribute, SourceStepField> targetMap, String targetStep,
String key ) {
return targetMap.get( new TargetStepAttribute( targetStep, key, true ) );
}
private StepInjectionMetaEntry findMetaEntry( List<StepInjectionMetaEntry> metadataEntries, String attributeKey ) {
for ( StepInjectionMetaEntry entry : metadataEntries ) {
if ( entry.getKey().equals( attributeKey ) ) {
return entry;
}
entry = findMetaEntry( entry.getDetails(), attributeKey );
if ( entry != null ) {
return entry;
}
}
return null;
}
/**
* package-local visibility for testing purposes
*/
void setEntryValueIfFieldExists( StepInjectionMetaEntry entry, RowMetaAndData row, SourceStepField source )
throws KettleValueException {
RowMetaInterface rowMeta = row.getRowMeta();
if ( rowMeta.indexOfValue( source.getField() ) < 0 ) {
return;
}
setEntryValue( entry, row, source );
}
/**
* package-local visibility for testing purposes
*/
static void setEntryValue( StepInjectionMetaEntry entry, RowMetaAndData row, SourceStepField source )
throws KettleValueException {
// A standard attribute, a single row of data...
//
Object value = null;
switch ( entry.getValueType() ) {
case ValueMetaInterface.TYPE_STRING:
value = row.getString( source.getField(), null );
break;
case ValueMetaInterface.TYPE_BOOLEAN:
value = row.getBoolean( source.getField(), false );
break;
case ValueMetaInterface.TYPE_INTEGER:
value = row.getInteger( source.getField(), 0L );
break;
case ValueMetaInterface.TYPE_NUMBER:
value = row.getNumber( source.getField(), 0.0D );
break;
case ValueMetaInterface.TYPE_DATE:
value = row.getDate( source.getField(), null );
break;
case ValueMetaInterface.TYPE_BIGNUMBER:
value = row.getBigNumber( source.getField(), null );
break;
default:
break;
}
entry.setValue( value );
}
  /**
   * Initializes the step: loads the template transformation, validates the configured source
   * and target steps, and builds the new-style and old-style injection lookup maps.
   * Returns false (with a logged error) when loading or validation throws.
   */
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (MetaInjectMeta) smi;
    data = (MetaInjectData) sdi;
    if ( super.init( smi, sdi ) ) {
      try {
        meta.actualizeMetaInjectMapping();
        data.transMeta = loadTransformationMeta();
        data.transMeta.copyVariablesFrom( this );
        data.transMeta.mergeParametersWith( this.getTrans(), true );
        checkSoureStepsAvailability();
        checkTargetStepsAvailability();
        // Get a mapping between the step name and the injection...
        //
        // Get new injection info
        data.stepInjectionMetasMap = new HashMap<String, StepMetaInterface>();
        for ( StepMeta stepMeta : data.transMeta.getUsedSteps() ) {
          // NOTE(review): this local 'meta' shadows the MetaInjectMeta field of the same name.
          StepMetaInterface meta = stepMeta.getStepMetaInterface();
          if ( BeanInjectionInfo.isInjectionSupported( meta.getClass() ) ) {
            data.stepInjectionMetasMap.put( stepMeta.getName(), meta );
          }
        }
        // Get old injection info
        data.stepInjectionMap = new HashMap<String, StepMetaInjectionInterface>();
        for ( StepMeta stepMeta : data.transMeta.getUsedSteps() ) {
          StepMetaInjectionInterface injectionInterface =
            stepMeta.getStepMetaInterface().getStepMetaInjectionInterface();
          if ( injectionInterface != null ) {
            data.stepInjectionMap.put( stepMeta.getName(), injectionInterface );
          }
        }
        // See if we need to stream data from a specific step into the template
        //
        if ( meta.getStreamSourceStep() != null && !Utils.isEmpty( meta.getStreamTargetStepname() ) ) {
          data.streaming = true;
          data.streamingSourceStepname = meta.getStreamSourceStep().getName();
          data.streamingTargetStepname = meta.getStreamTargetStepname();
        }
        return true;
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "MetaInject.BadEncoding.Message" ), e );
        return false;
      }
    }
    return false;
  }
private void checkTargetStepsAvailability() {
Set<String> existedStepNames = convertToUpperCaseSet( data.transMeta.getStepNames() );
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
Set<TargetStepAttribute> unavailableTargetSteps = getUnavailableTargetSteps( targetMap, data.transMeta );
Set<String> alreadyMarkedSteps = new HashSet<String>();
for ( TargetStepAttribute currentTarget : unavailableTargetSteps ) {
if ( alreadyMarkedSteps.contains( currentTarget.getStepname() ) ) {
continue;
}
alreadyMarkedSteps.add( currentTarget.getStepname() );
if ( existedStepNames.contains( currentTarget.getStepname().toUpperCase() ) ) {
logError( BaseMessages.getString( PKG, "MetaInject.TargetStepIsNotUsed.Message", currentTarget.getStepname(),
data.transMeta.getName() ) );
} else {
logError( BaseMessages.getString( PKG, "MetaInject.TargetStepIsNotDefined.Message", currentTarget.getStepname(),
data.transMeta.getName() ) );
}
}
// alreadyMarked contains wrong steps. Spoon can report error if it will not fail transformation [BACKLOG-6753]
}
public static void removeUnavailableStepsFromMapping( Map<TargetStepAttribute, SourceStepField> targetMap,
Set<SourceStepField> unavailableSourceSteps, Set<TargetStepAttribute> unavailableTargetSteps ) {
Iterator<Entry<TargetStepAttribute, SourceStepField>> targetMapIterator = targetMap.entrySet().iterator();
while ( targetMapIterator.hasNext() ) {
Entry<TargetStepAttribute, SourceStepField> entry = targetMapIterator.next();
SourceStepField currentSourceStepField = entry.getValue();
TargetStepAttribute currentTargetStepAttribute = entry.getKey();
if ( unavailableSourceSteps.contains( currentSourceStepField ) || unavailableTargetSteps.contains(
currentTargetStepAttribute ) ) {
targetMapIterator.remove();
}
}
}
public static Set<TargetStepAttribute> getUnavailableTargetSteps( Map<TargetStepAttribute, SourceStepField> targetMap,
TransMeta injectedTransMeta ) {
Set<String> usedStepNames = getUsedStepsForReferencendTransformation( injectedTransMeta );
Set<TargetStepAttribute> unavailableTargetSteps = new HashSet<TargetStepAttribute>();
for ( TargetStepAttribute currentTarget : targetMap.keySet() ) {
if ( !usedStepNames.contains( currentTarget.getStepname().toUpperCase() ) ) {
unavailableTargetSteps.add( currentTarget );
}
}
return Collections.unmodifiableSet( unavailableTargetSteps );
}
public static Set<TargetStepAttribute> getUnavailableTargetKeys( Map<TargetStepAttribute, SourceStepField> targetMap,
TransMeta injectedTransMeta, Set<TargetStepAttribute> unavailableTargetSteps ) {
Set<TargetStepAttribute> missingKeys = new HashSet<>();
Map<String, BeanInjectionInfo> beanInfos = getUsedStepBeanInfos( injectedTransMeta );
for ( TargetStepAttribute key : targetMap.keySet() ) {
if ( !unavailableTargetSteps.contains( key ) ) {
BeanInjectionInfo info = beanInfos.get( key.getStepname().toUpperCase() );
if ( info != null && !info.getProperties().containsKey( key.getAttributeKey() ) ) {
missingKeys.add( key );
}
}
}
return missingKeys;
}
private static Map<String, BeanInjectionInfo> getUsedStepBeanInfos( TransMeta transMeta ) {
Map<String, BeanInjectionInfo> res = new HashMap<>();
for ( StepMeta step : transMeta.getUsedSteps() ) {
Class<? extends StepMetaInterface> stepMetaClass = step.getStepMetaInterface().getClass();
if ( BeanInjectionInfo.isInjectionSupported( stepMetaClass ) ) {
res.put( step.getName().toUpperCase(), new BeanInjectionInfo( stepMetaClass ) );
}
}
return res;
}
private static Set<String> getUsedStepsForReferencendTransformation( TransMeta transMeta ) {
Set<String> usedStepNames = new HashSet<String>();
for ( StepMeta currentStep : transMeta.getUsedSteps() ) {
usedStepNames.add( currentStep.getName().toUpperCase() );
}
return usedStepNames;
}
public static Set<SourceStepField> getUnavailableSourceSteps( Map<TargetStepAttribute, SourceStepField> targetMap,
TransMeta sourceTransMeta, StepMeta stepMeta ) {
String[] stepNamesArray = sourceTransMeta.getPrevStepNames( stepMeta );
Set<String> existedStepNames = convertToUpperCaseSet( stepNamesArray );
Set<SourceStepField> unavailableSourceSteps = new HashSet<SourceStepField>();
for ( SourceStepField currentSource : targetMap.values() ) {
if ( currentSource.getStepname() != null ) {
if ( !existedStepNames.contains( currentSource.getStepname().toUpperCase() ) ) {
unavailableSourceSteps.add( currentSource );
}
}
}
return Collections.unmodifiableSet( unavailableSourceSteps );
}
private void checkSoureStepsAvailability() {
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
Set<SourceStepField> unavailableSourceSteps =
getUnavailableSourceSteps( targetMap, getTransMeta(), getStepMeta() );
Set<String> alreadyMarkedSteps = new HashSet<String>();
for ( SourceStepField currentSource : unavailableSourceSteps ) {
if ( alreadyMarkedSteps.contains( currentSource.getStepname() ) ) {
continue;
}
alreadyMarkedSteps.add( currentSource.getStepname() );
logError( BaseMessages.getString( PKG, "MetaInject.SourceStepIsNotAvailable.Message", currentSource.getStepname(),
getTransMeta().getName() ) );
}
// alreadyMarked contains wrong steps. Spoon can report error if it will not fail transformation [BACKLOG-6753]
}
/**
 * Converts the given array of strings into a set of their upper-case equivalents.
 * A {@code null} array yields an immutable empty set.
 * package-local visibility for testing purposes
 *
 * @param array the strings to convert, may be {@code null}
 * @return a set containing each input string upper-cased (duplicates collapse)
 */
static Set<String> convertToUpperCaseSet( String[] array ) {
  if ( array == null ) {
    // Preserve the immutable empty-set contract for the null case.
    return Collections.emptySet();
  }
  Set<String> upperCased = new HashSet<String>();
  for ( int i = 0; i < array.length; i++ ) {
    upperCased.add( array[i].toUpperCase() );
  }
  return upperCased;
}
/**
 * Loads the metadata of the transformation to inject into, resolving it through the current
 * trans' repository and metastore.
 * package-local visibility for testing purposes
 *
 * @return the loaded template transformation metadata
 * @throws KettleException if the transformation cannot be located or loaded
 */
TransMeta loadTransformationMeta() throws KettleException {
return MetaInjectMeta.loadTransformationMeta( meta, getTrans().getRepository(), getTrans().getMetaStore(), this );
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan.concurrent;
import static org.junit.Assert.*;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.wan.GatewaySender.OrderPolicy;
import org.apache.geode.internal.cache.wan.WANTestBase;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.junit.categories.DistributedTest;
@Category(DistributedTest.class)
public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTestBase {
private static final long serialVersionUID = 1L;
/**
 * No-arg constructor required by the DUnit framework, which instantiates test classes
 * reflectively; simply delegates to the WANTestBase constructor.
 */
public ConcurrentParallelGatewaySenderOperation_1_DUnitTest() {
super();
}
/**
 * Registers socket-teardown exceptions that are expected (and harmless) when WAN connections
 * are churned by these tests, so that their log messages do not fail the suite.
 */
@Override
protected final void postSetUpWANTestBase() throws Exception {
  for (String expectedMessage : new String[] {"Broken pipe", "Connection reset",
      "Unexpected IOException"}) {
    IgnoredException.addIgnoredException(expectedMessage);
  }
}
/**
 * Verifies that manual-start senders that are never started remain in the stopped state and
 * propagate nothing: puts on the local site must not appear on the remote site.
 */
@Test
public void testParallelGatewaySenderWithoutStarting() {
  Integer localLocatorPort =
      (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer remoteLocatorPort =
      (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, localLocatorPort));

  // Remote (ny) site: caches and receivers.
  createCacheInVMs(remoteLocatorPort, vm2, vm3);
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site: caches plus concurrent parallel senders (6 dispatchers, KEY ordering).
  createCacheInVMs(localLocatorPort, vm4, vm5, vm6, vm7);
  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 6, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 6, OrderPolicy.KEY));
  vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 6, OrderPolicy.KEY));
  vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 6, OrderPolicy.KEY));

  String regionName = getTestMethodName() + "_PR";
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm6.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm7.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm2.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));

  vm4.invoke(() -> WANTestBase.doPuts(regionName, 1000));

  // The senders were never started, so each must still report the stopped state ...
  vm4.invoke(() -> WANTestBase.verifySenderStoppedState("ln"));
  vm5.invoke(() -> WANTestBase.verifySenderStoppedState("ln"));
  vm6.invoke(() -> WANTestBase.verifySenderStoppedState("ln"));
  vm7.invoke(() -> WANTestBase.verifySenderStoppedState("ln"));

  // ... and no events may have reached the remote site.
  vm2.invoke(() -> WANTestBase.validateRegionSize(regionName, 0));
  vm3.invoke(() -> WANTestBase.validateRegionSize(regionName, 0));
}
/**
 * Defect 44323: a ParallelGatewaySender must not host queue buckets on an accessor node.
 * Starting the sender on accessors must be harmless and all events must still drain through
 * the data-hosting members and reach the remote site.
 */
@Test
public void testParallelGatewaySenderStartOnAccessorNode() {
  Integer localLocatorPort =
      (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer remoteLocatorPort =
      (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, localLocatorPort));

  // Remote (ny) site: caches and receivers.
  createCacheInVMs(remoteLocatorPort, vm2, vm3);
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site: concurrent senders on all four members (7 dispatchers, KEY ordering).
  createCacheInVMs(localLocatorPort, vm4, vm5, vm6, vm7);
  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 7, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 7, OrderPolicy.KEY));
  vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 7, OrderPolicy.KEY));
  vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 7, OrderPolicy.KEY));

  String regionName = getTestMethodName() + "_PR";
  // vm4/vm5 host data; vm6/vm7 are accessors (no local storage).
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm6.invoke(() -> WANTestBase.createPartitionedRegionAsAccessor(regionName, "ln", 1, 100));
  vm7.invoke(() -> WANTestBase.createPartitionedRegionAsAccessor(regionName, "ln", 1, 100));
  vm2.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));

  // Start the senders, including on the accessor nodes.
  startSenderInVMs("ln", vm4, vm5, vm6, vm7);
  vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));

  vm4.invoke(() -> WANTestBase.doPuts(regionName, 1000));

  // Queues exist (and therefore drain) only on the data-hosting members.
  vm4.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm5.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));

  vm2.invoke(() -> WANTestBase.validateRegionSize(regionName, 1000));
  vm3.invoke(() -> WANTestBase.validateRegionSize(regionName, 1000));
}
/**
 * Normal scenario in which the sender is paused in between: once every sender is paused,
 * further puts must not be propagated, so the remote region size stays at the pre-pause count.
 *
 * @throws Exception if any distributed invocation fails
 */
@Test
public void testParallelPropagationSenderPause() throws Exception {
  Integer localLocatorPort =
      (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer remoteLocatorPort =
      (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, localLocatorPort));

  // Remote (ny) site: caches and receivers.
  createCacheInVMs(remoteLocatorPort, vm2, vm3);
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site: concurrent senders (5 dispatchers, KEY ordering) and data regions.
  createCacheInVMs(localLocatorPort, vm4, vm5, vm6, vm7);
  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 5, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 5, OrderPolicy.KEY));
  vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 5, OrderPolicy.KEY));
  vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 5, OrderPolicy.KEY));

  String regionName = getTestMethodName() + "_PR";
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm6.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm7.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));

  startSenderInVMs("ln", vm4, vm5, vm6, vm7);

  vm2.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));

  // Make sure every sender is running before doing any puts.
  vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));

  // FIRST RUN: senders are running, so these puts should be propagated.
  vm4.invoke(() -> WANTestBase.doPuts(regionName, 100));

  // Pause every sender.
  vm4.invoke(() -> WANTestBase.pauseSender("ln"));
  vm5.invoke(() -> WANTestBase.pauseSender("ln"));
  vm6.invoke(() -> WANTestBase.pauseSender("ln"));
  vm7.invoke(() -> WANTestBase.pauseSender("ln"));
  Wait.pause(2000);

  // SECOND RUN: keep one thread doing puts to the region while the senders are paused.
  vm4.invokeAsync(() -> WANTestBase.doPuts(regionName, 1000));

  // The remote region size must stay at the first-run count (nothing new propagated).
  vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(regionName, 100));
}
/**
 * Normal scenario in which a paused sender is resumed: after the resume, the queues must drain
 * completely and every event must reach the remote site.
 *
 * @throws Exception if any distributed invocation fails
 */
@Test
public void testParallelPropagationSenderResume() throws Exception {
  Integer localLocatorPort =
      (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer remoteLocatorPort =
      (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, localLocatorPort));

  // Remote (ny) site: caches and receivers.
  createCacheInVMs(remoteLocatorPort, vm2, vm3);
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site: concurrent senders (8 dispatchers, KEY ordering) and data regions.
  createCacheInVMs(localLocatorPort, vm4, vm5, vm6, vm7);
  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 8, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 8, OrderPolicy.KEY));
  vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 8, OrderPolicy.KEY));
  vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 8, OrderPolicy.KEY));

  String regionName = getTestMethodName() + "_PR";
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm6.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm7.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));

  startSenderInVMs("ln", vm4, vm5, vm6, vm7);

  vm2.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));

  // Make sure every sender is running before doing any puts.
  vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));

  // Start puts in a background thread while the pause/resume cycle happens.
  vm4.invokeAsync(() -> WANTestBase.doPuts(regionName, 1000));

  // Pause every sender.
  vm4.invoke(() -> WANTestBase.pauseSender("ln"));
  vm5.invoke(() -> WANTestBase.pauseSender("ln"));
  vm6.invoke(() -> WANTestBase.pauseSender("ln"));
  vm7.invoke(() -> WANTestBase.pauseSender("ln"));

  // Let the pause take effect for a couple of seconds.
  Wait.pause(2000);

  // Resume every sender.
  vm4.invoke(() -> WANTestBase.resumeSender("ln"));
  vm5.invoke(() -> WANTestBase.resumeSender("ln"));
  vm6.invoke(() -> WANTestBase.resumeSender("ln"));
  vm7.invoke(() -> WANTestBase.resumeSender("ln"));
  Wait.pause(2000);

  // After resuming, all queue buckets must drain on every member ...
  vm4.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm5.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm6.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm7.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));

  // ... and every event must have arrived on the remote site.
  vm2.invoke(() -> WANTestBase.validateRegionSize(regionName, 1000));
}
/**
 * Negative scenario in which a sender that is stopped (and not paused) is resumed. Expected:
 * resume is only valid for a paused sender; resuming a stopped sender must not restart it, so
 * events put after the stop never reach the remote site.
 *
 * @throws Exception if any distributed invocation fails
 */
@Test
public void testParallelPropagationSenderResumeNegativeScenario() throws Exception {
  Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));

  // Remote (ny) site: caches and receivers.
  createCacheInVMs(nyPort, vm2, vm3);
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site. BUG FIX: vm4/vm5 were previously initialized via
  // createCacheInVMs(nyPort, vm4, vm5) -- i.e. against the REMOTE site's locator -- and then
  // had createCache(lnPort) invoked redundantly on top of that. These two members belong to
  // the local site, so create their caches exactly once with the local locator port.
  createCacheInVMs(lnPort, vm4, vm5);

  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 4, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 4, OrderPolicy.KEY));
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
      isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
      isOffHeap()));
  vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
      isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
      isOffHeap()));

  startSenderInVMs("ln", vm4, vm5);

  // Wait till the senders are running.
  vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));

  // Start the puts.
  vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 100));

  // Let the queue drain completely.
  vm4.invoke(() -> WANTestBase.validateQueueContents("ln", 0));

  // Stop the senders.
  vm4.invoke(() -> WANTestBase.stopSender("ln"));
  vm5.invoke(() -> WANTestBase.stopSender("ln"));

  // Now, try to resume a stopped sender; this must NOT restart it.
  vm4.invoke(() -> WANTestBase.resumeSender("ln"));
  vm5.invoke(() -> WANTestBase.resumeSender("ln"));

  // Do more puts; these must not be propagated.
  vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 1000));

  // The remote region must contain only the events put before the senders were stopped.
  vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 100));
}
/**
 * Normal scenario in which a sender is stopped: puts done after all senders are stopped must
 * not reach the remote site, so the remote region size stays at the pre-stop count.
 *
 * @throws Exception if any distributed invocation fails
 */
@Test
public void testParallelPropagationSenderStop() throws Exception {
  Integer localLocatorPort =
      (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer remoteLocatorPort =
      (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, localLocatorPort));

  String regionName = getTestMethodName() + "_PR";

  // Remote (ny) site: regions are created before the receivers here.
  createCacheInVMs(remoteLocatorPort, vm2, vm3);
  vm2.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site: concurrent senders (3 dispatchers, KEY ordering) and data regions.
  createCacheInVMs(localLocatorPort, vm4, vm5, vm6, vm7);
  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 3, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 3, OrderPolicy.KEY));
  vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 3, OrderPolicy.KEY));
  vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 3, OrderPolicy.KEY));
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm6.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm7.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));

  startSenderInVMs("ln", vm4, vm5, vm6, vm7);

  // Make sure every sender is running before doing any puts.
  vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));

  // FIRST RUN: senders are running, so these puts should be propagated.
  vm4.invoke(() -> WANTestBase.doPuts(regionName, 100));

  // Stop every sender.
  vm4.invoke(() -> WANTestBase.stopSender("ln"));
  vm5.invoke(() -> WANTestBase.stopSender("ln"));
  vm6.invoke(() -> WANTestBase.stopSender("ln"));
  vm7.invoke(() -> WANTestBase.stopSender("ln"));

  // SECOND RUN: keep one thread doing puts while the senders are stopped.
  vm4.invokeAsync(() -> WANTestBase.doPuts(regionName, 1000));

  // The remote region size must stay at the first-run count (nothing new propagated).
  vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(regionName, 100));
}
/**
 * Normal scenario in which a sender is stopped and then started again. While stopped, puts must
 * not propagate; after the restart, subsequent puts must drain fully and reach the remote site.
 */
@Test
public void testParallelPropagationSenderStartAfterStop() throws Throwable {
  Integer localLocatorPort =
      (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
  Integer remoteLocatorPort =
      (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, localLocatorPort));

  String regionName = getTestMethodName() + "_PR";

  // Remote (ny) site: regions are created before the receivers here.
  createCacheInVMs(remoteLocatorPort, vm2, vm3);
  vm2.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  vm3.invoke(() -> WANTestBase.createPartitionedRegion(regionName, null, 1, 100, isOffHeap()));
  createReceiverInVMs(vm2, vm3);

  // Local (ln) site: concurrent senders (4 dispatchers, KEY ordering) and data regions.
  createCacheInVMs(localLocatorPort, vm4, vm5, vm6, vm7);
  vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 4, OrderPolicy.KEY));
  vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 4, OrderPolicy.KEY));
  vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 4, OrderPolicy.KEY));
  vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
      true, 4, OrderPolicy.KEY));
  vm4.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm5.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm6.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));
  vm7.invoke(() -> WANTestBase.createPartitionedRegion(regionName, "ln", 1, 100, isOffHeap()));

  startSenderInVMs("ln", vm4, vm5, vm6, vm7);

  // Make sure every sender is running before doing any puts.
  vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
  vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));

  // FIRST RUN: senders are running, so these puts should be propagated.
  vm4.invoke(() -> WANTestBase.doPuts(regionName, 200));

  // Stop every sender.
  vm4.invoke(() -> WANTestBase.stopSender("ln"));
  vm5.invoke(() -> WANTestBase.stopSender("ln"));
  vm6.invoke(() -> WANTestBase.stopSender("ln"));
  vm7.invoke(() -> WANTestBase.stopSender("ln"));
  Wait.pause(2000);

  // SECOND RUN: puts done while the senders are stopped must not propagate.
  vm4.invoke(() -> WANTestBase.doPuts(regionName, 1000));

  // The remote region size must stay at the first-run count.
  vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(regionName, 200));

  // Start the senders again, concurrently, and wait for each start to finish.
  AsyncInvocation vm4start = vm4.invokeAsync(() -> WANTestBase.startSender("ln"));
  AsyncInvocation vm5start = vm5.invokeAsync(() -> WANTestBase.startSender("ln"));
  AsyncInvocation vm6start = vm6.invokeAsync(() -> WANTestBase.startSender("ln"));
  AsyncInvocation vm7start = vm7.invokeAsync(() -> WANTestBase.startSender("ln"));
  int startTimeoutMillis = 30000;
  vm4start.getResult(startTimeoutMillis);
  vm5start.getResult(startTimeoutMillis);
  vm6start.getResult(startTimeoutMillis);
  vm7start.getResult(startTimeoutMillis);

  // Restarting alone must not replay the events put while stopped.
  vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(regionName, 200));

  // SECOND RUN: do some more puts and wait for them to complete.
  AsyncInvocation async = vm4.invokeAsync(() -> WANTestBase.doPuts(regionName, 1000));
  async.join();

  // Verify all the buckets on all the sender nodes are drained.
  vm4.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm5.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm6.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
  vm7.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));

  // Verify the events propagated to the remote site and the queues emptied.
  vm2.invoke(() -> WANTestBase.validateRegionSize(regionName, 1000));
  vm4.invoke(() -> WANTestBase.validateQueueSizeStat("ln", 0));
  vm5.invoke(() -> WANTestBase.validateQueueSizeStat("ln", 0));
  vm6.invoke(() -> WANTestBase.validateQueueSizeStat("ln", 0));
  vm7.invoke(() -> WANTestBase.validateQueueSizeStat("ln", 0));
}
/**
 * Normal scenario in which a sender is stopped and then started again. Differs from the above
 * test case in that while the senders are starting from the stopped state, puts are
 * simultaneously happening on the region from another thread; all events must still eventually
 * drain from the queues.
 *
 * NOTE(review): ignored pending Bug47553 — do not re-enable without confirming the bug is fixed.
 *
 * @throws Exception if any distributed invocation fails
 */
@Ignore("Bug47553")
@Test
public void testParallelPropagationSenderStartAfterStop_Scenario2() throws Exception {
// Locators: DS id 1 is the local (ln) site, DS id 2 the remote (ny) site.
Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
// Remote site: caches, regions, then receivers.
createCacheInVMs(nyPort, vm2, vm3);
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
isOffHeap()));
createReceiverInVMs(vm2, vm3);
// Local site: concurrent senders (7 dispatchers, KEY ordering) and data regions.
createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 7, OrderPolicy.KEY));
vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 7, OrderPolicy.KEY));
vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 7, OrderPolicy.KEY));
vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 7, OrderPolicy.KEY));
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
isOffHeap()));
startSenderInVMs("ln", vm4, vm5, vm6, vm7);
// make sure all the senders are running before doing any puts
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
LogWriterUtils.getLogWriter().info("All the senders are now started");
// FIRST RUN: now, the senders are started. So, do some of the puts
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 200));
LogWriterUtils.getLogWriter().info("Done few puts");
// now, stop all of the senders
vm4.invoke(() -> WANTestBase.stopSender("ln"));
vm5.invoke(() -> WANTestBase.stopSender("ln"));
vm6.invoke(() -> WANTestBase.stopSender("ln"));
vm7.invoke(() -> WANTestBase.stopSender("ln"));
LogWriterUtils.getLogWriter().info("All the senders are stopped");
Wait.pause(2000);
// SECOND RUN: do some of the puts after the senders are stopped
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 1000));
LogWriterUtils.getLogWriter().info("Done some more puts in second run");
// Region size on remote site should remain same and below the number of puts done in the FIRST
// RUN
vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(getTestMethodName() + "_PR", 200));
// SECOND RUN: start async puts on region
AsyncInvocation async =
vm4.invokeAsync(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 5000));
LogWriterUtils.getLogWriter().info("Started high number of puts by async thread");
LogWriterUtils.getLogWriter().info("Starting the senders at the same time");
// when puts are happening by another thread, start the senders
startSenderInVMsAsync("ln", vm4, vm5, vm6, vm7);
LogWriterUtils.getLogWriter().info("All the senders are started");
async.join();
Wait.pause(2000);
// verify all the buckets on all the sender nodes are drained
vm4.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
vm5.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
vm6.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
vm7.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
// verify that the queue size ultimately becomes zero. That means all the events propagate to
// remote site.
vm4.invoke(() -> WANTestBase.validateQueueContents("ln", 0));
}
/**
 * Normal scenario in which a sender is stopped and then started again, on a topology that
 * includes accessor nodes (vm6/vm7 host no data). Puts done while the senders are stopped must
 * not propagate; after the restart, subsequent puts must drain on the data-hosting members
 * (vm4/vm5) and reach the remote site.
 *
 * @throws Exception if any distributed invocation fails
 */
@Test
public void testParallelPropagationSenderStartAfterStopOnAccessorNode() throws Throwable {
// Locators: DS id 1 is the local (ln) site, DS id 2 the remote (ny) site.
Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
createCacheInVMs(nyPort, vm2, vm3);
// Local site: vm4/vm5 host data, vm6/vm7 are accessors (no local storage).
createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegionAsAccessor(getTestMethodName() + "_PR",
"ln", 1, 100));
vm7.invoke(() -> WANTestBase.createPartitionedRegionAsAccessor(getTestMethodName() + "_PR",
"ln", 1, 100));
// Concurrent senders on all four members (4 dispatchers, KEY ordering).
vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 4, OrderPolicy.KEY));
vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 4, OrderPolicy.KEY));
vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 4, OrderPolicy.KEY));
vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
true, 4, OrderPolicy.KEY));
startSenderInVMs("ln", vm4, vm5, vm6, vm7);
// Remote site: regions before receivers.
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
isOffHeap()));
createReceiverInVMs(vm2, vm3);
// make sure all the senders are not running on accessor nodes and running on non-accessor nodes
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
// FIRST RUN: now, the senders are started. So, do some of the puts
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 200));
// now, stop all of the senders
vm4.invoke(() -> WANTestBase.stopSender("ln"));
vm5.invoke(() -> WANTestBase.stopSender("ln"));
vm6.invoke(() -> WANTestBase.stopSender("ln"));
vm7.invoke(() -> WANTestBase.stopSender("ln"));
// SECOND RUN: do some of the puts after the senders are stopped
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 1000));
// Region size on remote site should remain same and below the number of puts done in the FIRST
// RUN
vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(getTestMethodName() + "_PR", 200));
// start the senders again (concurrently), then wait for each start to complete
AsyncInvocation vm4start = vm4.invokeAsync(() -> WANTestBase.startSender("ln"));
AsyncInvocation vm5start = vm5.invokeAsync(() -> WANTestBase.startSender("ln"));
AsyncInvocation vm6start = vm6.invokeAsync(() -> WANTestBase.startSender("ln"));
AsyncInvocation vm7start = vm7.invokeAsync(() -> WANTestBase.startSender("ln"));
vm4start.join();
vm5start.join();
vm6start.join();
vm7start.join();
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
// Region size on remote site should remain same and below the number of puts done in the FIRST
// RUN
vm2.invoke(() -> WANTestBase.validateRegionSizeRemainsSame(getTestMethodName() + "_PR", 200));
// SECOND RUN: do some more puts and wait for them to finish
AsyncInvocation async =
vm4.invokeAsync(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 1000));
async.join();
// verify all buckets drained only on non-accessor nodes.
vm4.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
vm5.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
// verify the events propagate to remote site
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 1000));
}
/**
* Normal scenario in which a combinations of start, pause, resume operations is tested
*/
  @Test
  public void testStartPauseResumeParallelGatewaySender() throws Exception {
    // Locators: vm0 hosts the site-1 ("ln") locator, vm1 the site-2 ("ny") locator.
    Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
    // Remote site: caches plus WAN receivers on vm2/vm3.
    createCacheInVMs(nyPort, vm2, vm3);
    createReceiverInVMs(vm2, vm3);
    // Local site: caches on vm4-vm7.
    createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
    LogWriterUtils.getLogWriter().info("Created cache on local site");
    // Concurrent parallel senders "ln" targeting remote ds-id 2 with 5 dispatcher
    // threads, ordered by KEY. (Meaning of the numeric/boolean arguments is defined
    // by WANTestBase.createConcurrentSender -- confirm there before changing them.)
    vm4.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
        true, 5, OrderPolicy.KEY));
    vm5.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
        true, 5, OrderPolicy.KEY));
    vm6.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
        true, 5, OrderPolicy.KEY));
    vm7.invoke(() -> WANTestBase.createConcurrentSender("ln", 2, true, 100, 10, false, false, null,
        true, 5, OrderPolicy.KEY));
    LogWriterUtils.getLogWriter().info("Created senders on local site");
    // Partitioned region attached to sender "ln" on every local member.
    vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
        isOffHeap()));
    vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
        isOffHeap()));
    vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
        isOffHeap()));
    vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 100,
        isOffHeap()));
    LogWriterUtils.getLogWriter().info("Created PRs on local site");
    // Matching region (no sender attached) on the remote members.
    vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
        isOffHeap()));
    vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR", null, 1, 100,
        isOffHeap()));
    LogWriterUtils.getLogWriter().info("Created PRs on remote site");
    // Seed puts before the senders start, so the user PR's buckets already exist.
    vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 1000));
    LogWriterUtils.getLogWriter().info("Done 1000 puts on local site");
    // Since puts are already done on userPR, it will have the buckets created.
    // During sender start, it will wait until those buckets are created for shadowPR as well.
    // Start the senders in async threads, so colocation of shadowPR will be complete and
    // missing buckets will be created in PRHARedundancyProvider.createMissingBuckets().
    startSenderInVMsAsync("ln", vm4, vm5, vm6, vm7);
    vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
    LogWriterUtils.getLogWriter().info("Started senders on local site");
    // 5000 puts over the same key range (keys 0-4999 overlap the earlier 0-999).
    vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 5000));
    LogWriterUtils.getLogWriter().info("Done 5000 puts on local site");
    // Pause every sender and verify each reports the paused state.
    vm4.invoke(() -> WANTestBase.pauseSender("ln"));
    vm5.invoke(() -> WANTestBase.pauseSender("ln"));
    vm6.invoke(() -> WANTestBase.pauseSender("ln"));
    vm7.invoke(() -> WANTestBase.pauseSender("ln"));
    LogWriterUtils.getLogWriter().info("Paused senders on local site");
    vm4.invoke(() -> WANTestBase.verifySenderPausedState("ln"));
    vm5.invoke(() -> WANTestBase.verifySenderPausedState("ln"));
    vm6.invoke(() -> WANTestBase.verifySenderPausedState("ln"));
    vm7.invoke(() -> WANTestBase.verifySenderPausedState("ln"));
    // Keep traffic flowing concurrently while the senders are resumed.
    AsyncInvocation inv1 =
        vm4.invokeAsync(() -> WANTestBase.doPuts(getTestMethodName() + "_PR", 1000));
    LogWriterUtils.getLogWriter().info("Started 1000 async puts on local site");
    vm4.invoke(() -> WANTestBase.resumeSender("ln"));
    vm5.invoke(() -> WANTestBase.resumeSender("ln"));
    vm6.invoke(() -> WANTestBase.resumeSender("ln"));
    vm7.invoke(() -> WANTestBase.resumeSender("ln"));
    LogWriterUtils.getLogWriter().info("Resumed senders on local site");
    vm4.invoke(() -> WANTestBase.verifySenderResumedState("ln"));
    vm5.invoke(() -> WANTestBase.verifySenderResumedState("ln"));
    vm6.invoke(() -> WANTestBase.verifySenderResumedState("ln"));
    vm7.invoke(() -> WANTestBase.verifySenderResumedState("ln"));
    // Wait for the concurrent puts to finish before validating queue drain.
    try {
      inv1.join();
    } catch (InterruptedException e) {
      e.printStackTrace();
      fail("Interrupted the async invocation.");
    }
    // verify all buckets drained on all sender nodes.
    vm4.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
    vm5.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
    vm6.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
    vm7.invoke(() -> WANTestBase.validateParallelSenderQueueAllBucketsDrained("ln"));
    // All key ranges above are subsets of 0-4999, so the remote regions end at 5000 entries.
    vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 5000));
    vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR", 5000));
  }
}
| |
import java.io.*;
import java.util.*;
/**
 * Demo driver: feeds two scrambled digit-word strings to {@link Solution#originalDigits}
 * and prints each input alongside the reconstructed digits.
 */
public class ReconstructOriginalDigitsFromEnglish
{
    public static void main(String[] args)
    {
        System.out.println("=== Reconstruct Original Digits From English ===");
        final Solution solver = new Solution();
        final String[] testCases = {"owoztneoer", "fviefuro"};
        for (int i = 0; i < testCases.length; i++) {
            System.out.println(testCases[i] + ": " + solver.originalDigits(testCases[i]));
        }
    }
}
class Solution
{
    // The ten digit words; index == digit value.
    String[] numbers = { "zero", "one", "two", "three", "four",
        "five", "six", "seven", "eight", "nine" };
    // Identity map of every character that appears in some digit word.
    HashMap<Character, Character> key_map = null;
    // Per-digit character frequency maps; map_list.get(d) describes numbers[d].
    ArrayList<HashMap<Character, Integer>> map_list = null;
    // Reverse map: character -> list of digits whose word contains it.
    HashMap<Character, ArrayList<Integer>> rmap = null;
    // Characters ordered by how few digit words contain them (most selective first).
    // NOTE: ties are broken by HashMap iteration order, so this ordering is NOT
    // stable across JVM runs. originalDigits() therefore deliberately does not
    // depend on it (the previous implementation did, and produced wrong answers
    // whenever the iteration order differed from the one its indices assumed).
    ArrayList<Character> order = null;

    public Solution() {
        // Collect the distinct characters used by the digit words.
        key_map = new HashMap<Character, Character>();
        for (String n : numbers) {
            for (char ch : n.toCharArray()) {
                if (!key_map.containsKey(ch)) {
                    key_map.put(ch, ch);
                }
            }
        }
        // Frequency map for each digit word.
        map_list = new ArrayList<HashMap<Character, Integer>>();
        for (int i = 0; i < 10; i++) {
            map_list.add(stringToMap(numbers[i]));
        }
        // Reverse index: which digits contain each character.
        rmap = new HashMap<Character, ArrayList<Integer>>();
        for (Character ch : key_map.keySet()) {
            for (int i = 0; i < 10; i++) {
                if (map_list.get(i).containsKey(ch)) {
                    rmap.computeIfAbsent(ch, k -> new ArrayList<Integer>()).add(i);
                }
            }
        }
        // Compute a search order based on rmap (selection sort by ascending
        // selectivity). Kept for mapToFeature(); see the caveat on 'order' above.
        order = new ArrayList<Character>(key_map.keySet());
        for (int i = 0; i < order.size(); i++) {
            for (int j = i + 1; j < order.size(); j++) {
                if (rmap.get(order.get(i)).size() > rmap.get(order.get(j)).size()) {
                    Collections.swap(order, i, j);
                }
            }
        }
    }

    /**
     * Builds a character -> occurrence-count map for the given string.
     */
    public HashMap<Character, Integer> stringToMap(String s) {
        HashMap<Character, Integer> map = new HashMap<Character, Integer>();
        for (char ch : s.toCharArray()) {
            map.merge(ch, 1, Integer::sum);
        }
        return map;
    }

    /**
     * Projects a frequency map onto the 'order' character list, yielding one
     * count per character (0 when absent). Note the result depends on the
     * run-specific 'order' (see field comment).
     */
    public ArrayList<Integer> mapToFeature(HashMap<Character, Integer> map) {
        ArrayList<Integer> feature = new ArrayList<Integer>();
        for (Character ch : order) {
            feature.add(map.getOrDefault(ch, 0));
        }
        return feature;
    }

    /**
     * Reconstructs the digits, in ascending order, from a scrambled string of
     * English digit words (e.g. "owoztneoer" -> "012").
     *
     * Fix: the previous version read hard-coded positions out of a feature
     * vector whose layout depended on HashMap iteration order, so its answers
     * varied by JVM. This version keys off characters that appear in exactly
     * one digit word, then solves the rest by elimination:
     *   'z' only in zero, 'w' only in two, 'u' only in four,
     *   'x' only in six,  'g' only in eight,
     *   'h' in three+eight, 'f' in four+five, 's' in six+seven,
     *   'i' in five+six+eight+nine, 'o' in zero+one+two+four.
     *
     * @param s scrambled concatenation of digit words (assumed valid input)
     * @return the digits in ascending order; empty string for empty input
     */
    public String originalDigits(String s) {
        HashMap<Character, Integer> counts = stringToMap(s);
        int[] digits = new int[10];
        digits[0] = counts.getOrDefault('z', 0);
        digits[2] = counts.getOrDefault('w', 0);
        digits[4] = counts.getOrDefault('u', 0);
        digits[6] = counts.getOrDefault('x', 0);
        digits[8] = counts.getOrDefault('g', 0);
        digits[3] = counts.getOrDefault('h', 0) - digits[8];          // 'h': three, eight
        digits[5] = counts.getOrDefault('f', 0) - digits[4];          // 'f': four, five
        digits[7] = counts.getOrDefault('s', 0) - digits[6];          // 's': six, seven
        digits[9] = counts.getOrDefault('i', 0)                        // 'i': five, six, eight, nine
            - digits[5] - digits[6] - digits[8];
        digits[1] = counts.getOrDefault('o', 0)                        // 'o': zero, one, two, four
            - digits[0] - digits[2] - digits[4];
        StringBuilder ret = new StringBuilder();
        for (int d = 0; d < 10; d++) {
            for (int j = 0; j < digits[d]; j++) {
                ret.append(d);
            }
        }
        return ret.toString();
    }
}
/*
*
=== Reconstruct Original Digits From English ===
order = [g, u, w, x, z, v, h, s, f, t, r, n, i, o, e]
0 feature = [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1]
1 feature = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1]
2 feature = [0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0]
3 feature = [0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 2]
4 feature = [0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0]
5 feature = [0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1]
6 feature = [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0]
7 feature = [0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 2]
8 feature = [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1]
9 feature = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 1]
s_feature = [0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 3, 2]
v = [1, 1, 1, 0, 0, 0, 0, 0, 0, 0]
owoztneoer: null
s_feature = [0, 1, 0, 0, 0, 1, 0, 0, 2, 0, 1, 0, 1, 1, 1]
v = [0, 0, 0, 0, 1, 1, 0, 0, 0, 0]
fviefuro: null
x0 = s4
x2 = s2
x4 = s1
x6 = s3
x8 = s0
x5 + x7 = s5
x3 + x8 = s6
x6 + x7 = s7
x4 + x5 = s8
x2 + x3 + x8 = s9
x0 + x3 + x4 = s10
x1 + x7 + 2*x9 = s11
x5 + x6 + x8 + x9 = s12
x0 + x1 + x2 + x4 = s13
x0 + x1 + 2*x3 + x5 + 2*x7 + x8 + x9 = s14
==================================================
x0 = s4
x1 = s13 - s4 - s2 - s1
x2 = s2
x3 = s6 - s0
x4 = s1
x5 = s8 - s1
x6 = s3
x7 = s7 - s3
x8 = s0
x9 = s12 + s1 - s0 - s3 - s8
==============================
*
* */
| |
package gov.va.oia.terminology.converters.umlsUtils;
import gov.va.oia.terminology.converters.sharedUtils.ConsoleUtil;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
public class Relationship
{
private String name1;
private String description1;
private String name2;
private String description2;
private HashSet<String> name1SnomedCode = new HashSet<String>();
private HashSet<String> name2SnomedCode = new HashSet<String>();
private String name1RelType;
private String name2RelType;
private boolean isRela;
private Boolean swap;
private static HashMap<String, String> preferredNameMap = new HashMap<>();
static
{
//I didn't like the names they provide in the UMLS - so I put those in as descriptions, and use these as the preferred terms.
preferredNameMap.put("PAR", "is parent"); //This is a confusing mess in UMLS - they define it as "has parent" but they really mean "is the parent of"
preferredNameMap.put("CHD", "is child");
preferredNameMap.put("SY", "synonym");
preferredNameMap.put("SIB", "sibling");
preferredNameMap.put("DEL", "deleted");
preferredNameMap.put("RB", "broader");
preferredNameMap.put("RN", "narrower");
preferredNameMap.put("AQ", "allowed qualifier");
preferredNameMap.put("RO", "other");
preferredNameMap.put("RQ", "related, possibly synonymous");
preferredNameMap.put("XR", "not related");
preferredNameMap.put("RL", "alike");
preferredNameMap.put("RU", "related");
preferredNameMap.put("QB", "qualified by");
}
public Relationship(boolean isRela)
{
this.isRela = isRela;
}
public void addDescription(String name, String niceName)
{
if (name.equals(name1))
{
if (description1 != null)
{
throw new RuntimeException("Oops");
}
description1 = niceName;
}
else if (name.equals(name2))
{
if (description2 != null)
{
throw new RuntimeException("Oops");
}
description2 = niceName;
}
else if (name1 == null && name2 == null)
{
if (description1 != null)
{
throw new RuntimeException("Oops");
}
name1 = name;
description1 = niceName;
}
else
{
throw new RuntimeException("Oops");
}
}
public void addRelInverse(String name, String inverseRelName)
{
if (name1 == null && name2 == null)
{
name1 = name;
name2 = inverseRelName;
}
else if (name.equals(name1))
{
if (name2 == null)
{
name2 = inverseRelName;
}
else if (!name2.equals(inverseRelName))
{
throw new RuntimeException("oops");
}
}
else if (name.equals(name2))
{
if (name1 == null)
{
name1 = inverseRelName;
}
else if (!name1.equals(inverseRelName))
{
throw new RuntimeException("oops");
}
}
else
{
throw new RuntimeException("oops");
}
}
public void addSnomedCode(String name, String code)
{
if (name.equals(name1))
{
name1SnomedCode.add(code);
}
else if (name.equals(name2))
{
name2SnomedCode.add(code);
}
else
{
throw new RuntimeException("oops");
}
}
public void addRelType(String name, String type)
{
if (name.equals(name1))
{
if (name1RelType == null)
{
name1RelType = type;
}
else if (!name1RelType.equals(type))
{
throw new RuntimeException("oops");
}
}
else if (name.equals(name2))
{
if (name2RelType == null)
{
name2RelType = type;
}
else if (!name2RelType.equals(type))
{
throw new RuntimeException("oops");
}
}
else
{
throw new RuntimeException("oops");
}
}
public String getFSNName()
{
return swap ? name2 : name1;
}
public String getAltName()
{
return preferredNameMap.get(getFSNName());
}
public String getDescription()
{
return swap ? description2 : description1;
}
public String getInverseFSNName()
{
return swap ? name1 : name2;
}
public String getInverseAltName()
{
return getInverseFSNName() == null ? null : preferredNameMap.get(getInverseFSNName());
}
public String getInverseDescription()
{
return swap ? description1 : description2;
}
public Set<String> getRelSnomedCode()
{
return swap ? name2SnomedCode : name1SnomedCode;
}
public Set<String> getInverseRelSnomedCode()
{
return swap ? name1SnomedCode : name2SnomedCode;
}
public String getRelType()
{
return swap ? name2RelType : name1RelType;
}
public String getInverseRelType()
{
return swap ? name1RelType : name2RelType;
}
public boolean getIsRela()
{
return isRela;
}
public void setSwap(Connection c, String tablePrefix) throws SQLException
{
if (swap != null)
{
throw new RuntimeException("Swap already set!");
}
if (name1 == null && name2 != null)
{
swap = true;
}
else if (name2 == null && name1 != null)
{
swap = false;
}
else if (name1.equals(name2))
{
swap = false;
}
else if (name1.equals("RN") || name2.equals("RN")) //narrower as primary
{
swap = name2.equals("RN");
}
else if (name1.equals("AQ") || name2.equals("AQ")) //allowed qualifier as primary
{
swap = name2.equals("AQ");
}
else if (name1.equals("CHD") || name2.equals("CHD")) //is child as primary
{
swap = name2.equals("CHD");
}
else
{
//Use the primary assignments above, to figure out the more detailed assignments (where possible)
Statement s = c.createStatement();
ResultSet rs = s.executeQuery("Select distinct REL from " + tablePrefix + "REL where RELA='" + name1 + "'");
while (rs.next())
{
if (rs.getString("REL").equals("RO"))
{
//ignore these - they sometimes occur in tandem with a directional one below
continue;
}
if (name1.equals("mapped_from"))
{
//This one is all over the board in UMLS, sometimes tied to RB, sometimes RN, or a whole bunch of other types.
//Just let the code below handle it.
break;
}
String rel = rs.getString("REL");
if (swap != null)
{
//this is a bug? in umls - has_part and inverse_isa appears with both PAR and RB rels - but we set the swap the same for each, so ignore the second one.
// inverse_isa also uses RQ, but just ignore that too.
if ((name1.equals("inverse_isa") || name1.equals("has_part")) && (rel.equals("PAR") || rel.equals("RB") || rel.equals("RQ")))
{
continue;
}
else
{
throw new RuntimeException("too many results on rela " + name1);
}
}
if (new HashSet<String>(Arrays.asList(new String[] {"RB", "RN", "QB", "AQ", "PAR", "CHD"})).contains(rel))
{
if (rel.equals("RN") || rel.equals("AQ") || rel.equals("CHD"))
{
swap = false;
}
else
{
swap = true;
}
}
}
rs.close();
s.close();
//TODO utilize MRREL DIR column - see if that helps. Also talk to Brian, see if there is better code for this.
if (swap == null)
{
if (name1.startsWith("inverse_") || name2.startsWith("inverse_")) //inverse_ things as secondary
{
swap = name1.startsWith("inverse_");
}
else if (name1.startsWith("has_") || name2.startsWith("has_")) //has_ things as secondary
{
swap = name1.startsWith("has_");
}
else if (name1.startsWith("may_be") || name2.startsWith("may_be")) //may_be X as primary
{
swap = name2.startsWith("may_be");
}
else if (name1.contains("_from") || name2.contains("_from")) //X_from as primary
{
swap = name2.contains("_from");
}
else if (name1.contains("_by") || name2.contains("_by")) //X_by as primary
{
swap = name2.contains("_by");
}
else if (name1.contains("_in_") || name2.contains("_in_")) //X_in_ as primary
{
swap = name2.contains("_in_");
}
else if (name1.endsWith("_in") || name2.endsWith("_in")) //X_in as primary
{
swap = name2.endsWith("_in");
}
else if (name1.contains("_is") || name2.contains("_is")) //X_is as primary
{
swap = name2.contains("_is");
}
else if (name1.startsWith("is_") || name2.startsWith("is_")) //is_ as primary
{
swap = name2.startsWith("is_");
}
else if (name1.contains("_has") || name2.contains("_has")) //X_has as secondary
{
swap = name1.contains("_has");
}
else if (name1.equals("larger_than") || name2.equals("larger_than")) //swap smaller_than to primary
{
swap = name1.equals("larger_than");
}
else if (name1.equals("due_to") || name2.equals("due_to")) //due_to as primary, cause_of secondary
{
swap = name2.equals("due_to");
}
else if (name1.equals("occurs_after") || name2.equals("occurs_after")) //occurs_after as primary, occurs_before secondary
{
swap = name2.equals("occurs_after");
}
}
}
if (swap == null)
{
ConsoleUtil.println("No rel direction preference specified for " + name1 + "/" + name2 + " - using " + name1 + " as primary");
swap = false;
}
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.blueprint.container;
import java.lang.reflect.Array;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.lang.reflect.WildcardType;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
import org.apache.aries.blueprint.services.ExtendedBlueprintContainer;
import org.apache.aries.blueprint.di.ExecutionContext;
import org.osgi.framework.Bundle;
import org.osgi.service.blueprint.container.ReifiedType;
/**
* XXXX: Currently, in case of arrays getActualTypeArgument(0) returns something similar to what
* Class.getComponentType() does for arrays. I don't think this is quite right since getActualTypeArgument()
* should return the given parameterized type not the component type. Need to check this behavior with the spec.
*/
public class GenericType extends ReifiedType {

    private static final GenericType[] EMPTY = new GenericType[0];

    // Primitive type names cannot be resolved through ClassLoader.loadClass(),
    // so they are mapped to their Class objects explicitly.
    private static final Map<String, Class> primitiveClasses = new HashMap<String, Class>();

    static {
        primitiveClasses.put("int", int.class);
        primitiveClasses.put("short", short.class);
        primitiveClasses.put("long", long.class);
        primitiveClasses.put("byte", byte.class);
        primitiveClasses.put("char", char.class);
        primitiveClasses.put("float", float.class);
        primitiveClasses.put("double", double.class);
        primitiveClasses.put("boolean", boolean.class);
    }

    /** Wildcard bound kind: Exact for a concrete type, Extends / Super for wildcards. */
    enum BoundType {
        Exact,
        Extends,
        Super
    }

    // Type arguments (empty when not parameterized; for arrays, the component type).
    private GenericType[] parameters;
    private BoundType boundType;

    public GenericType(Type type) {
        this(getConcreteClass(type), boundType(type), parametersOf(type));
    }

    public GenericType(Class clazz, GenericType... parameters) {
        this(clazz, BoundType.Exact, parameters);
    }

    public GenericType(Class clazz, BoundType boundType, GenericType... parameters) {
        super(clazz);
        this.parameters = parameters;
        this.boundType = boundType;
    }

    /**
     * Parses a textual type description such as {@code java.util.List<java.lang.String>},
     * {@code int[]}, or {@code ? extends java.lang.Number} into a GenericType.
     *
     * @param rawType the type string to parse
     * @param loader  the loader used to resolve class names; may be a ClassLoader,
     *                a Bundle, an ExecutionContext or an ExtendedBlueprintContainer
     * @throws ClassNotFoundException if a named class cannot be resolved
     * @throws IllegalArgumentException if the string is malformed or the loader unsupported
     */
    public static GenericType parse(String rawType, final Object loader) throws ClassNotFoundException, IllegalArgumentException {
        final String type = rawType.trim();
        // Check if this is an array
        if (type.endsWith("[]")) {
            GenericType t = parse(type.substring(0, type.length() - 2), loader);
            return new GenericType(Array.newInstance(t.getRawClass(), 0).getClass(), t);
        }
        // Check if this is a generic
        int genericIndex = type.indexOf('<');
        if (genericIndex > 0) {
            if (!type.endsWith(">")) {
                throw new IllegalArgumentException("Can not load type: " + type);
            }
            GenericType base = parse(type.substring(0, genericIndex), loader);
            // Split only on top-level commas; a plain split(",") used to corrupt
            // nested parameterized arguments such as Map<List<A,B>,C>.
            String[] params = splitTopLevel(type.substring(genericIndex + 1, type.length() - 1));
            GenericType[] types = new GenericType[params.length];
            for (int i = 0; i < params.length; i++) {
                types[i] = parse(params[i], loader);
            }
            return new GenericType(base.getRawClass(), types);
        }
        // Primitive
        if (primitiveClasses.containsKey(type)) {
            return new GenericType(primitiveClasses.get(type));
        }
        // Extends
        // NOTE(review): both wildcard branches assume the loader is a ClassLoader and
        // will throw ClassCastException for Bundle/ExecutionContext loaders - confirm
        // whether wildcard strings can reach this method with those loaders.
        if (type.startsWith("? extends ")) {
            String raw = type.substring("? extends ".length());
            return new GenericType(((ClassLoader) loader).loadClass(raw), BoundType.Extends);
        }
        // Super
        if (type.startsWith("? super ")) {
            // Fix: previously stripped "? extends ".length() (10 chars instead of 8),
            // which chopped the first two characters off the class name.
            String raw = type.substring("? super ".length());
            return new GenericType(((ClassLoader) loader).loadClass(raw), BoundType.Super);
        }
        // Class
        if (loader instanceof ClassLoader) {
            return new GenericType(((ClassLoader) loader).loadClass(type));
        } else if (loader instanceof Bundle) {
            try {
                return AccessController.doPrivileged(new PrivilegedExceptionAction<GenericType>() {
                    public GenericType run() throws ClassNotFoundException {
                        return new GenericType(((Bundle) loader).loadClass(type));
                    }
                });
            } catch (PrivilegedActionException pae) {
                Exception e = pae.getException();
                if (e instanceof ClassNotFoundException)
                    throw (ClassNotFoundException) e;
                else
                    throw (RuntimeException) e;
            }
        } else if (loader instanceof ExecutionContext) {
            return new GenericType(((ExecutionContext) loader).loadClass(type));
        } else if (loader instanceof ExtendedBlueprintContainer) {
            return new GenericType(((ExtendedBlueprintContainer) loader).loadClass(type));
        } else {
            throw new IllegalArgumentException("Unsupported loader: " + loader);
        }
    }

    /**
     * Splits a comma separated type-argument list on commas at angle-bracket depth 0,
     * so nested generics stay intact.
     */
    private static String[] splitTopLevel(String s) {
        int depth = 0;
        int count = 1;
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            if (c == '<') depth++;
            else if (c == '>') depth--;
            else if (c == ',' && depth == 0) count++;
        }
        String[] parts = new String[count];
        int start = 0;
        int idx = 0;
        depth = 0;
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            if (c == '<') depth++;
            else if (c == '>') depth--;
            else if (c == ',' && depth == 0) {
                parts[idx++] = s.substring(start, i);
                start = i + 1;
            }
        }
        parts[idx] = s.substring(start);
        return parts;
    }

    @Override
    public ReifiedType getActualTypeArgument(int i) {
        if (parameters.length == 0) {
            return super.getActualTypeArgument(i);
        }
        return parameters[i];
    }

    @Override
    public int size() {
        return parameters.length;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        if (boundType == BoundType.Extends) {
            sb.append("? extends ");
        } else if (boundType == BoundType.Super) {
            sb.append("? super ");
        }
        Class cl = getRawClass();
        if (cl.isArray()) {
            if (parameters.length > 0) {
                return parameters[0].toString() + "[]";
            } else {
                return cl.getComponentType().getName() + "[]";
            }
        }
        sb.append(cl.getName());
        if (parameters.length > 0) {
            sb.append("<");
            for (int i = 0; i < parameters.length; i++) {
                if (i > 0) {
                    sb.append(",");
                }
                sb.append(parameters[i].toString());
            }
            sb.append(">");
        }
        return sb.toString();
    }

    public boolean equals(Object object) {
        if (!(object instanceof GenericType)) {
            return false;
        }
        GenericType other = (GenericType) object;
        if (getRawClass() != other.getRawClass()) {
            return false;
        }
        if (boundType != other.boundType) {
            return false;
        }
        if (parameters == null) {
            return (other.parameters == null);
        } else {
            if (other.parameters == null) {
                return false;
            }
            if (parameters.length != other.parameters.length) {
                return false;
            }
            for (int i = 0; i < parameters.length; i++) {
                if (!parameters[i].equals(other.parameters[i])) {
                    return false;
                }
            }
            return true;
        }
    }

    /**
     * Consistent with {@link #equals(Object)} (raw class, bound type, parameters);
     * the class previously overrode equals without hashCode, which breaks hash-based
     * collections.
     */
    @Override
    public int hashCode() {
        int result = getRawClass().hashCode();
        result = 31 * result + boundType.hashCode();
        if (parameters != null) {
            for (GenericType p : parameters) {
                result = 31 * result + p.hashCode();
            }
        }
        return result;
    }

    /** Strips any wildcard bound, returning the Exact form of the given type. */
    static ReifiedType bound(ReifiedType type) {
        if (type instanceof GenericType
            && ((GenericType) type).boundType != BoundType.Exact) {
            GenericType t = (GenericType) type;
            return new GenericType(t.getRawClass(), BoundType.Exact, t.parameters);
        }
        return type;
    }

    static BoundType boundType(ReifiedType type) {
        if (type instanceof GenericType) {
            return ((GenericType) type).boundType;
        } else {
            return BoundType.Exact;
        }
    }

    static BoundType boundType(Type type) {
        if (type instanceof WildcardType) {
            WildcardType wct = (WildcardType) type;
            return wct.getLowerBounds().length == 0
                    ? BoundType.Extends : BoundType.Super;
        }
        return BoundType.Exact;
    }

    /** Extracts the GenericType parameters of the given reflective Type. */
    static GenericType[] parametersOf(Type type) {
        if (type instanceof Class) {
            Class clazz = (Class) type;
            if (clazz.isArray()) {
                GenericType t = new GenericType(clazz.getComponentType());
                if (t.size() > 0) {
                    return new GenericType[] { t };
                } else {
                    return EMPTY;
                }
            } else {
                return EMPTY;
            }
        }
        if (type instanceof ParameterizedType) {
            ParameterizedType pt = (ParameterizedType) type;
            Type [] parameters = pt.getActualTypeArguments();
            GenericType[] gts = new GenericType[parameters.length];
            for ( int i =0; i<gts.length; i++) {
                gts[i] = new GenericType(parameters[i]);
            }
            return gts;
        }
        if (type instanceof GenericArrayType) {
            return new GenericType[] { new GenericType(((GenericArrayType) type).getGenericComponentType()) };
        }
        if (type instanceof WildcardType) {
            return EMPTY;
        }
        if (type instanceof TypeVariable) {
            return EMPTY;
        }
        throw new IllegalStateException();
    }

    /** Resolves a reflective Type down to a concrete Class (raw type for generics). */
    static Class<?> getConcreteClass(Type type) {
        Type ntype = collapse(type);
        if ( ntype instanceof Class )
            return (Class<?>) ntype;
        if ( ntype instanceof ParameterizedType )
            return getConcreteClass(collapse(((ParameterizedType)ntype).getRawType()));
        throw new RuntimeException("Unknown type " + type );
    }

    /**
     * Collapses type variables, wildcards and generic arrays down to a Class or
     * ParameterizedType (using the first/upper bound where a choice exists).
     */
    static Type collapse(Type target) {
        if (target instanceof Class || target instanceof ParameterizedType ) {
            return target;
        } else if (target instanceof TypeVariable) {
            return collapse(((TypeVariable<?>) target).getBounds()[0]);
        } else if (target instanceof GenericArrayType) {
            Type t = collapse(((GenericArrayType) target)
                    .getGenericComponentType());
            while ( t instanceof ParameterizedType )
                t = collapse(((ParameterizedType)t).getRawType());
            return Array.newInstance((Class<?>)t, 0).getClass();
        } else if (target instanceof WildcardType) {
            WildcardType wct = (WildcardType) target;
            if (wct.getLowerBounds().length == 0)
                return collapse(wct.getUpperBounds()[0]);
            else
                return collapse(wct.getLowerBounds()[0]);
        }
        throw new RuntimeException("Huh? " + target);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Stack;
import org.apache.jasper.JasperException;
import org.apache.jasper.JspCompilationContext;
import org.apache.jasper.xmlparser.XMLEncodingDetector;
import org.apache.tomcat.util.scan.Jar;
import org.xml.sax.Attributes;
/**
* Controller for the parsing of a JSP page.
* <p>
* The same ParserController instance is used for a JSP page and any JSP
* segments included by it (via an include directive), where each segment may
* be provided in standard or XML syntax. This class selects and invokes the
* appropriate parser for the JSP page and its included segments.
*
* @author Pierre Delisle
* @author Jan Luehe
*/
class ParserController implements TagConstants {
    // "charset=" prefix for extracting an encoding from a Content-Type style value;
    // presumably consumed by encoding-detection code later in this class - confirm there.
    private static final String CHARSET = "charset=";
    private final JspCompilationContext ctxt;
    private final Compiler compiler;
    private final ErrorDispatcher err;
    /*
     * Indicates the syntax (XML or standard) of the file being processed
     */
    private boolean isXml;
    /*
     * A stack to keep track of the 'current base directory'
     * for include directives that refer to relative paths.
     */
    private final Stack<String> baseDirStack = new Stack<>();
    // True if the XML prolog of the current file explicitly declares an encoding.
    private boolean isEncodingSpecifiedInProlog;
    // True if the current file starts with a byte order mark.
    private boolean isBomPresent;
    // Number of bytes/characters to skip at the start of the stream (e.g. a BOM).
    private int skip;
    // Encoding used to read the current source file.
    private String sourceEnc;
    // True if sourceEnc fell back to the default page encoding.
    private boolean isDefaultPageEncoding;
    // True if the file currently being parsed is a tag file.
    private boolean isTagFile;
    // True if only directives should be parsed (no full page parse).
    private boolean directiveOnly;
    /*
     * Constructor
     *
     * @param ctxt the compilation context for the page being compiled
     * @param compiler the compiler driving this parse
     */
    public ParserController(JspCompilationContext ctxt, Compiler compiler) {
        this.ctxt = ctxt;
        this.compiler = compiler;
        // Cache the compiler's error dispatcher for reporting parse errors.
        this.err = compiler.getErrorDispatcher();
    }
    /** @return the compilation context this controller was created with */
    public JspCompilationContext getJspCompilationContext () {
        return ctxt;
    }
    /** @return the compiler driving this parse */
    public Compiler getCompiler () {
        return compiler;
    }
    /**
     * Parses a JSP page or tag file. This is invoked by the compiler.
     *
     * @param inFileName The path to the JSP page or tag file to be parsed.
     * @return the parsed page as a tree of nodes
     * @throws FileNotFoundException if the file cannot be located
     * @throws JasperException if a parse error occurs
     * @throws IOException if the file cannot be read
     */
    public Node.Nodes parse(String inFileName)
        throws FileNotFoundException, JasperException, IOException {
        // If we're parsing a packaged tag file or a resource included by it
        // (using an include directive), ctxt.getTagFileJar() returns the
        // JAR file from which to read the tag file or included resource,
        // respectively.
        isTagFile = ctxt.isTagFile();
        directiveOnly = false;
        return doParse(inFileName, null, ctxt.getTagFileJar());
    }
    /**
     * Parses the directives of a JSP page or tag file. This is invoked by the
     * compiler.
     *
     * @param inFileName The path to the JSP page or tag file to be parsed.
     * @return a tree containing only the directive nodes of the file
     * @throws FileNotFoundException if the file cannot be located
     * @throws JasperException if a parse error occurs
     * @throws IOException if the file cannot be read
     */
    public Node.Nodes parseDirectives(String inFileName)
        throws FileNotFoundException, JasperException, IOException {
        // If we're parsing a packaged tag file or a resource included by it
        // (using an include directive), ctxt.getTagFileJar() returns the
        // JAR file from which to read the tag file or included resource,
        // respectively.
        isTagFile = ctxt.isTagFile();
        directiveOnly = true;
        return doParse(inFileName, null, ctxt.getTagFileJar());
    }
    /**
     * Processes an include directive with the given path.
     *
     * @param inFileName The path to the resource to be included.
     * @param parent The parent node of the include directive.
     * @param jar The JAR file from which to read the included resource,
     * or null of the included resource is to be read from the filesystem
     * @return the parsed tree of the included resource
     * @throws FileNotFoundException if the resource cannot be located
     * @throws JasperException if a parse error occurs
     * @throws IOException if the resource cannot be read
     */
    public Node.Nodes parse(String inFileName, Node parent, Jar jar)
        throws FileNotFoundException, JasperException, IOException {
        // For files that are statically included, isTagfile and directiveOnly
        // remain unchanged.
        return doParse(inFileName, parent, jar);
    }
/**
* Extracts tag file directive information from the given tag file.
*
* This is invoked by the compiler
*
* @param inFileName The name of the tag file to be parsed.
* @param jar The location of the tag file.
*/
public Node.Nodes parseTagFileDirectives(String inFileName, Jar jar)
throws FileNotFoundException, JasperException, IOException {
boolean isTagFileSave = isTagFile;
boolean directiveOnlySave = directiveOnly;
isTagFile = true;
directiveOnly = true;
Node.Nodes page = doParse(inFileName, null, jar);
directiveOnly = directiveOnlySave;
isTagFile = isTagFileSave;
return page;
}
    /**
     * Parses the JSP page or tag file with the given path name.
     *
     * Whether the file is treated as a tag file and whether only directives are
     * parsed is controlled by the instance fields 'isTagFile' and
     * 'directiveOnly', which callers set before invoking this method (the old
     * javadoc listed them as parameters, which they no longer are).
     *
     * @param inFileName The name of the JSP page or tag file to be parsed.
     * @param parent The parent node (non-null when processing an include
     * directive)
     * @param jar The JAR file from which to read the JSP page or tag file,
     * or null if the JSP page or tag file is to be read from the filesystem
     */
    private Node.Nodes doParse(String inFileName, Node parent, Jar jar)
        throws FileNotFoundException, JasperException, IOException {
        Node.Nodes parsedPage = null;
        // Reset per-file state; determineSyntaxAndEncoding() repopulates it below.
        isEncodingSpecifiedInProlog = false;
        isBomPresent = false;
        isDefaultPageEncoding = false;
        // resolveFileName() presumably also pushes onto baseDirStack (popped at the
        // end of this method) - confirm in its definition.
        String absFileName = resolveFileName(inFileName);
        String jspConfigPageEnc = getJspConfigPageEncoding(absFileName);
        // Figure out what type of JSP document and encoding type we are
        // dealing with
        determineSyntaxAndEncoding(absFileName, jar, jspConfigPageEnc);
        if (parent != null) {
            // Included resource, add to dependent list
            if (jar == null) {
                compiler.getPageInfo().addDependant(absFileName,
                        ctxt.getLastModified(absFileName));
            } else {
                // Strip the leading '/' to obtain the JAR entry name.
                String entry = absFileName.substring(1);
                compiler.getPageInfo().addDependant(jar.getURL(entry),
                        Long.valueOf(jar.getLastModified(entry)));
            }
        }
        if ((isXml && isEncodingSpecifiedInProlog) || isBomPresent) {
            /*
             * Make sure the encoding explicitly specified in the XML
             * prolog (if any) matches that in the JSP config element
             * (if any), treating "UTF-16", "UTF-16BE", and "UTF-16LE" as
             * identical.
             */
            if (jspConfigPageEnc != null && !jspConfigPageEnc.equals(sourceEnc)
                    && (!jspConfigPageEnc.startsWith("UTF-16")
                            || !sourceEnc.startsWith("UTF-16"))) {
                err.jspError("jsp.error.prolog_config_encoding_mismatch",
                        sourceEnc, jspConfigPageEnc);
            }
        }
        // Dispatch to the appropriate parser
        if (isXml) {
            // JSP document (XML syntax)
            // InputStream for jspx page is created and properly closed in
            // JspDocumentParser.
            parsedPage = JspDocumentParser.parse(this, absFileName, jar, parent,
                    isTagFile, directiveOnly, sourceEnc, jspConfigPageEnc,
                    isEncodingSpecifiedInProlog, isBomPresent);
        } else {
            // Standard syntax
            InputStreamReader inStreamReader = null;
            try {
                inStreamReader = JspUtil.getReader(absFileName, sourceEnc,
                        jar, ctxt, err, skip);
                JspReader jspReader = new JspReader(ctxt, absFileName,
                        inStreamReader, err);
                parsedPage = Parser.parse(this, jspReader, parent, isTagFile,
                        directiveOnly, jar, sourceEnc, jspConfigPageEnc,
                        isDefaultPageEncoding, isBomPresent);
            } finally {
                if (inStreamReader != null) {
                    try {
                        inStreamReader.close();
                    } catch (Exception any) {
                        // Best-effort close; the parse result (or the original
                        // exception) is what matters here.
                    }
                }
            }
        }
        // Balance the push performed while resolving the file name above.
        baseDirStack.pop();
        return parsedPage;
    }
/*
 * Looks up the JSP configuration (jsp-property-group elements in
 * web.xml) whose URL pattern matches the given URI.
 *
 * @param absFileName The URI to match
 *
 * @return The value of the <page-encoding> element of the matching
 * jsp-property-group, or null if none is configured
 */
private String getJspConfigPageEncoding(String absFileName) {
    return ctxt.getOptions().getJspConfig()
            .findJspProperty(absFileName)
            .getPageEncoding();
}
/**
 * Determines the syntax (standard or XML) and page encoding properties
 * for the given file, and stores them in the 'isXml' and 'sourceEnc'
 * instance variables, respectively. May also set
 * 'isEncodingSpecifiedInProlog', 'isBomPresent', 'isDefaultPageEncoding'
 * and 'skip' as side effects.
 */
private void determineSyntaxAndEncoding(String absFileName, Jar jar,
        String jspConfigPageEnc)
        throws JasperException, IOException {
    isXml = false;

    /*
     * 'true' if the syntax (XML or standard) of the file is given
     * from external information: either via a JSP configuration element,
     * the ".jspx" suffix, or the enclosing file (for included resources)
     */
    boolean isExternal = false;

    /*
     * Indicates whether we need to revert from temporary usage of
     * "ISO-8859-1" back to "UTF-8"
     */
    boolean revert = false;

    JspConfig jspConfig = ctxt.getOptions().getJspConfig();
    JspConfig.JspProperty jspProperty = jspConfig.findJspProperty(
            absFileName);
    if (jspProperty.isXml() != null) {
        // If <is-xml> is specified in a <jsp-property-group>, it is used.
        isXml = JspUtil.booleanValue(jspProperty.isXml());
        isExternal = true;
    } else if (absFileName.endsWith(".jspx")
            || absFileName.endsWith(".tagx")) {
        // The suffix alone marks the file as an XML-syntax document
        isXml = true;
        isExternal = true;
    }

    if (isExternal && !isXml) {
        // JSP (standard) syntax. Use encoding specified in jsp-config
        // if provided.
        sourceEnc = jspConfigPageEnc;
        if (sourceEnc != null) {
            return;
        }
        // We don't know the encoding, so use BOM to determine it
        sourceEnc = "ISO-8859-1";
    } else {
        // XML syntax or unknown, (auto)detect encoding ...
        // ret holds: [0]=encoding, [1]=specified-in-prolog flag,
        // [2]=BOM-present flag, [3]=number of BOM bytes to skip
        Object[] ret = XMLEncodingDetector.getEncoding(absFileName, jar,
                ctxt, err);
        sourceEnc = (String) ret[0];
        if (((Boolean) ret[1]).booleanValue()) {
            isEncodingSpecifiedInProlog = true;
        }
        if (((Boolean) ret[2]).booleanValue()) {
            isBomPresent = true;
        }
        skip = ((Integer) ret[3]).intValue();

        if (!isXml && sourceEnc.equals("UTF-8")) {
            /*
             * We don't know if we're dealing with XML or standard syntax.
             * Therefore, we need to check to see if the page contains
             * a <jsp:root> element.
             *
             * We need to be careful, because the page may be encoded in
             * ISO-8859-1 (or something entirely different), and may
             * contain byte sequences that will cause a UTF-8 converter to
             * throw exceptions.
             *
             * It is safe to use a source encoding of ISO-8859-1 in this
             * case, as there are no invalid byte sequences in ISO-8859-1,
             * and the byte/character sequences we're looking for (i.e.,
             * <jsp:root>) are identical in either encoding (both UTF-8
             * and ISO-8859-1 are extensions of ASCII).
             */
            sourceEnc = "ISO-8859-1";
            revert = true;
        }
    }

    if (isXml) {
        // (This implies 'isExternal' is TRUE.)
        // We know we're dealing with a JSP document (via JSP config or
        // ".jspx" suffix), so we're done.
        return;
    }

    /*
     * At this point, 'isExternal' or 'isXml' is FALSE.
     * Search for jsp:root action, in order to determine if we're dealing
     * with XML or standard syntax (unless we already know what we're
     * dealing with, i.e., when 'isExternal' is TRUE and 'isXml' is FALSE).
     * No check for XML prolog, since nothing prevents a page from
     * outputting XML and still using JSP syntax (in this case, the
     * XML prolog is treated as template text).
     */
    JspReader jspReader = null;
    try {
        jspReader = new JspReader(ctxt, absFileName, sourceEnc, jar, err);
    } catch (FileNotFoundException ex) {
        // Wrap so callers only see Jasper's own exception hierarchy here
        throw new JasperException(ex);
    }
    jspReader.setSingleFile(true);
    Mark startMark = jspReader.mark();
    if (!isExternal) {
        jspReader.reset(startMark);
        if (hasJspRoot(jspReader)) {
            if (revert) {
                // Detection was temporarily run as ISO-8859-1; restore
                sourceEnc = "UTF-8";
            }
            isXml = true;
            return;
        } else {
            if (revert && isBomPresent) {
                sourceEnc = "UTF-8";
            }
            isXml = false;
        }
    }

    /*
     * At this point, we know we're dealing with JSP syntax.
     * If an XML prolog is provided, it's treated as template text.
     * Determine the page encoding from the page directive, unless it's
     * specified via JSP config.
     */
    if (!isBomPresent) {
        sourceEnc = jspConfigPageEnc;
        if (sourceEnc == null) {
            sourceEnc = getPageEncodingForJspSyntax(jspReader, startMark);
            if (sourceEnc == null) {
                // Default to "ISO-8859-1" per JSP spec
                sourceEnc = "ISO-8859-1";
                isDefaultPageEncoding = true;
            }
        }
    }
}
/*
 * Determines page source encoding for page or tag file in JSP syntax,
 * by reading (in this order) the value of the 'pageEncoding' page
 * directive attribute, or the charset value of the 'contentType' page
 * directive attribute.
 *
 * @param jspReader reader over the page being scanned
 * @param startMark position to rewind to before scanning
 *
 * @return The page encoding, or null if not found
 */
private String getPageEncodingForJspSyntax(JspReader jspReader,
        Mark startMark)
        throws JasperException {

    String encoding = null;
    // Charset taken from a 'contentType' attribute; only used if no
    // explicit 'pageEncoding' attribute is ever found.
    String saveEncoding = null;

    jspReader.reset(startMark);

    /*
     * Determine page encoding from directive of the form <%@ page %>,
     * <%@ tag %>, <jsp:directive.page > or <jsp:directive.tag >.
     */
    while (true) {
        if (jspReader.skipUntil("<") == null) {
            // End of page reached without finding another element
            break;
        }
        // If this is a comment, skip until its end
        if (jspReader.matches("%--")) {
            if (jspReader.skipUntil("--%>") == null) {
                // error will be caught in Parser
                break;
            }
            continue;
        }
        boolean isDirective = jspReader.matches("%@");
        if (isDirective) {
            jspReader.skipSpaces();
        }
        else {
            // XML-form directive: <jsp:directive.page> / <jsp:directive.tag>
            isDirective = jspReader.matches("jsp:directive.");
        }
        if (!isDirective) {
            continue;
        }

        // compare for "tag ", so we don't match "taglib"
        if (jspReader.matches("tag ") || jspReader.matches("page")) {

            jspReader.skipSpaces();
            Attributes attrs = Parser.parseAttributes(this, jspReader);
            encoding = getPageEncodingFromDirective(attrs, "pageEncoding");
            if (encoding != null) {
                // An explicit pageEncoding attribute wins outright
                break;
            }
            // Remember a contentType charset, but keep scanning: a
            // 'pageEncoding' in a later directive takes precedence.
            encoding = getPageEncodingFromDirective(attrs, "contentType");
            if (encoding != null) {
                saveEncoding = encoding;
            }
        }
    }

    if (encoding == null) {
        // Fall back to the last contentType-derived charset, if any
        encoding = saveEncoding;
    }

    return encoding;
}
/*
 * Scans the given attributes for the attribute with the given name,
 * which is either 'pageEncoding' or 'contentType', and returns the
 * specified page encoding.
 *
 * In the case of 'contentType', the page encoding is taken from the
 * content type's 'charset' component.
 *
 * @param attrs The page directive attributes
 * @param attrName The name of the attribute to search for (either
 * 'pageEncoding' or 'contentType')
 *
 * @return The page encoding, or null
 */
private String getPageEncodingFromDirective(Attributes attrs,
        String attrName) {
    String value = attrs.getValue(attrName);
    if (attrName.equals("pageEncoding")) {
        // 'pageEncoding' is the encoding itself
        return value;
    }
    // attrName is "contentType": extract the charset component, if any
    if (value == null) {
        return null;
    }
    int loc = value.indexOf(CHARSET);
    if (loc == -1) {
        return null;
    }
    return value.substring(loc + CHARSET.length());
}
/*
 * Resolves the given file name (relative names are resolved against the
 * directory on top of baseDirStack) and pushes the resolved file's
 * directory onto baseDirStack, so that subsequently included files are
 * resolved relative to it.
 * The 'root' file is always an 'absolute' path, so no need to put an
 * initial value in the baseDirStack.
 */
private String resolveFileName(String inFileName) {
    String fileName = inFileName.replace('\\', '/');
    if (!fileName.startsWith("/")) {
        // Relative path: resolve against the current base directory
        fileName = baseDirStack.peek() + fileName;
    }
    // Track this file's directory for nested includes
    baseDirStack.push(fileName.substring(0, fileName.lastIndexOf("/") + 1));
    return fileName;
}
/*
 * Checks to see if the given page contains, as its first element, a <root>
 * element whose prefix is bound to the JSP namespace, as in:
 *
 * <wombat:root xmlns:wombat="http://java.sun.com/JSP/Page" version="1.2">
 * ...
 * </wombat:root>
 *
 * @param reader The reader for this page
 *
 * @return true if this page contains a root element whose prefix is bound
 * to the JSP namespace, and false otherwise
 */
private boolean hasJspRoot(JspReader reader) throws JasperException {

    // <prefix>:root must be the first element; skip anything starting
    // with '!' (comments, DOCTYPE) or '?' (XML prolog / PIs).
    Mark start = null;
    while ((start = reader.skipUntil("<")) != null) {
        int c = reader.nextChar();
        if (c != '!' && c != '?') break;
    }
    if (start == null) {
        // No element at all
        return false;
    }

    Mark stop = reader.skipUntil(":root");
    if (stop == null) {
        return false;
    }
    // call substring to get rid of leading '<'
    String prefix = reader.getText(start, stop).substring(1);

    start = stop;
    stop = reader.skipUntil(">");
    if (stop == null) {
        // Unterminated start tag
        return false;
    }

    // Determine namespace associated with <root> element's prefix
    String root = reader.getText(start, stop);
    String xmlnsDecl = "xmlns:" + prefix;
    int index = root.indexOf(xmlnsDecl);
    if (index == -1) {
        // Prefix is never bound inside the start tag
        return false;
    }
    index += xmlnsDecl.length();
    // Skip whitespace between the declaration name and '='
    while (index < root.length()
            && Character.isWhitespace(root.charAt(index))) {
        index++;
    }
    if (index < root.length() && root.charAt(index) == '=') {
        index++;
        // Skip whitespace between '=' and the opening quote
        while (index < root.length()
                && Character.isWhitespace(root.charAt(index))) {
            index++;
        }
        if (index < root.length()
                && (root.charAt(index) == '"' || root.charAt(index) == '\'')) {
            index++;
            // The prefix is bound to the JSP namespace iff the quoted
            // value starts with JSP_URI
            if (root.regionMatches(index, JSP_URI, 0, JSP_URI.length())) {
                return true;
            }
        }
    }

    return false;
}
}
| |
package nl.esciencecenter.e3dchem.knime.plants.configure;
import java.util.HashMap;
import java.util.Map;
import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeSettingsRO;
import org.knime.core.node.NodeSettingsWO;
import org.knime.core.node.defaultnodesettings.SettingsModelBoolean;
import org.knime.core.node.defaultnodesettings.SettingsModelDouble;
import org.knime.core.node.defaultnodesettings.SettingsModelInteger;
import org.knime.core.node.defaultnodesettings.SettingsModelNumber;
import org.knime.core.node.defaultnodesettings.SettingsModelString;
/**
 * Settings container for the PLANTS "configure" KNIME node.
 *
 * Each public field is a KNIME settings model; the first constructor
 * argument of every model is its settings key. The save/load/validate
 * methods delegate to every model in turn, and {@code asMap()} renders the
 * current values as a map (booleans as the strings "1"/"0").
 */
public class ConfigureConfig {
    // search algorithm
    public SettingsModelStringSet search_speed = new SettingsModelStringSet("search_speed", "speed1", "speed1", "speed2",
            "speed4");
    public SettingsModelNumber aco_ants = new SettingsModelInteger("aco_ants", 20);
    public SettingsModelBoolean flip_amide_bonds = new SettingsModelBoolean("flip_amide_bonds", false);
    public SettingsModelBoolean flip_planar_n = new SettingsModelBoolean("flip_planar_n", true);
    public SettingsModelBoolean force_flipped_bonds_planarity = new SettingsModelBoolean("force_flipped_bonds_planarity", false);
    public SettingsModelBoolean force_planar_bond_rotation = new SettingsModelBoolean("force_planar_bond_rotation", true);
    public SettingsModelBoolean rescore_simplex = new SettingsModelBoolean("rescore_simplex", true);
    public SettingsModelBoolean flip_ring_corners = new SettingsModelBoolean("flip_ring_corners", false);

    // binding site
    public SettingsModelNumber bindingsite_center_x = new SettingsModelDouble("bindingsite_center_x", 0.0);
    public SettingsModelNumber bindingsite_center_y = new SettingsModelDouble("bindingsite_center_y", 0.0);
    public SettingsModelNumber bindingsite_center_z = new SettingsModelDouble("bindingsite_center_z", 0.0);
    public SettingsModelNumber bindingsite_radius = new SettingsModelDouble("bindingsite_radius", 1.0);

    // cluster algorithm
    public SettingsModelNumber cluster_rmsd = new SettingsModelDouble("cluster_rmsd", 2.0);
    public SettingsModelNumber cluster_structures = new SettingsModelInteger("cluster_structures", 10);

    // scoring functions
    // Intermolecular (protein-ligand interaction scoring)
    public SettingsModelStringSet scoring_function = new SettingsModelStringSet("scoring_function", "chemplp", "chemplp", "plp",
            "plp95");
    public SettingsModelNumber outside_binding_site_penalty = new SettingsModelDouble("outside_binding_site_penalty", 50.0);
    public SettingsModelBoolean enable_sulphur_acceptors = new SettingsModelBoolean("enable_sulphur_acceptors", false);
    // Intramolecular ligand scoring
    public SettingsModelStringSet ligand_intra_score = new SettingsModelStringSet("ligand_intra_score", "clash2", "clash",
            "clash2", "lj");
    public SettingsModelBoolean chemplp_clash_include_14 = new SettingsModelBoolean("chemplp_clash_include_14", true);
    public SettingsModelBoolean chemplp_clash_include_HH = new SettingsModelBoolean("chemplp_clash_include_HH", false);

    // input
    public SettingsModelString protein_file = new SettingsModelString("protein_file", "protein.mol2");
    public SettingsModelString ligand_file = new SettingsModelString("ligand_file", "ligands.mol2");

    // output
    public SettingsModelString output_dir = new SettingsModelString("output_dir", "results");
    public SettingsModelBoolean write_protein_conformations = new SettingsModelBoolean("write_protein_conformations", true);
    public SettingsModelBoolean write_protein_bindingsite = new SettingsModelBoolean("write_protein_bindingsite", true);
    public SettingsModelBoolean write_protein_splitted = new SettingsModelBoolean("write_protein_splitted", true);
    public SettingsModelBoolean write_multi_mol2 = new SettingsModelBoolean("write_multi_mol2", true);
    public SettingsModelBoolean write_ranking_links = new SettingsModelBoolean("write_ranking_links", false);
    public SettingsModelBoolean write_merged_protein = new SettingsModelBoolean("write_merged_protein", false);

    /**
     * Saves the value of every settings model into the given settings
     * object.
     *
     * @param settings destination to write the model values to
     */
    public void saveSettingsTo(final NodeSettingsWO settings) {
        // search algorithm
        search_speed.saveSettingsTo(settings);
        aco_ants.saveSettingsTo(settings);
        flip_amide_bonds.saveSettingsTo(settings);
        flip_planar_n.saveSettingsTo(settings);
        force_flipped_bonds_planarity.saveSettingsTo(settings);
        force_planar_bond_rotation.saveSettingsTo(settings);
        rescore_simplex.saveSettingsTo(settings);
        flip_ring_corners.saveSettingsTo(settings);
        // binding site
        bindingsite_center_x.saveSettingsTo(settings);
        bindingsite_center_y.saveSettingsTo(settings);
        bindingsite_center_z.saveSettingsTo(settings);
        bindingsite_radius.saveSettingsTo(settings);
        // cluster algorithm
        cluster_rmsd.saveSettingsTo(settings);
        cluster_structures.saveSettingsTo(settings);
        // scoring functions
        scoring_function.saveSettingsTo(settings);
        outside_binding_site_penalty.saveSettingsTo(settings);
        enable_sulphur_acceptors.saveSettingsTo(settings);
        ligand_intra_score.saveSettingsTo(settings);
        chemplp_clash_include_14.saveSettingsTo(settings);
        chemplp_clash_include_HH.saveSettingsTo(settings);
        // input
        protein_file.saveSettingsTo(settings);
        ligand_file.saveSettingsTo(settings);
        // output
        output_dir.saveSettingsTo(settings);
        write_protein_conformations.saveSettingsTo(settings);
        write_protein_bindingsite.saveSettingsTo(settings);
        write_protein_splitted.saveSettingsTo(settings);
        write_multi_mol2.saveSettingsTo(settings);
        write_ranking_links.saveSettingsTo(settings);
        write_merged_protein.saveSettingsTo(settings);
    }

    /**
     * Loads previously validated values into every settings model.
     *
     * @param settings source to read the model values from
     * @throws InvalidSettingsException if any model cannot load its value
     */
    public void loadValidatedSettingsFrom(final NodeSettingsRO settings) throws InvalidSettingsException {
        // search algorithm
        search_speed.loadSettingsFrom(settings);
        aco_ants.loadSettingsFrom(settings);
        flip_amide_bonds.loadSettingsFrom(settings);
        flip_planar_n.loadSettingsFrom(settings);
        force_flipped_bonds_planarity.loadSettingsFrom(settings);
        force_planar_bond_rotation.loadSettingsFrom(settings);
        rescore_simplex.loadSettingsFrom(settings);
        flip_ring_corners.loadSettingsFrom(settings);
        // binding site
        bindingsite_center_x.loadSettingsFrom(settings);
        bindingsite_center_y.loadSettingsFrom(settings);
        bindingsite_center_z.loadSettingsFrom(settings);
        bindingsite_radius.loadSettingsFrom(settings);
        // cluster algorithm
        cluster_rmsd.loadSettingsFrom(settings);
        cluster_structures.loadSettingsFrom(settings);
        // scoring functions
        scoring_function.loadSettingsFrom(settings);
        outside_binding_site_penalty.loadSettingsFrom(settings);
        enable_sulphur_acceptors.loadSettingsFrom(settings);
        ligand_intra_score.loadSettingsFrom(settings);
        chemplp_clash_include_14.loadSettingsFrom(settings);
        chemplp_clash_include_HH.loadSettingsFrom(settings);
        // input
        protein_file.loadSettingsFrom(settings);
        ligand_file.loadSettingsFrom(settings);
        // output
        output_dir.loadSettingsFrom(settings);
        write_protein_conformations.loadSettingsFrom(settings);
        write_protein_bindingsite.loadSettingsFrom(settings);
        write_protein_splitted.loadSettingsFrom(settings);
        write_multi_mol2.loadSettingsFrom(settings);
        write_ranking_links.loadSettingsFrom(settings);
        write_merged_protein.loadSettingsFrom(settings);
    }

    /**
     * Validates the given settings against every settings model without
     * changing the models' current values.
     *
     * @param settings settings to validate
     * @throws InvalidSettingsException if any model rejects its value
     */
    public void validateSettings(final NodeSettingsRO settings) throws InvalidSettingsException {
        // search algorithm
        search_speed.validateSettings(settings);
        aco_ants.validateSettings(settings);
        flip_amide_bonds.validateSettings(settings);
        flip_planar_n.validateSettings(settings);
        force_flipped_bonds_planarity.validateSettings(settings);
        force_planar_bond_rotation.validateSettings(settings);
        rescore_simplex.validateSettings(settings);
        flip_ring_corners.validateSettings(settings);
        // binding site
        bindingsite_center_x.validateSettings(settings);
        bindingsite_center_y.validateSettings(settings);
        bindingsite_center_z.validateSettings(settings);
        bindingsite_radius.validateSettings(settings);
        // cluster algorithm
        cluster_rmsd.validateSettings(settings);
        cluster_structures.validateSettings(settings);
        // scoring functions
        scoring_function.validateSettings(settings);
        outside_binding_site_penalty.validateSettings(settings);
        enable_sulphur_acceptors.validateSettings(settings);
        ligand_intra_score.validateSettings(settings);
        chemplp_clash_include_14.validateSettings(settings);
        chemplp_clash_include_HH.validateSettings(settings);
        // input
        protein_file.validateSettings(settings);
        ligand_file.validateSettings(settings);
        // output
        output_dir.validateSettings(settings);
        write_protein_conformations.validateSettings(settings);
        write_protein_bindingsite.validateSettings(settings);
        write_protein_splitted.validateSettings(settings);
        write_multi_mol2.validateSettings(settings);
        write_ranking_links.validateSettings(settings);
        write_merged_protein.validateSettings(settings);
    }

    /**
     * Returns the current configuration as a map of setting name to value.
     *
     * Booleans are rendered as the strings "1"/"0"; numeric and string
     * settings keep their Java types. Note that {@code rescore_simplex} is
     * exposed under the key "rescore_mode" with value "simplex" or
     * "no_simplex".
     *
     * @return map of setting name to current value
     */
    public Map<String, Object> asMap() {
        HashMap<String, Object> map = new HashMap<String, Object>();
        // search algorithm
        map.put("search_speed", search_speed.getStringValue());
        map.put("aco_ants", ((SettingsModelInteger) aco_ants).getIntValue());
        map.put("flip_amide_bonds", flip_amide_bonds.getBooleanValue() ? "1" : "0");
        map.put("flip_planar_n", flip_planar_n.getBooleanValue() ? "1" : "0");
        map.put("force_flipped_bonds_planarity", force_flipped_bonds_planarity.getBooleanValue() ? "1" : "0");
        map.put("force_planar_bond_rotation", force_planar_bond_rotation.getBooleanValue() ? "1" : "0");
        map.put("rescore_mode", rescore_simplex.getBooleanValue() ? "simplex" : "no_simplex");
        map.put("flip_ring_corners", flip_ring_corners.getBooleanValue() ? "1" : "0");
        // binding site
        map.put("bindingsite_center_x", ((SettingsModelDouble) bindingsite_center_x).getDoubleValue());
        map.put("bindingsite_center_y", ((SettingsModelDouble) bindingsite_center_y).getDoubleValue());
        map.put("bindingsite_center_z", ((SettingsModelDouble) bindingsite_center_z).getDoubleValue());
        map.put("bindingsite_radius", ((SettingsModelDouble) bindingsite_radius).getDoubleValue());
        // cluster algorithm
        map.put("cluster_rmsd", ((SettingsModelDouble) cluster_rmsd).getDoubleValue());
        map.put("cluster_structures", ((SettingsModelInteger) cluster_structures).getIntValue());
        // scoring functions
        map.put("scoring_function", scoring_function.getStringValue());
        map.put("outside_binding_site_penalty", ((SettingsModelDouble) outside_binding_site_penalty).getDoubleValue());
        map.put("enable_sulphur_acceptors", enable_sulphur_acceptors.getBooleanValue() ? "1" : "0");
        map.put("ligand_intra_score", ligand_intra_score.getStringValue());
        map.put("chemplp_clash_include_14", chemplp_clash_include_14.getBooleanValue() ? "1" : "0");
        map.put("chemplp_clash_include_HH", chemplp_clash_include_HH.getBooleanValue() ? "1" : "0");
        // input
        map.put("protein_file", protein_file.getStringValue());
        map.put("ligand_file", ligand_file.getStringValue());
        // output
        map.put("output_dir", output_dir.getStringValue());
        map.put("write_protein_conformations", write_protein_conformations.getBooleanValue() ? "1" : "0");
        map.put("write_protein_bindingsite", write_protein_bindingsite.getBooleanValue() ? "1" : "0");
        map.put("write_protein_splitted", write_protein_splitted.getBooleanValue() ? "1" : "0");
        map.put("write_multi_mol2", write_multi_mol2.getBooleanValue() ? "1" : "0");
        map.put("write_ranking_links", write_ranking_links.getBooleanValue() ? "1" : "0");
        map.put("write_merged_protein", write_merged_protein.getBooleanValue() ? "1" : "0");
        return map;
    }
}
| |
package cherry.common.db.gen.dto;
import cherry.foundation.type.DeletedFlag;
import java.io.Serializable;
import org.joda.time.LocalDateTime;
/**
 * DTO representing one row of the ASYNC_PROCESS_COMMAND table.
 *
 * Generated by MyBatis Generator (see the {@code @mbggenerated} markers);
 * if the table definition changes, regenerate this class rather than
 * editing it by hand. Note that {@code toString()} embeds the current
 * {@code hashCode()} value, and equality is field-by-field over all
 * columns.
 */
public class AsyncProcessCommand implements Serializable {
    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.ID
     *
     * @mbggenerated
     */
    private Long id;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.ASYNC_ID
     *
     * @mbggenerated
     */
    private Long asyncId;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.COMMAND
     *
     * @mbggenerated
     */
    private String command;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.UPDATED_AT
     *
     * @mbggenerated
     */
    private LocalDateTime updatedAt;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.CREATED_AT
     *
     * @mbggenerated
     */
    private LocalDateTime createdAt;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.LOCK_VERSION
     *
     * @mbggenerated
     */
    private Integer lockVersion;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database column ASYNC_PROCESS_COMMAND.DELETED_FLG
     *
     * @mbggenerated
     */
    private DeletedFlag deletedFlg;

    /**
     * This field was generated by MyBatis Generator.
     * This field corresponds to the database table ASYNC_PROCESS_COMMAND
     *
     * @mbggenerated
     */
    private static final long serialVersionUID = 1L;

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.ID
     *
     * @return the value of ASYNC_PROCESS_COMMAND.ID
     *
     * @mbggenerated
     */
    public Long getId() {
        return id;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.ID
     *
     * @param id the value for ASYNC_PROCESS_COMMAND.ID
     *
     * @mbggenerated
     */
    public void setId(Long id) {
        this.id = id;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.ASYNC_ID
     *
     * @return the value of ASYNC_PROCESS_COMMAND.ASYNC_ID
     *
     * @mbggenerated
     */
    public Long getAsyncId() {
        return asyncId;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.ASYNC_ID
     *
     * @param asyncId the value for ASYNC_PROCESS_COMMAND.ASYNC_ID
     *
     * @mbggenerated
     */
    public void setAsyncId(Long asyncId) {
        this.asyncId = asyncId;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.COMMAND
     *
     * @return the value of ASYNC_PROCESS_COMMAND.COMMAND
     *
     * @mbggenerated
     */
    public String getCommand() {
        return command;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.COMMAND
     *
     * @param command the value for ASYNC_PROCESS_COMMAND.COMMAND
     *
     * @mbggenerated
     */
    public void setCommand(String command) {
        this.command = command;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.UPDATED_AT
     *
     * @return the value of ASYNC_PROCESS_COMMAND.UPDATED_AT
     *
     * @mbggenerated
     */
    public LocalDateTime getUpdatedAt() {
        return updatedAt;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.UPDATED_AT
     *
     * @param updatedAt the value for ASYNC_PROCESS_COMMAND.UPDATED_AT
     *
     * @mbggenerated
     */
    public void setUpdatedAt(LocalDateTime updatedAt) {
        this.updatedAt = updatedAt;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.CREATED_AT
     *
     * @return the value of ASYNC_PROCESS_COMMAND.CREATED_AT
     *
     * @mbggenerated
     */
    public LocalDateTime getCreatedAt() {
        return createdAt;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.CREATED_AT
     *
     * @param createdAt the value for ASYNC_PROCESS_COMMAND.CREATED_AT
     *
     * @mbggenerated
     */
    public void setCreatedAt(LocalDateTime createdAt) {
        this.createdAt = createdAt;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.LOCK_VERSION
     *
     * @return the value of ASYNC_PROCESS_COMMAND.LOCK_VERSION
     *
     * @mbggenerated
     */
    public Integer getLockVersion() {
        return lockVersion;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.LOCK_VERSION
     *
     * @param lockVersion the value for ASYNC_PROCESS_COMMAND.LOCK_VERSION
     *
     * @mbggenerated
     */
    public void setLockVersion(Integer lockVersion) {
        this.lockVersion = lockVersion;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method returns the value of the database column ASYNC_PROCESS_COMMAND.DELETED_FLG
     *
     * @return the value of ASYNC_PROCESS_COMMAND.DELETED_FLG
     *
     * @mbggenerated
     */
    public DeletedFlag getDeletedFlg() {
        return deletedFlg;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method sets the value of the database column ASYNC_PROCESS_COMMAND.DELETED_FLG
     *
     * @param deletedFlg the value for ASYNC_PROCESS_COMMAND.DELETED_FLG
     *
     * @mbggenerated
     */
    public void setDeletedFlg(DeletedFlag deletedFlg) {
        this.deletedFlg = deletedFlg;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table ASYNC_PROCESS_COMMAND
     *
     * Null-safe, field-by-field equality over every column; the compared
     * object must be of exactly this class (getClass check, not instanceof).
     *
     * @mbggenerated
     */
    @Override
    public boolean equals(Object that) {
        if (this == that) {
            return true;
        }
        if (that == null) {
            return false;
        }
        if (getClass() != that.getClass()) {
            return false;
        }
        AsyncProcessCommand other = (AsyncProcessCommand) that;
        return (this.getId() == null ? other.getId() == null : this.getId().equals(other.getId()))
            && (this.getAsyncId() == null ? other.getAsyncId() == null : this.getAsyncId().equals(other.getAsyncId()))
            && (this.getCommand() == null ? other.getCommand() == null : this.getCommand().equals(other.getCommand()))
            && (this.getUpdatedAt() == null ? other.getUpdatedAt() == null : this.getUpdatedAt().equals(other.getUpdatedAt()))
            && (this.getCreatedAt() == null ? other.getCreatedAt() == null : this.getCreatedAt().equals(other.getCreatedAt()))
            && (this.getLockVersion() == null ? other.getLockVersion() == null : this.getLockVersion().equals(other.getLockVersion()))
            && (this.getDeletedFlg() == null ? other.getDeletedFlg() == null : this.getDeletedFlg().equals(other.getDeletedFlg()));
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table ASYNC_PROCESS_COMMAND
     *
     * Standard prime-31 accumulation over every column (null contributes 0),
     * consistent with equals().
     *
     * @mbggenerated
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getId() == null) ? 0 : getId().hashCode());
        result = prime * result + ((getAsyncId() == null) ? 0 : getAsyncId().hashCode());
        result = prime * result + ((getCommand() == null) ? 0 : getCommand().hashCode());
        result = prime * result + ((getUpdatedAt() == null) ? 0 : getUpdatedAt().hashCode());
        result = prime * result + ((getCreatedAt() == null) ? 0 : getCreatedAt().hashCode());
        result = prime * result + ((getLockVersion() == null) ? 0 : getLockVersion().hashCode());
        result = prime * result + ((getDeletedFlg() == null) ? 0 : getDeletedFlg().hashCode());
        return result;
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table ASYNC_PROCESS_COMMAND
     *
     * Renders every column value; also embeds the current hashCode()
     * and the serialVersionUID.
     *
     * @mbggenerated
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" [");
        sb.append("Hash = ").append(hashCode());
        sb.append(", id=").append(id);
        sb.append(", asyncId=").append(asyncId);
        sb.append(", command=").append(command);
        sb.append(", updatedAt=").append(updatedAt);
        sb.append(", createdAt=").append(createdAt);
        sb.append(", lockVersion=").append(lockVersion);
        sb.append(", deletedFlg=").append(deletedFlg);
        sb.append(", serialVersionUID=").append(serialVersionUID);
        sb.append("]");
        return sb.toString();
    }
}
| |
package com.gtm.csims.model.base;
import java.io.Serializable;
/**
* This is an object that contains data related to the BS_BUSIEVAL table.
* Do not modify this class because it will be overwritten if the configuration file
* related to this class is modified.
*
* @hibernate.class
* table="BS_BUSIEVAL"
*/
public abstract class BaseBsBusieval implements Serializable {
// Constants naming this entity and its bean properties, for referring to
// them by name (e.g. when building queries against the mapped BS_BUSIEVAL
// table - see the @hibernate.class annotation on this class).
public static String REF = "BsBusieval";
public static String PROP_UPDATEATE = "Updateate";
public static String PROP_BUSIEVALNO = "Busievalno";
public static String PROP_STAT = "Stat";
public static String PROP_EVALORGNO = "Evalorgno";
public static String PROP_EVALEDORGNO = "Evaledorgno";
public static String PROP_CREATOR = "Creator";
public static String PROP_FLAG = "Flag";
public static String PROP_CREATORORGNO = "Creatororgno";
public static String PROP_CREATORORG = "Creatororg";
public static String PROP_CREATEDATE = "Createdate";
public static String PROP_EVALORGNM = "Evalorgnm";
public static String PROP_EVALEDORGNM = "Evaledorgnm";
public static String PROP_CRTDATE = "Crtdate";
public static String PROP_ID = "Id";
public static String PROP_EVALDURING = "EvalDuring";
public static String PROP_EVALYEAR = "EvalYear";
// constructors

/** Default constructor. */
public BaseBsBusieval () {
    initialize();
}

/**
 * Constructor for primary key
 *
 * @param id the primary key (ID column) value
 */
public BaseBsBusieval (java.lang.String id) {
    this.setId(id);
    initialize();
}

/** Initialization hook invoked from every constructor; subclasses may override. */
protected void initialize () {}
// Cached hash code; Integer.MIN_VALUE means "not yet computed".
// Reset by setId() whenever the primary key changes (the computation
// itself is outside this chunk).
private int hashCode = Integer.MIN_VALUE;

// primary key
private java.lang.String id;

// fields (one per column of BS_BUSIEVAL)
private java.lang.String busievalno;
private java.lang.String evalorgnm;
private java.lang.String evalorgno;
private java.lang.String evaledorgnm;
private java.lang.String evaledorgno;
private java.lang.String evalCondition;
private java.lang.String evalInfo;
private java.lang.String evalContent;
private java.lang.String deScore;
private java.util.Date evaldt;
private java.lang.String histrec;
private java.lang.String creator;
private java.lang.String creatororg;
private java.lang.String creatororgno;
private java.util.Date crtdate;
private java.lang.String stat;
private java.lang.String flag;
private java.lang.String baseScore;
private java.lang.String evalDuring;
private java.lang.String evalYear;
private java.util.Date createdate;
private java.util.Date updateate;
/**
 * Return the unique identifier of this class
 * @hibernate.id
 * generator-class="org.hibernate.id.UUIDHexGenerator"
 * column="ID"
 */
public java.lang.String getId () {
    return id;
}

/**
 * Set the unique identifier of this class
 * @param id the new ID
 */
public void setId (java.lang.String id) {
    this.id = id;
    // Invalidate the cached hash code, since the key changed.
    this.hashCode = Integer.MIN_VALUE;
}
/**
* Return the value associated with the column: BUSIEVALNO
*/
public java.lang.String getBusievalno () {
return busievalno;
}
/**
* Set the value related to the column: BUSIEVALNO
* @param busievalno the BUSIEVALNO value
*/
public void setBusievalno (java.lang.String busievalno) {
this.busievalno = busievalno;
}
public java.lang.String getEvaledorgnm() {
return evaledorgnm;
}
public void setEvaledorgnm(java.lang.String evaledorgnm) {
this.evaledorgnm = evaledorgnm;
}
public java.lang.String getEvaledorgno() {
return evaledorgno;
}
public void setEvaledorgno(java.lang.String evaledorgno) {
this.evaledorgno = evaledorgno;
}
/**
* Return the value associated with the column: EVALORGNM
*/
public java.lang.String getEvalorgnm () {
return evalorgnm;
}
/**
* Set the value related to the column: EVALORGNM
* @param evalorgnm the EVALORGNM value
*/
public void setEvalorgnm (java.lang.String evalorgnm) {
this.evalorgnm = evalorgnm;
}
/**
* Return the value associated with the column: EVALORGNO
*/
public java.lang.String getEvalorgno () {
return evalorgno;
}
/**
* Set the value related to the column: EVALORGNO
* @param evalorgno the EVALORGNO value
*/
public void setEvalorgno (java.lang.String evalorgno) {
this.evalorgno = evalorgno;
}
    /**
     * Return the value associated with the column: HISTREC
     */
    public java.lang.String getHistrec () {
        return histrec;
    }

    /**
     * Set the value related to the column: HISTREC
     * @param histrec the HISTREC value
     */
    public void setHistrec (java.lang.String histrec) {
        this.histrec = histrec;
    }

    /**
     * Return the value associated with the column: CREATOR
     */
    public java.lang.String getCreator () {
        return creator;
    }

    /**
     * Set the value related to the column: CREATOR
     * @param creator the CREATOR value
     */
    public void setCreator (java.lang.String creator) {
        this.creator = creator;
    }

    /**
     * Return the value associated with the column: CREATORORG
     */
    public java.lang.String getCreatororg () {
        return creatororg;
    }

    /**
     * Set the value related to the column: CREATORORG
     * @param creatororg the CREATORORG value
     */
    public void setCreatororg (java.lang.String creatororg) {
        this.creatororg = creatororg;
    }

    /**
     * Return the value associated with the column: CREATORORGNO
     */
    public java.lang.String getCreatororgno () {
        return creatororgno;
    }

    /**
     * Set the value related to the column: CREATORORGNO
     * @param creatororgno the CREATORORGNO value
     */
    public void setCreatororgno (java.lang.String creatororgno) {
        this.creatororgno = creatororgno;
    }

    /**
     * Return the value associated with the column: CRTDATE
     * NOTE(review): a separate CREATEDATE property also exists on this bean;
     * confirm which of the two the application treats as authoritative.
     */
    public java.util.Date getCrtdate () {
        return crtdate;
    }

    /**
     * Set the value related to the column: CRTDATE
     * @param crtdate the CRTDATE value
     */
    public void setCrtdate (java.util.Date crtdate) {
        this.crtdate = crtdate;
    }
    /**
     * Return the value associated with the column: STAT
     */
    public java.lang.String getStat () {
        return stat;
    }

    /**
     * Set the value related to the column: STAT
     * @param stat the STAT value
     */
    public void setStat (java.lang.String stat) {
        this.stat = stat;
    }

    /**
     * Return the value associated with the column: FLAG
     */
    public java.lang.String getFlag () {
        return flag;
    }

    /**
     * Set the value related to the column: FLAG
     * @param flag the FLAG value
     */
    public void setFlag (java.lang.String flag) {
        this.flag = flag;
    }

    /**
     * Return the value associated with the column: CREATEDATE
     */
    public java.util.Date getCreatedate () {
        return createdate;
    }

    /**
     * Set the value related to the column: CREATEDATE
     * @param createdate the CREATEDATE value
     */
    public void setCreatedate (java.util.Date createdate) {
        this.createdate = createdate;
    }

    /**
     * Return the value associated with the column: UPDATEATE
     * ("updateate" appears to be a typo for "updatedate", but matches the
     * mapped column name -- keep as-is without a schema migration).
     */
    public java.util.Date getUpdateate () {
        return updateate;
    }

    /**
     * Set the value related to the column: UPDATEATE
     * @param updateate the UPDATEATE value
     */
    public void setUpdateate (java.util.Date updateate) {
        this.updateate = updateate;
    }
    /** Return the value of the {@code evalDuring} property. */
    public java.lang.String getEvalDuring() {
        return evalDuring;
    }

    /** Set the value of the {@code evalDuring} property. */
    public void setEvalDuring(java.lang.String evalDuring) {
        this.evalDuring = evalDuring;
    }

    /**
     * Return the cached hash-code value.
     * NOTE(review): this exposes the internal hash cache (see hashCode());
     * it is questionable API and callers should not rely on it.
     */
    public int getHashCode() {
        return hashCode;
    }

    /**
     * Overwrite the cached hash-code value.
     * NOTE(review): setting this to anything other than Integer.MIN_VALUE
     * suppresses recomputation in hashCode().
     */
    public void setHashCode(int hashCode) {
        this.hashCode = hashCode;
    }

    /** Return the value of the {@code evalCondition} property. */
    public java.lang.String getEvalCondition() {
        return evalCondition;
    }

    /** Set the value of the {@code evalCondition} property. */
    public void setEvalCondition(java.lang.String evalCondition) {
        this.evalCondition = evalCondition;
    }

    /** Return the value of the {@code evalInfo} property. */
    public java.lang.String getEvalInfo() {
        return evalInfo;
    }

    /** Set the value of the {@code evalInfo} property. */
    public void setEvalInfo(java.lang.String evalInfo) {
        this.evalInfo = evalInfo;
    }

    /** Return the value of the {@code evalContent} property. */
    public java.lang.String getEvalContent() {
        return evalContent;
    }

    /** Set the value of the {@code evalContent} property. */
    public void setEvalContent(java.lang.String evalContent) {
        this.evalContent = evalContent;
    }

    /** Return the value of the {@code deScore} property. */
    public java.lang.String getDeScore() {
        return deScore;
    }

    /** Set the value of the {@code deScore} property. */
    public void setDeScore(java.lang.String deScore) {
        this.deScore = deScore;
    }

    /** Return the value of the {@code baseScore} property. */
    public java.lang.String getBaseScore() {
        return baseScore;
    }

    /** Set the value of the {@code baseScore} property. */
    public void setBaseScore(java.lang.String baseScore) {
        this.baseScore = baseScore;
    }

    /** Return the evaluation date ({@code evaldt}). */
    public java.util.Date getEvaldt() {
        return evaldt;
    }

    /** Set the evaluation date ({@code evaldt}). */
    public void setEvaldt(java.util.Date evaldt) {
        this.evaldt = evaldt;
    }

    /** Return the value of the {@code evalYear} property. */
    public java.lang.String getEvalYear() {
        return evalYear;
    }

    /** Set the value of the {@code evalYear} property. */
    public void setEvalYear(java.lang.String evalYear) {
        this.evalYear = evalYear;
    }
public boolean equals (Object obj) {
if (null == obj) return false;
if (!(obj instanceof com.gtm.csims.model.BsBusieval)) return false;
else {
com.gtm.csims.model.BsBusieval bsBusieval = (com.gtm.csims.model.BsBusieval) obj;
if (null == this.getId() || null == bsBusieval.getId()) return false;
else return (this.getId().equals(bsBusieval.getId()));
}
}
public int hashCode () {
if (Integer.MIN_VALUE == this.hashCode) {
if (null == this.getId()) return super.hashCode();
else {
String hashStr = this.getClass().getName() + ":" + this.getId().hashCode();
this.hashCode = hashStr.hashCode();
}
}
return this.hashCode;
}
    /**
     * NOTE(review): inherits Object.toString(); override with a readable
     * representation if these beans are ever logged.
     */
    public String toString () {
        return super.toString();
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisanalyticsv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result of an input-schema discovery call: the inferred schema plus the raw,
 * processed, and parsed sample records it was inferred from.
 *
 * NOTE: generated code (aws-java-sdk-code-generator); edit the code generator,
 * not this file.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisanalyticsv2-2018-05-23/DiscoverInputSchema"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DiscoverInputSchemaResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * The schema inferred from the streaming source. It identifies the format of the data in the streaming source and
     * how each data element maps to corresponding columns in the in-application stream that you can create.
     * </p>
     */
    private SourceSchema inputSchema;
    /**
     * <p>
     * An array of elements, where each element corresponds to a row in a stream record (a stream record can have more
     * than one row).
     * </p>
     */
    private java.util.List<java.util.List<String>> parsedInputRecords;
    /**
     * <p>
     * The stream data that was modified by the processor specified in the <code>InputProcessingConfiguration</code>
     * parameter.
     * </p>
     */
    private java.util.List<String> processedInputRecords;
    /**
     * <p>
     * The raw stream data that was sampled to infer the schema.
     * </p>
     */
    private java.util.List<String> rawInputRecords;

    /**
     * <p>
     * The schema inferred from the streaming source. It identifies the format of the data in the streaming source and
     * how each data element maps to corresponding columns in the in-application stream that you can create.
     * </p>
     *
     * @param inputSchema
     *        The schema inferred from the streaming source. It identifies the format of the data in the streaming
     *        source and how each data element maps to corresponding columns in the in-application stream that you can
     *        create.
     */
    public void setInputSchema(SourceSchema inputSchema) {
        this.inputSchema = inputSchema;
    }

    /**
     * <p>
     * The schema inferred from the streaming source. It identifies the format of the data in the streaming source and
     * how each data element maps to corresponding columns in the in-application stream that you can create.
     * </p>
     *
     * @return The schema inferred from the streaming source. It identifies the format of the data in the streaming
     *         source and how each data element maps to corresponding columns in the in-application stream that you can
     *         create.
     */
    public SourceSchema getInputSchema() {
        return this.inputSchema;
    }

    /**
     * <p>
     * The schema inferred from the streaming source. It identifies the format of the data in the streaming source and
     * how each data element maps to corresponding columns in the in-application stream that you can create.
     * </p>
     *
     * @param inputSchema
     *        The schema inferred from the streaming source. It identifies the format of the data in the streaming
     *        source and how each data element maps to corresponding columns in the in-application stream that you can
     *        create.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withInputSchema(SourceSchema inputSchema) {
        setInputSchema(inputSchema);
        return this;
    }

    /**
     * <p>
     * An array of elements, where each element corresponds to a row in a stream record (a stream record can have more
     * than one row).
     * </p>
     *
     * @return An array of elements, where each element corresponds to a row in a stream record (a stream record can
     *         have more than one row).
     */
    public java.util.List<java.util.List<String>> getParsedInputRecords() {
        return parsedInputRecords;
    }

    /**
     * <p>
     * An array of elements, where each element corresponds to a row in a stream record (a stream record can have more
     * than one row).
     * </p>
     *
     * @param parsedInputRecords
     *        An array of elements, where each element corresponds to a row in a stream record (a stream record can have
     *        more than one row).
     */
    public void setParsedInputRecords(java.util.Collection<java.util.List<String>> parsedInputRecords) {
        if (parsedInputRecords == null) {
            this.parsedInputRecords = null;
            return;
        }
        // defensive copy: later mutation of the caller's collection must not
        // affect this result object
        this.parsedInputRecords = new java.util.ArrayList<java.util.List<String>>(parsedInputRecords);
    }

    /**
     * <p>
     * An array of elements, where each element corresponds to a row in a stream record (a stream record can have more
     * than one row).
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setParsedInputRecords(java.util.Collection)} or {@link #withParsedInputRecords(java.util.Collection)} if
     * you want to override the existing values.
     * </p>
     *
     * @param parsedInputRecords
     *        An array of elements, where each element corresponds to a row in a stream record (a stream record can have
     *        more than one row).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withParsedInputRecords(java.util.List<String>... parsedInputRecords) {
        if (this.parsedInputRecords == null) {
            setParsedInputRecords(new java.util.ArrayList<java.util.List<String>>(parsedInputRecords.length));
        }
        for (java.util.List<String> ele : parsedInputRecords) {
            this.parsedInputRecords.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * An array of elements, where each element corresponds to a row in a stream record (a stream record can have more
     * than one row).
     * </p>
     *
     * @param parsedInputRecords
     *        An array of elements, where each element corresponds to a row in a stream record (a stream record can have
     *        more than one row).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withParsedInputRecords(java.util.Collection<java.util.List<String>> parsedInputRecords) {
        setParsedInputRecords(parsedInputRecords);
        return this;
    }

    /**
     * <p>
     * The stream data that was modified by the processor specified in the <code>InputProcessingConfiguration</code>
     * parameter.
     * </p>
     *
     * @return The stream data that was modified by the processor specified in the
     *         <code>InputProcessingConfiguration</code> parameter.
     */
    public java.util.List<String> getProcessedInputRecords() {
        return processedInputRecords;
    }

    /**
     * <p>
     * The stream data that was modified by the processor specified in the <code>InputProcessingConfiguration</code>
     * parameter.
     * </p>
     *
     * @param processedInputRecords
     *        The stream data that was modified by the processor specified in the
     *        <code>InputProcessingConfiguration</code> parameter.
     */
    public void setProcessedInputRecords(java.util.Collection<String> processedInputRecords) {
        if (processedInputRecords == null) {
            this.processedInputRecords = null;
            return;
        }
        this.processedInputRecords = new java.util.ArrayList<String>(processedInputRecords);
    }

    /**
     * <p>
     * The stream data that was modified by the processor specified in the <code>InputProcessingConfiguration</code>
     * parameter.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setProcessedInputRecords(java.util.Collection)} or
     * {@link #withProcessedInputRecords(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param processedInputRecords
     *        The stream data that was modified by the processor specified in the
     *        <code>InputProcessingConfiguration</code> parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withProcessedInputRecords(String... processedInputRecords) {
        if (this.processedInputRecords == null) {
            setProcessedInputRecords(new java.util.ArrayList<String>(processedInputRecords.length));
        }
        for (String ele : processedInputRecords) {
            this.processedInputRecords.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The stream data that was modified by the processor specified in the <code>InputProcessingConfiguration</code>
     * parameter.
     * </p>
     *
     * @param processedInputRecords
     *        The stream data that was modified by the processor specified in the
     *        <code>InputProcessingConfiguration</code> parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withProcessedInputRecords(java.util.Collection<String> processedInputRecords) {
        setProcessedInputRecords(processedInputRecords);
        return this;
    }

    /**
     * <p>
     * The raw stream data that was sampled to infer the schema.
     * </p>
     *
     * @return The raw stream data that was sampled to infer the schema.
     */
    public java.util.List<String> getRawInputRecords() {
        return rawInputRecords;
    }

    /**
     * <p>
     * The raw stream data that was sampled to infer the schema.
     * </p>
     *
     * @param rawInputRecords
     *        The raw stream data that was sampled to infer the schema.
     */
    public void setRawInputRecords(java.util.Collection<String> rawInputRecords) {
        if (rawInputRecords == null) {
            this.rawInputRecords = null;
            return;
        }
        this.rawInputRecords = new java.util.ArrayList<String>(rawInputRecords);
    }

    /**
     * <p>
     * The raw stream data that was sampled to infer the schema.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRawInputRecords(java.util.Collection)} or {@link #withRawInputRecords(java.util.Collection)} if you
     * want to override the existing values.
     * </p>
     *
     * @param rawInputRecords
     *        The raw stream data that was sampled to infer the schema.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withRawInputRecords(String... rawInputRecords) {
        if (this.rawInputRecords == null) {
            setRawInputRecords(new java.util.ArrayList<String>(rawInputRecords.length));
        }
        for (String ele : rawInputRecords) {
            this.rawInputRecords.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The raw stream data that was sampled to infer the schema.
     * </p>
     *
     * @param rawInputRecords
     *        The raw stream data that was sampled to infer the schema.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DiscoverInputSchemaResult withRawInputRecords(java.util.Collection<String> rawInputRecords) {
        setRawInputRecords(rawInputRecords);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getInputSchema() != null)
            sb.append("InputSchema: ").append(getInputSchema()).append(",");
        if (getParsedInputRecords() != null)
            sb.append("ParsedInputRecords: ").append(getParsedInputRecords()).append(",");
        if (getProcessedInputRecords() != null)
            sb.append("ProcessedInputRecords: ").append(getProcessedInputRecords()).append(",");
        if (getRawInputRecords() != null)
            sb.append("RawInputRecords: ").append(getRawInputRecords());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof DiscoverInputSchemaResult == false)
            return false;
        DiscoverInputSchemaResult other = (DiscoverInputSchemaResult) obj;
        // generated field-by-field comparison; the XOR catches the
        // "exactly one side null" case before the equals() call
        if (other.getInputSchema() == null ^ this.getInputSchema() == null)
            return false;
        if (other.getInputSchema() != null && other.getInputSchema().equals(this.getInputSchema()) == false)
            return false;
        if (other.getParsedInputRecords() == null ^ this.getParsedInputRecords() == null)
            return false;
        if (other.getParsedInputRecords() != null && other.getParsedInputRecords().equals(this.getParsedInputRecords()) == false)
            return false;
        if (other.getProcessedInputRecords() == null ^ this.getProcessedInputRecords() == null)
            return false;
        if (other.getProcessedInputRecords() != null && other.getProcessedInputRecords().equals(this.getProcessedInputRecords()) == false)
            return false;
        if (other.getRawInputRecords() == null ^ this.getRawInputRecords() == null)
            return false;
        if (other.getRawInputRecords() != null && other.getRawInputRecords().equals(this.getRawInputRecords()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // standard 31-based accumulation over the same fields equals() compares
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getInputSchema() == null) ? 0 : getInputSchema().hashCode());
        hashCode = prime * hashCode + ((getParsedInputRecords() == null) ? 0 : getParsedInputRecords().hashCode());
        hashCode = prime * hashCode + ((getProcessedInputRecords() == null) ? 0 : getProcessedInputRecords().hashCode());
        hashCode = prime * hashCode + ((getRawInputRecords() == null) ? 0 : getRawInputRecords().hashCode());
        return hashCode;
    }

    @Override
    public DiscoverInputSchemaResult clone() {
        try {
            // shallow copy is sufficient for the generated model contract
            return (DiscoverInputSchemaResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
| |
package org.usip.osp.baseobjects;
import java.util.*;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.annotations.Proxy;
import org.usip.osp.persistence.MultiSchemaHibernateUtil;
/**
 * Represents a set of simulations (such as a group run simultaneously in a class).
 */
/*
 * This file is part of the USIP Open Simulation Platform.<br>
 *
 * The USIP Open Simulation Platform is free software; you can
 * redistribute it and/or modify it under the terms of the new BSD Style
 * license associated with this distribution.<br>
 *
 * The USIP Open Simulation Platform is distributed WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. <BR>
 *
 */
@Entity
@Proxy(lazy = false)
public class RunningSimSet {

    /** Database id. */
    @Id
    @GeneratedValue
    private Long id;

    /** Id of the simulation this set was created for. */
    @Column(name = "SIM_ID")
    private Long sim_id;

    /** Display name of this set. */
    private String RunningSimSetName = "";

    /** Value of the user id that created this set. */
    private Long user_id;

    /** Name of the user that created this set. */
    private String username;

    @Column(name = "CREATION_DATE", columnDefinition = "datetime")
    @GeneratedValue
    private Date creationDate;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getSim_id() {
        return sim_id;
    }

    public void setSim_id(Long sim_id) {
        this.sim_id = sim_id;
    }

    public String getRunningSimSetName() {
        return RunningSimSetName;
    }

    public void setRunningSimSetName(String runningSimSetName) {
        RunningSimSetName = runningSimSetName;
    }

    public Long getUser_id() {
        return user_id;
    }

    public void setUser_id(Long user_id) {
        this.user_id = user_id;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public Date getCreationDate() {
        return creationDate;
    }

    public void setCreationDate(Date creationDate) {
        this.creationDate = creationDate;
    }

    /**
     * Returns a list of all running sim sets created for a simulation.
     *
     * @param simid id of the simulation (as a string, bound into the query)
     * @param schema database schema to query
     * @return the matching sets (possibly empty)
     */
    public static List<RunningSimSet> getAllForSim(String simid, String schema) {
        MultiSchemaHibernateUtil.beginTransaction(schema);
        @SuppressWarnings("unchecked")
        List<RunningSimSet> returnList = MultiSchemaHibernateUtil.getSession(schema).createQuery(
                "from RunningSimSet where sim_id = :sim_id").setString("sim_id", simid).list(); //$NON-NLS-1$
        MultiSchemaHibernateUtil.commitAndCloseTransaction(schema);
        return returnList;
    }

    /**
     * Loads a single set by primary key.
     *
     * @param schema database schema to query
     * @param rs_id primary key of the set
     * @return the set, or null if not found
     */
    public static RunningSimSet getById(String schema, Long rs_id) {
        MultiSchemaHibernateUtil.beginTransaction(schema);
        RunningSimSet this_rs = (RunningSimSet) MultiSchemaHibernateUtil.getSession(schema).get(
                RunningSimSet.class, rs_id);
        MultiSchemaHibernateUtil.commitAndCloseTransaction(schema);
        return this_rs;
    }

    /** Persists (insert or update) this set in the given schema. */
    public void saveMe(String schema) {
        MultiSchemaHibernateUtil.beginTransaction(schema);
        MultiSchemaHibernateUtil.getSession(schema).saveOrUpdate(this);
        MultiSchemaHibernateUtil.commitAndCloseTransaction(schema);
    }

    /**
     * Returns a hashtable where the key is the id of a running sim, and the value is just the string 'set'.
     *
     * @param schema database schema to query
     * @param rs_set_id id of the running sim set
     * @return hashtable keyed by running-sim id
     */
    public static Hashtable<Long, String> getHashSetOfRunningSims(String schema, Long rs_set_id) {
        Hashtable<Long, String> returnHashtable = new Hashtable<Long, String>();

        @SuppressWarnings("unchecked")
        List<RunningSimSetAssignment> assignments =
                RunningSimSetAssignment.getAllForRunningSimulationSet(schema, rs_set_id);

        for (RunningSimSetAssignment rss : assignments) {
            returnHashtable.put(rss.getRs_id(), "set");
        }
        return returnHashtable;
    }

    /**
     * Returns all of the sets for a particular running simulation.
     *
     * NOTE(review): the HQL filters on {@code rs_id}, but RunningSimSet declares
     * no such property; this looks like it should query RunningSimSetAssignment
     * instead (compare getAllRunningSimsInSameSet). Confirm before relying on
     * this method.
     *
     * @param schema database schema to query
     * @param rs_id id of the running simulation
     * @return the matching sets
     */
    public static List<RunningSimSet> getAllForRunningSimulation(String schema, Long rs_id) {
        MultiSchemaHibernateUtil.beginTransaction(schema);
        @SuppressWarnings("unchecked")
        List<RunningSimSet> returnList = MultiSchemaHibernateUtil.getSession(schema).createQuery(
                "from RunningSimSet where rs_id = :rs_id")
                .setLong("rs_id", rs_id).list(); //$NON-NLS-1$
        MultiSchemaHibernateUtil.commitAndCloseTransaction(schema);
        return returnList;
    }

    /**
     * Builds a comma-separated list of the names of all running sims that share
     * a set with the given running sim.
     *
     * @param schema database schema to query
     * @param rs_id id of the running simulation
     * @return e.g. "Sim A, Sim B", or the empty string when there are none
     */
    public static String getListOfRunningSimsInSameSets(String schema, Long rs_id) {
        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder names = new StringBuilder();
        for (Enumeration<Long> e = getAllRunningSimsInSameSet(schema, rs_id); e.hasMoreElements();) {
            Long key = e.nextElement();
            RunningSimulation rs = RunningSimulation.getById(schema, key);
            names.append(rs.getName()).append(", ");
        }
        // Drop the trailing ", " separator, if any names were appended.
        if (names.length() > 2) {
            names.setLength(names.length() - 2);
        }
        return names.toString();
    }

    /**
     * Collects the ids of every running sim that appears in any set containing
     * the given running sim (including the given sim itself).
     *
     * @param schema database schema to query
     * @param rs_id id of the running simulation
     * @return enumeration of running-sim ids (unordered, duplicates collapsed)
     */
    public static Enumeration<Long> getAllRunningSimsInSameSet(String schema, Long rs_id) {
        // We get all of the sets that this running sim is in.
        @SuppressWarnings("unchecked")
        List<RunningSimSetAssignment> listOfSets =
                RunningSimSetAssignment.getAllForRunningSimulation(schema, rs_id);

        Hashtable<Long, String> fullHash = new Hashtable<Long, String>();
        for (RunningSimSetAssignment rssa : listOfSets) {
            // Get a list (as in the keys of this hashtable) of all the running sims found for this set.
            fullHash.putAll(getHashSetOfRunningSims(schema, rssa.getRs_set_id()));
        }
        return fullHash.keys();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.AppendTestUtil;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TestFileTruncate {
  static {
    // Verbose logging of namenode state changes and edit-log loading helps
    // diagnose truncate/recovery failures in this suite.
    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.ALL);
    GenericTestUtils.setLogLevel(FSEditLogLoader.LOG, Level.ALL);
  }
  static final Log LOG = LogFactory.getLog(TestFileTruncate.class);
  static final int BLOCK_SIZE = 4;       // deliberately tiny so files span many blocks cheaply
  static final short REPLICATION = 3;
  static final int DATANODE_NUM = 3;
  static final int SUCCESS_ATTEMPTS = 300;
  static final int RECOVERY_ATTEMPTS = 600;
  static final long SLEEP = 100L;        // ms between polls while waiting for recovery
  static final long LOW_SOFTLIMIT = 100L;
  static final long LOW_HARDLIMIT = 200L;
  static final int SHORT_HEARTBEAT = 1;
  static Configuration conf;
  static MiniDFSCluster cluster;
  static DistributedFileSystem fs;
  private Path parent;
  @Before
  public void setUp() throws IOException {
    conf = new HdfsConfiguration();
    // Allow the tiny 4-byte block size used throughout this suite.
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, BLOCK_SIZE);
    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, BLOCK_SIZE);
    // Short heartbeat and a 1s replication-pending timeout make block
    // recovery after truncate converge quickly.
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, SHORT_HEARTBEAT);
    conf.setLong(
        DFSConfigKeys.DFS_NAMENODE_REPLICATION_PENDING_TIMEOUT_SEC_KEY, 1);
    cluster = new MiniDFSCluster.Builder(conf)
        .format(true)
        .numDataNodes(DATANODE_NUM)
        .nameNodePort(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT)
        .waitSafeMode(true)
        .build();
    fs = cluster.getFileSystem();
    parent = new Path("/test");
  }
@After
public void tearDown() throws IOException {
if(fs != null) fs.close();
if(cluster != null) cluster.shutdown();
}
  /**
   * Truncate files of different sizes byte by byte.
   */
  @Test
  public void testBasicTruncate() throws IOException {
    int startingFileSize = 3 * BLOCK_SIZE;
    fs.mkdirs(parent);
    fs.setQuota(parent, 100, 1000);
    byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
    // Outer loop: shrink the initial file length; inner loop: truncate that
    // file by every possible amount from 0 up to the whole file.
    for (int fileLength = startingFileSize; fileLength > 0;
                                            fileLength -= BLOCK_SIZE - 1) {
      for (int toTruncate = 0; toTruncate <= fileLength; toTruncate++) {
        final Path p = new Path(parent, "testBasicTruncate" + fileLength);
        writeContents(contents, fileLength, p);

        int newLength = fileLength - toTruncate;
        boolean isReady = fs.truncate(p, newLength);
        LOG.info("fileLength=" + fileLength + ", newLength=" + newLength
            + ", toTruncate=" + toTruncate + ", isReady=" + isReady);

        // truncate() returns true (file closed immediately) only when no
        // block needs recovery: zero-byte truncate or a block-boundary cut.
        assertEquals("File must be closed for zero truncate"
            + " or truncating at the block boundary",
            isReady, toTruncate == 0 || newLength % BLOCK_SIZE == 0);
        if (!isReady) {
          checkBlockRecovery(p);
        }

        ContentSummary cs = fs.getContentSummary(parent);
        assertEquals("Bad disk space usage",
            cs.getSpaceConsumed(), newLength * REPLICATION);
        // validate the file content
        checkFullFile(p, newLength, contents);
      }
    }
    fs.delete(parent, true);
  }
  /** Truncate the same file multiple times until its size is zero. */
  @Test
  public void testMultipleTruncate() throws IOException {
    Path dir = new Path("/testMultipleTruncate");
    fs.mkdirs(dir);
    final Path p = new Path(dir, "file");
    final byte[] data = new byte[100 * BLOCK_SIZE];
    ThreadLocalRandom.current().nextBytes(data);
    writeContents(data, data.length, p);

    // Repeatedly truncate to a random smaller length until the file is empty.
    for(int n = data.length; n > 0; ) {
      final int newLength = ThreadLocalRandom.current().nextInt(n);
      final boolean isReady = fs.truncate(p, newLength);
      LOG.info("newLength=" + newLength + ", isReady=" + isReady);
      assertEquals("File must be closed for truncating at the block boundary",
          isReady, newLength % BLOCK_SIZE == 0);
      // A second truncate to the same length must report the same readiness.
      assertEquals("Truncate is not idempotent",
          isReady, fs.truncate(p, newLength));
      if (!isReady) {
        checkBlockRecovery(p);
      }
      checkFullFile(p, newLength, data);
      n = newLength;
    }
    fs.delete(dir, true);
  }
  /**
   * Truncate a file that is captured in a snapshot, then delete the snapshot:
   * the truncated last block must be removed once no snapshot references it.
   * (Previous comment was a copy-paste from testMultipleTruncate.)
   */
  @Test
  public void testSnapshotTruncateThenDeleteSnapshot() throws IOException {
    Path dir = new Path("/testSnapshotTruncateThenDeleteSnapshot");
    fs.mkdirs(dir);
    fs.allowSnapshot(dir);
    final Path p = new Path(dir, "file");
    final byte[] data = new byte[BLOCK_SIZE];
    ThreadLocalRandom.current().nextBytes(data);
    writeContents(data, data.length, p);
    final String snapshot = "s0";
    fs.createSnapshot(dir, snapshot);
    Block lastBlock = getLocatedBlocks(p).getLastLocatedBlock()
        .getBlock().getLocalBlock();
    // Truncate mid-block so block recovery is required (newLength is not a
    // multiple of BLOCK_SIZE by construction).
    final int newLength = data.length - 1;
    assert newLength % BLOCK_SIZE != 0 :
        " newLength must not be multiple of BLOCK_SIZE";
    final boolean isReady = fs.truncate(p, newLength);
    LOG.info("newLength=" + newLength + ", isReady=" + isReady);
    assertEquals("File must be closed for truncating at the block boundary",
        isReady, newLength % BLOCK_SIZE == 0);
    fs.deleteSnapshot(dir, snapshot);
    if (!isReady) {
      checkBlockRecovery(p);
    }
    checkFullFile(p, newLength, data);
    // With the snapshot gone, the pre-truncate block must no longer exist.
    assertBlockNotPresent(lastBlock);
    fs.delete(dir, true);
  }
  /**
   * Truncate files and then run other operations such as
   * rename, set replication, set permission, etc.
   */
  @Test
  public void testTruncateWithOtherOperations() throws IOException {
    Path dir = new Path("/testTruncateOtherOperations");
    fs.mkdirs(dir);
    final Path p = new Path(dir, "file");
    final byte[] data = new byte[2 * BLOCK_SIZE];
    ThreadLocalRandom.current().nextBytes(data);
    writeContents(data, data.length, p);

    // Mid-block truncate: recovery is pending while the other ops run.
    final int newLength = data.length - 1;
    boolean isReady = fs.truncate(p, newLength);
    assertFalse(isReady);

    fs.setReplication(p, (short)(REPLICATION - 1));
    fs.setPermission(p, FsPermission.createImmutable((short)0444));

    final Path q = new Path(dir, "newFile");
    fs.rename(p, q);
    // Recovery must complete and contents survive under the new name.
    checkBlockRecovery(q);
    checkFullFile(q, newLength, data);

    // The truncated state must also survive a namenode restart.
    cluster.restartNameNode();
    checkFullFile(q, newLength, data);

    fs.delete(dir, true);
  }
@Test
public void testSnapshotWithAppendTruncate() throws IOException {
  // Run the append/truncate snapshot scenario for every permutation of
  // the snapshot delete order.
  final int[][] deleteOrders = {
      {0, 1, 2}, {0, 2, 1}, {1, 0, 2}, {1, 2, 0}, {2, 0, 1}, {2, 1, 0}};
  for (int[] order : deleteOrders) {
    testSnapshotWithAppendTruncate(order);
  }
}
/**
 * Create three snapshots of a file that is appended and truncated between
 * snapshots, then delete the snapshots in the order given by
 * {@code deleteOrder} and verify that remaining snapshots are still
 * readable, block/INode accounting is correct, and disk space shrinks as
 * snapshot references disappear.
 *
 * @param deleteOrder a permutation of {0,1,2}: the order in which
 *                    snapshots ss0..ss2 are deleted (ss3 is always
 *                    deleted first)
 */
void testSnapshotWithAppendTruncate(int ... deleteOrder) throws IOException {
FSDirectory fsDir = cluster.getNamesystem().getFSDirectory();
fs.mkdirs(parent);
fs.setQuota(parent, 100, 1000);
fs.allowSnapshot(parent);
String truncateFile = "testSnapshotWithAppendTruncate";
final Path src = new Path(parent, truncateFile);
int[] length = new int[4];
// Initial file: two full blocks plus half a block.
length[0] = 2 * BLOCK_SIZE + BLOCK_SIZE / 2;
DFSTestUtil.createFile(fs, src, 64, length[0], BLOCK_SIZE, REPLICATION, 0L);
Block firstBlk = getLocatedBlocks(src).get(0).getBlock().getLocalBlock();
Path[] snapshotFiles = new Path[4];
// Diskspace consumed should be 10 bytes * 3. [blk 1,2,3]
ContentSummary contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(30L));
// Add file to snapshot and append
String[] ss = new String[] {"ss0", "ss1", "ss2", "ss3"};
Path snapshotDir = fs.createSnapshot(parent, ss[0]);
snapshotFiles[0] = new Path(snapshotDir, truncateFile);
length[1] = length[2] = length[0] + BLOCK_SIZE + 1;
DFSTestUtil.appendFile(fs, src, BLOCK_SIZE + 1);
Block lastBlk = getLocatedBlocks(src).getLastLocatedBlock()
.getBlock().getLocalBlock();
// Diskspace consumed should be 15 bytes * 3. [blk 1,2,3,4]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(45L));
// Create another snapshot without changes
snapshotDir = fs.createSnapshot(parent, ss[1]);
snapshotFiles[1] = new Path(snapshotDir, truncateFile);
// Create another snapshot and append
snapshotDir = fs.createSnapshot(parent, ss[2]);
snapshotFiles[2] = new Path(snapshotDir, truncateFile);
DFSTestUtil.appendFile(fs, src, BLOCK_SIZE -1 + BLOCK_SIZE / 2);
Block appendedBlk = getLocatedBlocks(src).getLastLocatedBlock()
.getBlock().getLocalBlock();
// Diskspace consumed should be 20 bytes * 3. [blk 1,2,3,4,5]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(60L));
// Truncate to block boundary
int newLength = length[0] + BLOCK_SIZE / 2;
boolean isReady = fs.truncate(src, newLength);
assertTrue("Recovery is not expected.", isReady);
assertFileLength(snapshotFiles[2], length[2]);
assertFileLength(snapshotFiles[1], length[1]);
assertFileLength(snapshotFiles[0], length[0]);
// The block appended after the last snapshot has no snapshot reference,
// so truncating it away must delete it outright.
assertBlockNotPresent(appendedBlk);
// Diskspace consumed should be 16 bytes * 3. [blk 1,2,3 SS:4]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(48L));
// Truncate full block again
newLength = length[0] - BLOCK_SIZE / 2;
isReady = fs.truncate(src, newLength);
assertTrue("Recovery is not expected.", isReady);
assertFileLength(snapshotFiles[2], length[2]);
assertFileLength(snapshotFiles[1], length[1]);
assertFileLength(snapshotFiles[0], length[0]);
// Diskspace consumed should be 16 bytes * 3. [blk 1,2 SS:3,4]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(48L));
// Truncate half of the last block
newLength -= BLOCK_SIZE / 2;
isReady = fs.truncate(src, newLength);
assertFalse("Recovery is expected.", isReady);
checkBlockRecovery(src);
assertFileLength(snapshotFiles[2], length[2]);
assertFileLength(snapshotFiles[1], length[1]);
assertFileLength(snapshotFiles[0], length[0]);
Block replacedBlk = getLocatedBlocks(src).getLastLocatedBlock()
.getBlock().getLocalBlock();
// Diskspace consumed should be 18 bytes * 3. [blk 1,6 SS:2,3,4]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(54L));
snapshotDir = fs.createSnapshot(parent, ss[3]);
snapshotFiles[3] = new Path(snapshotDir, truncateFile);
length[3] = newLength;
// Delete file. Should still be able to read snapshots
int numINodes = fsDir.getInodeMapSize();
isReady = fs.delete(src, false);
assertTrue("Delete failed.", isReady);
assertFileLength(snapshotFiles[3], length[3]);
assertFileLength(snapshotFiles[2], length[2]);
assertFileLength(snapshotFiles[1], length[1]);
assertFileLength(snapshotFiles[0], length[0]);
// The INode is still referenced by the snapshots, so the map size must
// not change.
assertEquals("Number of INodes should not change",
numINodes, fsDir.getInodeMapSize());
fs.deleteSnapshot(parent, ss[3]);
assertBlockExists(firstBlk);
assertBlockExists(lastBlk);
// The copy-on-truncate block was referenced only by ss3.
assertBlockNotPresent(replacedBlk);
// Diskspace consumed should be 16 bytes * 3. [SS:1,2,3,4]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(48L));
// delete snapshots in the specified order
fs.deleteSnapshot(parent, ss[deleteOrder[0]]);
assertFileLength(snapshotFiles[deleteOrder[1]], length[deleteOrder[1]]);
assertFileLength(snapshotFiles[deleteOrder[2]], length[deleteOrder[2]]);
assertBlockExists(firstBlk);
assertBlockExists(lastBlk);
assertEquals("Number of INodes should not change",
numINodes, fsDir.getInodeMapSize());
// Diskspace consumed should be 16 bytes * 3. [SS:1,2,3,4]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(48L));
fs.deleteSnapshot(parent, ss[deleteOrder[1]]);
assertFileLength(snapshotFiles[deleteOrder[2]], length[deleteOrder[2]]);
assertBlockExists(firstBlk);
contentSummary = fs.getContentSummary(parent);
if(fs.exists(snapshotFiles[0])) {
// Only ss0 remains (deleteOrder[2] == 0); the appended block is gone.
// Diskspace consumed should be 12 bytes * 3. [SS:1,2,3]
assertBlockNotPresent(lastBlk);
assertThat(contentSummary.getSpaceConsumed(), is(36L));
} else {
// ss1 or ss2 remains, which still references the appended block.
// Diskspace consumed should be 16 bytes * 3. [SS:1,2,3,4]
assertThat(contentSummary.getSpaceConsumed(), is(48L));
}
assertEquals("Number of INodes should not change",
numINodes, fsDir .getInodeMapSize());
fs.deleteSnapshot(parent, ss[deleteOrder[2]]);
assertBlockNotPresent(firstBlk);
assertBlockNotPresent(lastBlk);
// Diskspace consumed should be 0 bytes * 3. []
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(0L));
// All snapshots are gone, so the INode is finally released.
assertNotEquals("Number of INodes should change",
numINodes, fsDir.getInodeMapSize());
}
/**
 * Create three snapshots with a file truncated 3 times.
 * Delete the snapshots in every possible order and verify that the
 * remaining snapshots are still readable.
 */
@Test
public void testSnapshotWithTruncates() throws IOException {
  final int[][] deleteOrders = {
      {0, 1, 2}, {0, 2, 1}, {1, 0, 2}, {1, 2, 0}, {2, 0, 1}, {2, 1, 0}};
  for (int[] order : deleteOrders) {
    testSnapshotWithTruncates(order);
  }
}
/**
 * Create three snapshots around two truncates of the same file, then
 * delete the snapshots in the order given by {@code deleteOrder} and
 * verify that remaining snapshots stay readable and disk-space
 * accounting is updated as snapshot references disappear.
 *
 * @param deleteOrder permutation of {0,1,2}: order in which snapshots
 *                    ss0..ss2 are deleted
 */
void testSnapshotWithTruncates(int ... deleteOrder) throws IOException {
fs.mkdirs(parent);
fs.setQuota(parent, 100, 1000);
fs.allowSnapshot(parent);
String truncateFile = "testSnapshotWithTruncates";
final Path src = new Path(parent, truncateFile);
int[] length = new int[3];
length[0] = 3 * BLOCK_SIZE;
DFSTestUtil.createFile(fs, src, 64, length[0], BLOCK_SIZE, REPLICATION, 0L);
Block firstBlk = getLocatedBlocks(src).get(0).getBlock().getLocalBlock();
Block lastBlk = getLocatedBlocks(src).getLastLocatedBlock()
.getBlock().getLocalBlock();
Path[] snapshotFiles = new Path[3];
// Diskspace consumed should be 12 bytes * 3. [blk 1,2,3]
ContentSummary contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(36L));
// Add file to snapshot and truncate to a block boundary
String[] ss = new String[] {"ss0", "ss1", "ss2"};
Path snapshotDir = fs.createSnapshot(parent, ss[0]);
snapshotFiles[0] = new Path(snapshotDir, truncateFile);
length[1] = 2 * BLOCK_SIZE;
boolean isReady = fs.truncate(src, 2 * BLOCK_SIZE);
assertTrue("Recovery is not expected.", isReady);
// Diskspace consumed should be 12 bytes * 3. [blk 1,2 SS:3]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(36L));
snapshotDir = fs.createSnapshot(parent, ss[1]);
snapshotFiles[1] = new Path(snapshotDir, truncateFile);
// Create another snapshot with truncate
length[2] = BLOCK_SIZE + BLOCK_SIZE / 2;
isReady = fs.truncate(src, BLOCK_SIZE + BLOCK_SIZE / 2);
// Mid-block truncate: block recovery must run before the snapshot.
assertFalse("Recovery is expected.", isReady);
checkBlockRecovery(src);
snapshotDir = fs.createSnapshot(parent, ss[2]);
snapshotFiles[2] = new Path(snapshotDir, truncateFile);
assertFileLength(snapshotFiles[0], length[0]);
assertBlockExists(lastBlk);
// Diskspace consumed should be 14 bytes * 3. [blk 1,4 SS:2,3]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(42L));
fs.deleteSnapshot(parent, ss[deleteOrder[0]]);
assertFileLength(snapshotFiles[deleteOrder[1]], length[deleteOrder[1]]);
assertFileLength(snapshotFiles[deleteOrder[2]], length[deleteOrder[2]]);
assertFileLength(src, length[2]);
assertBlockExists(firstBlk);
contentSummary = fs.getContentSummary(parent);
// Accounting depends on whether ss0 (which references block 3) survives.
if(fs.exists(snapshotFiles[0])) {
// Diskspace consumed should be 14 bytes * 3. [blk 1,4 SS:2,3]
assertThat(contentSummary.getSpaceConsumed(), is(42L));
assertBlockExists(lastBlk);
} else {
// Diskspace consumed should be 10 bytes * 3. [blk 1,4 SS:2]
assertThat(contentSummary.getSpaceConsumed(), is(30L));
assertBlockNotPresent(lastBlk);
}
fs.deleteSnapshot(parent, ss[deleteOrder[1]]);
assertFileLength(snapshotFiles[deleteOrder[2]], length[deleteOrder[2]]);
assertFileLength(src, length[2]);
assertBlockExists(firstBlk);
contentSummary = fs.getContentSummary(parent);
if(fs.exists(snapshotFiles[0])) {
// Diskspace consumed should be 14 bytes * 3. [blk 1,4 SS:2,3]
assertThat(contentSummary.getSpaceConsumed(), is(42L));
assertBlockExists(lastBlk);
} else if(fs.exists(snapshotFiles[1])) {
// Diskspace consumed should be 10 bytes * 3. [blk 1,4 SS:2]
assertThat(contentSummary.getSpaceConsumed(), is(30L));
assertBlockNotPresent(lastBlk);
} else {
// Diskspace consumed should be 6 bytes * 3. [blk 1,4 SS:]
assertThat(contentSummary.getSpaceConsumed(), is(18L));
assertBlockNotPresent(lastBlk);
}
fs.deleteSnapshot(parent, ss[deleteOrder[2]]);
assertFileLength(src, length[2]);
assertBlockExists(firstBlk);
// Diskspace consumed should be 6 bytes * 3. [blk 1,4 SS:]
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(18L));
assertThat(contentSummary.getLength(), is(6L));
fs.delete(src, false);
assertBlockNotPresent(firstBlk);
// Diskspace consumed should be 0 bytes * 3. []
contentSummary = fs.getContentSummary(parent);
assertThat(contentSummary.getSpaceConsumed(), is(0L));
}
/**
 * Failure / recovery test for truncate.
 * In this failure the DNs fail to recover the blocks and the NN triggers
 * lease recovery.
 * File stays in RecoveryInProgress until DataNodes report recovery.
 */
@Test
public void testTruncateFailure() throws IOException {
int startingFileSize = 2 * BLOCK_SIZE + BLOCK_SIZE / 2;
int toTruncate = 1;
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
final Path dir = new Path("/dir");
final Path p = new Path(dir, "testTruncateFailure");
{
// Truncating a file that is still open for write must be rejected.
FSDataOutputStream out = fs.create(p, false, BLOCK_SIZE, REPLICATION,
BLOCK_SIZE);
out.write(contents, 0, startingFileSize);
try {
fs.truncate(p, 0);
fail("Truncate must fail on open file.");
} catch (IOException expected) {
GenericTestUtils.assertExceptionContains(
"Failed to TRUNCATE_FILE", expected);
} finally {
out.close();
}
}
{
// Truncating a file that is open for append must also be rejected.
FSDataOutputStream out = fs.append(p);
try {
fs.truncate(p, 0);
fail("Truncate must fail for append.");
} catch (IOException expected) {
GenericTestUtils.assertExceptionContains(
"Failed to TRUNCATE_FILE", expected);
} finally {
out.close();
}
}
// Invalid argument: negative target length.
try {
fs.truncate(p, -1);
fail("Truncate must fail for a negative new length.");
} catch (HadoopIllegalArgumentException expected) {
GenericTestUtils.assertExceptionContains(
"Cannot truncate to a negative file size", expected);
}
// Invalid argument: target length larger than the current file size.
try {
fs.truncate(p, startingFileSize + 1);
fail("Truncate must fail for a larger new length.");
} catch (Exception expected) {
GenericTestUtils.assertExceptionContains(
"Cannot truncate to a larger file size", expected);
}
// Truncate target must be a regular file, not a directory.
try {
fs.truncate(dir, 0);
fail("Truncate must fail for a directory.");
} catch (Exception expected) {
GenericTestUtils.assertExceptionContains(
"Path is not a file", expected);
}
// Truncate target must exist.
try {
fs.truncate(new Path(dir, "non-existing"), 0);
fail("Truncate must fail for a non-existing file.");
} catch (Exception expected) {
GenericTestUtils.assertExceptionContains(
"File does not exist", expected);
}
// Truncate requires WRITE permission on the file.
fs.setPermission(p, FsPermission.createImmutable((short)0664));
{
final UserGroupInformation fooUgi =
UserGroupInformation.createUserForTesting("foo", new String[]{"foo"});
try {
final FileSystem foofs = DFSTestUtil.getFileSystemAs(fooUgi, conf);
foofs.truncate(p, 0);
fail("Truncate must fail for no WRITE permission.");
} catch (Exception expected) {
GenericTestUtils.assertExceptionContains(
"Permission denied", expected);
}
}
// With all DataNodes down, block recovery cannot complete, so a
// mid-block truncate stays pending (isReady == false).
cluster.shutdownDataNodes();
NameNodeAdapter.getLeaseManager(cluster.getNamesystem())
.setLeasePeriod(LOW_SOFTLIMIT, LOW_HARDLIMIT);
int newLength = startingFileSize - toTruncate;
boolean isReady = fs.truncate(p, newLength);
assertThat("truncate should have triggered block recovery.",
isReady, is(false));
{
// A second truncate while recovery is pending must be rejected.
try {
fs.truncate(p, 0);
fail("Truncate must fail since a truncate is already in progress.");
} catch (IOException expected) {
GenericTestUtils.assertExceptionContains(
"Failed to TRUNCATE_FILE", expected);
}
}
// Wait for the NameNode to take over the lease (lease recovery).
boolean recoveryTriggered = false;
for(int i = 0; i < RECOVERY_ATTEMPTS; i++) {
String leaseHolder =
NameNodeAdapter.getLeaseHolderForPath(cluster.getNameNode(),
p.toUri().getPath());
if(leaseHolder.equals(HdfsServerConstants.NAMENODE_LEASE_HOLDER)) {
recoveryTriggered = true;
break;
}
try { Thread.sleep(SLEEP); } catch (InterruptedException ignored) {}
}
assertThat("lease recovery should have occurred in ~" +
SLEEP * RECOVERY_ATTEMPTS + " ms.", recoveryTriggered, is(true));
// Restart DataNodes so block recovery can finally complete.
cluster.startDataNodes(conf, DATANODE_NUM, true,
StartupOption.REGULAR, null);
cluster.waitActive();
checkBlockRecovery(p);
// Restore the default lease periods.
NameNodeAdapter.getLeaseManager(cluster.getNamesystem())
.setLeasePeriod(HdfsServerConstants.LEASE_SOFTLIMIT_PERIOD,
HdfsServerConstants.LEASE_HARDLIMIT_PERIOD);
checkFullFile(p, newLength, contents);
fs.delete(p, false);
}
/**
 * The last block is truncated at mid. (non copy-on-truncate)
 * dn0 is shutdown before truncate and restarted after the truncate
 * succeeds; the truncated block must then be re-replicated to dn0 with
 * the same block id but a bumped generation stamp.
 */
@Test(timeout=60000)
public void testTruncateWithDataNodesRestart() throws Exception {
int startingFileSize = 3 * BLOCK_SIZE;
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
final Path p = new Path(parent, "testTruncateWithDataNodesRestart");
writeContents(contents, startingFileSize, p);
LocatedBlock oldBlock = getLocatedBlocks(p).getLastLocatedBlock();
int dn = 0;
int toTruncateLength = 1;
int newLength = startingFileSize - toTruncateLength;
// Truncate while dn0 is down; recovery cannot complete synchronously.
cluster.getDataNodes().get(dn).shutdown();
try {
boolean isReady = fs.truncate(p, newLength);
assertFalse(isReady);
} finally {
cluster.restartDataNode(dn, true, true);
cluster.waitActive();
}
checkBlockRecovery(p);
LocatedBlock newBlock = getLocatedBlocks(p).getLastLocatedBlock();
/*
* For non copy-on-truncate, the truncated block id is the same, but the
* GS should increase.
* The truncated block will be replicated to dn0 after it restarts.
*/
assertEquals(newBlock.getBlock().getBlockId(),
oldBlock.getBlock().getBlockId());
assertEquals(newBlock.getBlock().getGenerationStamp(),
oldBlock.getBlock().getGenerationStamp() + 1);
// Wait replicas come to 3
DFSTestUtil.waitReplication(fs, p, REPLICATION);
// Old replica is disregarded and replaced with the truncated one
assertEquals(cluster.getBlockFile(dn, newBlock.getBlock()).length(),
newBlock.getBlockSize());
// The on-disk meta file name must carry the new generation stamp.
assertTrue(cluster.getBlockMetadataFile(dn,
newBlock.getBlock()).getName().endsWith(
newBlock.getBlock().getGenerationStamp() + ".meta"));
// Validate the file
FileStatus fileStatus = fs.getFileStatus(p);
assertThat(fileStatus.getLen(), is((long) newLength));
checkFullFile(p, newLength, contents);
fs.delete(parent, true);
}
/**
 * The last block is truncated at mid. (copy-on-truncate, because the file
 * is in a snapshot)
 * dn1 is shutdown before truncate and restarted after the truncate
 * succeeds; both the new truncated block and the snapshot-referenced old
 * block must then exist on dn1.
 */
@Test(timeout=60000)
public void testCopyOnTruncateWithDataNodesRestart() throws Exception {
int startingFileSize = 3 * BLOCK_SIZE;
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
final Path p = new Path(parent, "testCopyOnTruncateWithDataNodesRestart");
writeContents(contents, startingFileSize, p);
LocatedBlock oldBlock = getLocatedBlocks(p).getLastLocatedBlock();
// The snapshot forces copy-on-truncate: the old block must be kept.
fs.allowSnapshot(parent);
fs.createSnapshot(parent, "ss0");
int dn = 1;
int toTruncateLength = 1;
int newLength = startingFileSize - toTruncateLength;
cluster.getDataNodes().get(dn).shutdown();
try {
boolean isReady = fs.truncate(p, newLength);
assertFalse(isReady);
} finally {
cluster.restartDataNode(dn, true, true);
cluster.waitActive();
}
checkBlockRecovery(p);
LocatedBlock newBlock = getLocatedBlocks(p).getLastLocatedBlock();
/*
* For copy-on-truncate, new block is made with new block id and new GS.
* The replicas of the new block is 2, then it will be replicated to dn1.
*/
assertNotEquals(newBlock.getBlock().getBlockId(),
oldBlock.getBlock().getBlockId());
assertEquals(newBlock.getBlock().getGenerationStamp(),
oldBlock.getBlock().getGenerationStamp() + 1);
// Wait replicas come to 3
DFSTestUtil.waitReplication(fs, p, REPLICATION);
// New block is replicated to dn1
assertEquals(cluster.getBlockFile(dn, newBlock.getBlock()).length(),
newBlock.getBlockSize());
// Old replica exists too since there is snapshot
assertEquals(cluster.getBlockFile(dn, oldBlock.getBlock()).length(),
oldBlock.getBlockSize());
assertTrue(cluster.getBlockMetadataFile(dn,
oldBlock.getBlock()).getName().endsWith(
oldBlock.getBlock().getGenerationStamp() + ".meta"));
// Validate the file
FileStatus fileStatus = fs.getFileStatus(p);
assertThat(fileStatus.getLen(), is((long) newLength));
checkFullFile(p, newLength, contents);
fs.deleteSnapshot(parent, "ss0");
fs.delete(parent, true);
}
/**
 * The last block is truncated at mid. (non copy-on-truncate)
 * dn0 and dn1 are restarted immediately after the truncate is issued;
 * recovery must still complete and both nodes must end up with the
 * truncated replica.
 */
@Test(timeout=60000)
public void testTruncateWithDataNodesRestartImmediately() throws Exception {
int startingFileSize = 3 * BLOCK_SIZE;
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
final Path p = new Path(parent, "testTruncateWithDataNodesRestartImmediately");
writeContents(contents, startingFileSize, p);
LocatedBlock oldBlock = getLocatedBlocks(p).getLastLocatedBlock();
int dn0 = 0;
int dn1 = 1;
int toTruncateLength = 1;
int newLength = startingFileSize - toTruncateLength;
boolean isReady = fs.truncate(p, newLength);
assertFalse(isReady);
// Restart two of the DataNodes while block recovery is in flight.
cluster.restartDataNode(dn0, true, true);
cluster.restartDataNode(dn1, true, true);
cluster.waitActive();
checkBlockRecovery(p);
LocatedBlock newBlock = getLocatedBlocks(p).getLastLocatedBlock();
/*
* For non copy-on-truncate, the truncated block id is the same, but the
* GS should increase.
*/
assertEquals(newBlock.getBlock().getBlockId(),
oldBlock.getBlock().getBlockId());
assertEquals(newBlock.getBlock().getGenerationStamp(),
oldBlock.getBlock().getGenerationStamp() + 1);
Thread.sleep(2000);
// trigger the second time BR to delete the corrupted replica if there's one
cluster.triggerBlockReports();
// Wait replicas come to 3
DFSTestUtil.waitReplication(fs, p, REPLICATION);
// Old replica is disregarded and replaced with the truncated one on dn0
assertEquals(cluster.getBlockFile(dn0, newBlock.getBlock()).length(),
newBlock.getBlockSize());
assertTrue(cluster.getBlockMetadataFile(dn0,
newBlock.getBlock()).getName().endsWith(
newBlock.getBlock().getGenerationStamp() + ".meta"));
// Old replica is disregarded and replaced with the truncated one on dn1
assertEquals(cluster.getBlockFile(dn1, newBlock.getBlock()).length(),
newBlock.getBlockSize());
assertTrue(cluster.getBlockMetadataFile(dn1,
newBlock.getBlock()).getName().endsWith(
newBlock.getBlock().getGenerationStamp() + ".meta"));
// Validate the file
FileStatus fileStatus = fs.getFileStatus(p);
assertThat(fileStatus.getLen(), is((long) newLength));
checkFullFile(p, newLength, contents);
fs.delete(parent, true);
}
/**
 * The last block is truncated at mid. (non copy-on-truncate)
 * All DataNodes are shut down immediately after the truncate is issued;
 * the file must stay under construction until the nodes come back and
 * block recovery can complete.
 */
@Test(timeout=60000)
public void testTruncateWithDataNodesShutdownImmediately() throws Exception {
int startingFileSize = 3 * BLOCK_SIZE;
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
final Path p = new Path(parent, "testTruncateWithDataNodesShutdownImmediately");
writeContents(contents, startingFileSize, p);
int toTruncateLength = 1;
int newLength = startingFileSize - toTruncateLength;
boolean isReady = fs.truncate(p, newLength);
assertFalse(isReady);
cluster.shutdownDataNodes();
cluster.setDataNodesDead();
try {
// Wait (bounded) for the cluster to notice all DataNodes are down.
for(int i = 0; i < SUCCESS_ATTEMPTS && cluster.isDataNodeUp(); i++) {
Thread.sleep(SLEEP);
}
assertFalse("All DataNodes should be down.", cluster.isDataNodeUp());
// With no DataNodes, recovery cannot finish: still under construction.
LocatedBlocks blocks = getLocatedBlocks(p);
assertTrue(blocks.isUnderConstruction());
} finally {
cluster.startDataNodes(conf, DATANODE_NUM, true,
StartupOption.REGULAR, null);
cluster.waitActive();
}
checkBlockRecovery(p);
fs.delete(parent, true);
}
/**
 * EditLogOp load test for Truncate: restart the NameNode so the pending
 * truncate is replayed from the edit log, then recover the lease and
 * verify the file contents.
 */
@Test
public void testTruncateEditLogLoad() throws IOException {
// purge previously accumulated edits
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace();
fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
int startingFileSize = 2 * BLOCK_SIZE + BLOCK_SIZE / 2;
int toTruncate = 1;
final String s = "/testTruncateEditLogLoad";
final Path p = new Path(s);
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
writeContents(contents, startingFileSize, p);
int newLength = startingFileSize - toTruncate;
boolean isReady = fs.truncate(p, newLength);
assertThat("truncate should have triggered block recovery.",
isReady, is(false));
// Restart forces the TruncateOp to be loaded from the edit log.
cluster.restartNameNode();
// Lease recovery is needed because the restart interrupted recovery.
String holder = UserGroupInformation.getCurrentUser().getUserName();
cluster.getNamesystem().recoverLease(s, holder, "");
checkBlockRecovery(p);
checkFullFile(p, newLength, contents);
fs.delete(p, false);
}
/**
 * Upgrade, RollBack, and restart test for Truncate.
 * During an upgrade truncate must copy-on-write (so rollback can restore
 * the old block); after rollback the pre-upgrade length must be restored
 * and the total block count must be unchanged throughout.
 */
@Test
public void testUpgradeAndRestart() throws IOException {
fs.mkdirs(parent);
fs.setQuota(parent, 100, 1000);
fs.allowSnapshot(parent);
String truncateFile = "testUpgrade";
final Path p = new Path(parent, truncateFile);
int startingFileSize = 2 * BLOCK_SIZE;
int toTruncate = 1;
byte[] contents = AppendTestUtil.initBuffer(startingFileSize);
writeContents(contents, startingFileSize, p);
Path snapshotDir = fs.createSnapshot(parent, "ss0");
Path snapshotFile = new Path(snapshotDir, truncateFile);
// First truncate happens before the upgrade.
int newLengthBeforeUpgrade = startingFileSize - toTruncate;
boolean isReady = fs.truncate(p, newLengthBeforeUpgrade);
assertThat("truncate should have triggered block recovery.",
isReady, is(false));
checkBlockRecovery(p);
checkFullFile(p, newLengthBeforeUpgrade, contents);
assertFileLength(snapshotFile, startingFileSize);
long totalBlockBefore = cluster.getNamesystem().getBlocksTotal();
restartCluster(StartupOption.UPGRADE);
assertThat("SafeMode should be OFF",
cluster.getNamesystem().isInSafeMode(), is(false));
assertThat("NameNode should be performing upgrade.",
cluster.getNamesystem().isUpgradeFinalized(), is(false));
FileStatus fileStatus = fs.getFileStatus(p);
assertThat(fileStatus.getLen(), is((long) newLengthBeforeUpgrade));
// Second truncate happens while the upgrade is not finalized.
int newLengthAfterUpgrade = newLengthBeforeUpgrade - toTruncate;
Block oldBlk = getLocatedBlocks(p).getLastLocatedBlock()
.getBlock().getLocalBlock();
isReady = fs.truncate(p, newLengthAfterUpgrade);
assertThat("truncate should have triggered block recovery.",
isReady, is(false));
fileStatus = fs.getFileStatus(p);
assertThat(fileStatus.getLen(), is((long) newLengthAfterUpgrade));
assertThat("Should copy on truncate during upgrade",
getLocatedBlocks(p).getLastLocatedBlock().getBlock()
.getLocalBlock().getBlockId(), is(not(equalTo(oldBlk.getBlockId()))));
checkBlockRecovery(p);
checkFullFile(p, newLengthAfterUpgrade, contents);
assertThat("Total block count should be unchanged from copy-on-truncate",
cluster.getNamesystem().getBlocksTotal(), is(totalBlockBefore));
// Rolling back must restore the pre-upgrade length.
restartCluster(StartupOption.ROLLBACK);
assertThat("File does not exist " + p, fs.exists(p), is(true));
fileStatus = fs.getFileStatus(p);
assertThat(fileStatus.getLen(), is((long) newLengthBeforeUpgrade));
checkFullFile(p, newLengthBeforeUpgrade, contents);
assertThat("Total block count should be unchanged from rolling back",
cluster.getNamesystem().getBlocksTotal(), is(totalBlockBefore));
restartCluster(StartupOption.REGULAR);
assertThat("Total block count should be unchanged from start-up",
cluster.getNamesystem().getBlocksTotal(), is(totalBlockBefore));
checkFullFile(p, newLengthBeforeUpgrade, contents);
assertFileLength(snapshotFile, startingFileSize);
// empty edits and restart
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace();
cluster.restartNameNode(true);
assertThat("Total block count should be unchanged from start-up",
cluster.getNamesystem().getBlocksTotal(), is(totalBlockBefore));
checkFullFile(p, newLengthBeforeUpgrade, contents);
assertFileLength(snapshotFile, startingFileSize);
fs.deleteSnapshot(parent, "ss0");
fs.delete(parent, true);
assertThat("File " + p + " shouldn't exist", fs.exists(p), is(false));
}
/**
 * Check truncate recovery setup in prepareFileForTruncate:
 * without a snapshot reference the truncate is in-place (same block id,
 * new generation stamp); with a snapshot reference it is copy-on-write
 * (new block id).
 */
@Test
public void testTruncateRecovery() throws IOException {
FSNamesystem fsn = cluster.getNamesystem();
String client = "client";
String clientMachine = "clientMachine";
String src = "/test/testTruncateRecovery";
Path srcPath = new Path(src);
byte[] contents = AppendTestUtil.initBuffer(BLOCK_SIZE);
writeContents(contents, BLOCK_SIZE, srcPath);
INodesInPath iip = fsn.getFSDirectory().getINodesInPath4Write(src, true);
INodeFile file = iip.getLastINode().asFile();
long initialGenStamp = file.getLastBlock().getGenerationStamp();
// Test that prepareFileForTruncate sets up in-place truncate.
fsn.writeLock();
try {
Block oldBlock = file.getLastBlock();
Block truncateBlock = FSDirTruncateOp.prepareFileForTruncate(fsn, iip,
client, clientMachine, 1, null);
// In-place truncate uses old block id with new genStamp.
assertThat(truncateBlock.getBlockId(),
is(equalTo(oldBlock.getBlockId())));
assertThat(truncateBlock.getNumBytes(),
is(oldBlock.getNumBytes()));
assertThat(truncateBlock.getGenerationStamp(),
is(fsn.getBlockIdManager().getGenerationStampV2()));
assertThat(file.getLastBlock().getBlockUCState(),
is(HdfsServerConstants.BlockUCState.UNDER_RECOVERY));
// Recovery id is the next generation stamp after the original.
long blockRecoveryId = file.getLastBlock().getUnderConstructionFeature()
.getBlockRecoveryId();
assertThat(blockRecoveryId, is(initialGenStamp + 1));
fsn.getEditLog().logTruncate(
src, client, clientMachine, BLOCK_SIZE-1, Time.now(), truncateBlock);
} finally {
fsn.writeUnlock();
}
// Re-create file and ensure we are ready to copy on truncate
writeContents(contents, BLOCK_SIZE, srcPath);
fs.allowSnapshot(parent);
fs.createSnapshot(parent, "ss0");
iip = fsn.getFSDirectory().getINodesInPath(src, true);
file = iip.getLastINode().asFile();
file.recordModification(iip.getLatestSnapshotId(), true);
assertThat(file.isBlockInLatestSnapshot(file.getLastBlock()), is(true));
initialGenStamp = file.getLastBlock().getGenerationStamp();
// Test that prepareFileForTruncate sets up copy-on-write truncate
fsn.writeLock();
try {
Block oldBlock = file.getLastBlock();
Block truncateBlock = FSDirTruncateOp.prepareFileForTruncate(fsn, iip,
client, clientMachine, 1, null);
// Copy-on-write truncate makes new block with new id and genStamp
assertThat(truncateBlock.getBlockId(),
is(not(equalTo(oldBlock.getBlockId()))));
assertThat(truncateBlock.getNumBytes() < oldBlock.getNumBytes(),
is(true));
assertThat(truncateBlock.getGenerationStamp(),
is(fsn.getBlockIdManager().getGenerationStampV2()));
assertThat(file.getLastBlock().getBlockUCState(),
is(HdfsServerConstants.BlockUCState.UNDER_RECOVERY));
long blockRecoveryId = file.getLastBlock().getUnderConstructionFeature()
.getBlockRecoveryId();
assertThat(blockRecoveryId, is(initialGenStamp + 1));
fsn.getEditLog().logTruncate(
src, client, clientMachine, BLOCK_SIZE-1, Time.now(), truncateBlock);
} finally {
fsn.writeUnlock();
}
checkBlockRecovery(srcPath);
fs.deleteSnapshot(parent, "ss0");
fs.delete(parent, true);
}
/**
 * Truncate to a mid-block length via the {@code -truncate} shell command;
 * block recovery is required before the new length is visible.
 */
@Test
public void testTruncateShellCommand() throws Exception {
final Path src = new Path("/test/testTruncateShellCommand");
final int oldLength = 2*BLOCK_SIZE + 1;
final int newLength = BLOCK_SIZE + 1;
String[] argv =
new String[]{"-truncate", String.valueOf(newLength), src.toString()};
runTruncateShellCommand(src, oldLength, argv);
// wait for block recovery
checkBlockRecovery(src);
assertThat(fs.getFileStatus(src).getLen(), is((long) newLength));
fs.delete(parent, true);
}
/**
 * Truncate exactly on a block boundary via the shell command;
 * no block recovery should be needed.
 */
@Test
public void testTruncateShellCommandOnBlockBoundary() throws Exception {
final Path src = new Path("/test/testTruncateShellCommandOnBoundary");
final int oldLength = 2 * BLOCK_SIZE;
final int newLength = BLOCK_SIZE;
String[] argv =
new String[]{"-truncate", String.valueOf(newLength), src.toString()};
runTruncateShellCommand(src, oldLength, argv);
// shouldn't need to wait for block recovery
assertThat(fs.getFileStatus(src).getLen(), is((long) newLength));
fs.delete(parent, true);
}
/**
 * Truncate to a mid-block length via the shell command with {@code -w};
 * the shell itself waits for block recovery, so the new length is
 * visible as soon as the command returns.
 */
@Test
public void testTruncateShellCommandWithWaitOption() throws Exception {
final Path src = new Path("/test/testTruncateShellCommandWithWaitOption");
final int oldLength = 2 * BLOCK_SIZE + 1;
final int newLength = BLOCK_SIZE + 1;
String[] argv = new String[]{"-truncate", "-w", String.valueOf(newLength),
src.toString()};
runTruncateShellCommand(src, oldLength, argv);
// shouldn't need to wait for block recovery
assertThat(fs.getFileStatus(src).getLen(), is((long) newLength));
fs.delete(parent, true);
}
/**
 * Creates {@code src} with {@code oldLength} bytes of test data and then
 * runs the given truncate shell options through {@link FsShell},
 * expecting a zero exit code.
 */
private void runTruncateShellCommand(Path src, int oldLength,
    String[] shellOpts) throws Exception {
  // Populate the file and sanity-check its initial length.
  writeContents(AppendTestUtil.initBuffer(oldLength), oldLength, src);
  assertThat(fs.getFileStatus(src).getLen(), is((long) oldLength));
  // Drive the truncate through the command-line entry point.
  final FsShell shell = new FsShell(conf);
  try {
    assertThat(ToolRunner.run(shell, shellOpts), is(0));
  } finally {
    shell.close();
  }
}
/**
 * Truncate through a symlink; the operation must resolve the link and
 * truncate the target file, updating its length and space accounting.
 */
@Test
public void testTruncate4Symlink() throws IOException {
final int fileLength = 3 * BLOCK_SIZE;
fs.mkdirs(parent);
final byte[] contents = AppendTestUtil.initBuffer(fileLength);
final Path file = new Path(parent, "testTruncate4Symlink");
writeContents(contents, fileLength, file);
final Path link = new Path(parent, "link");
fs.createSymlink(file, link, false);
// fileLength/3 == BLOCK_SIZE, a block boundary, so no recovery needed.
final int newLength = fileLength/3;
boolean isReady = fs.truncate(link, newLength);
assertTrue("Recovery is not expected.", isReady);
// The truncate must have been applied to the link target.
FileStatus fileStatus = fs.getFileStatus(file);
assertThat(fileStatus.getLen(), is((long) newLength));
ContentSummary cs = fs.getContentSummary(parent);
assertEquals("Bad disk space usage",
cs.getSpaceConsumed(), newLength * REPLICATION);
// validate the file content
checkFullFile(file, newLength, contents);
fs.delete(parent, true);
}
static void writeContents(byte[] contents, int fileLength, Path p)
throws IOException {
FSDataOutputStream out = fs.create(p, true, BLOCK_SIZE, REPLICATION,
BLOCK_SIZE);
out.write(contents, 0, fileLength);
out.close();
}
  /** Waits for block recovery of {@code p} on the shared test file system. */
  static void checkBlockRecovery(Path p) throws IOException {
    checkBlockRecovery(p, fs);
  }
  /**
   * Waits for block recovery of {@code p} on {@code dfs} using the default
   * attempt count and sleep interval.
   */
  public static void checkBlockRecovery(Path p, DistributedFileSystem dfs)
      throws IOException {
    checkBlockRecovery(p, dfs, SUCCESS_ATTEMPTS, SLEEP);
  }
public static void checkBlockRecovery(Path p, DistributedFileSystem dfs,
int attempts, long sleepMs) throws IOException {
boolean success = false;
for(int i = 0; i < attempts; i++) {
LocatedBlocks blocks = getLocatedBlocks(p, dfs);
boolean noLastBlock = blocks.getLastLocatedBlock() == null;
if(!blocks.isUnderConstruction() &&
(noLastBlock || blocks.isLastBlockComplete())) {
success = true;
break;
}
try { Thread.sleep(sleepMs); } catch (InterruptedException ignored) {}
}
assertThat("inode should complete in ~" + sleepMs * attempts + " ms.",
success, is(true));
}
  /** Returns all located blocks of {@code src} from the shared file system. */
  static LocatedBlocks getLocatedBlocks(Path src) throws IOException {
    return getLocatedBlocks(src, fs);
  }
  /** Fetches every located block of {@code src} (offset 0, unbounded length). */
  static LocatedBlocks getLocatedBlocks(Path src, DistributedFileSystem dfs)
      throws IOException {
    return dfs.getClient().getLocatedBlocks(src.toString(), 0, Long.MAX_VALUE);
  }
  /** Asserts that the namesystem's BlocksMap still contains {@code blk}. */
  static void assertBlockExists(Block blk) {
    assertNotNull("BlocksMap does not contain block: " + blk,
        cluster.getNamesystem().getStoredBlock(blk));
  }
  /** Asserts that the namesystem's BlocksMap no longer contains {@code blk}. */
  static void assertBlockNotPresent(Block blk) {
    assertNull("BlocksMap should not contain block: " + blk,
        cluster.getNamesystem().getStoredBlock(blk));
  }
  /** Reads {@code file} fully and asserts the data read is {@code length} bytes. */
  static void assertFileLength(Path file, long length) throws IOException {
    byte[] data = DFSTestUtil.readFileBuffer(fs, file);
    assertEquals("Wrong data size in snapshot.", length, data.length);
  }
  /** Verifies the first {@code newLength} bytes of {@code p} equal {@code contents}. */
  static void checkFullFile(Path p, int newLength, byte[] contents)
      throws IOException {
    AppendTestUtil.checkFullFile(fs, p, newLength, contents, p.toString());
  }
static void restartCluster(StartupOption o)
throws IOException {
cluster.shutdown();
if(StartupOption.ROLLBACK == o)
NameNode.doRollback(conf, false);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(DATANODE_NUM)
.format(false)
.nameNodePort(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT)
.startupOption(o==StartupOption.ROLLBACK ? StartupOption.REGULAR : o)
.dnStartupOption(o!=StartupOption.ROLLBACK ? StartupOption.REGULAR : o)
.build();
fs = cluster.getFileSystem();
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.obfuscation;
import org.spongepowered.despector.ast.SourceSet;
import org.spongepowered.despector.config.LibraryConfiguration;
import org.spongepowered.despector.decompiler.BaseDecompiler;
import org.spongepowered.despector.decompiler.Decompilers;
import org.spongepowered.despector.decompiler.DirectoryWalker;
import org.spongepowered.despector.util.TypeHelper;
import org.spongepowered.despector.util.serialization.AstLoader;
import org.spongepowered.despector.util.serialization.MessagePacker;
import org.spongepowered.obfuscation.config.ObfConfigManager;
import org.spongepowered.obfuscation.data.MappingUsageFinder;
import org.spongepowered.obfuscation.data.MappingsIO;
import org.spongepowered.obfuscation.data.MappingsSet;
import org.spongepowered.obfuscation.data.MappingsSet.MethodMapping;
import org.spongepowered.obfuscation.data.UnknownMemberMapper;
import org.spongepowered.obfuscation.data.UnknownTypeMapper;
import org.spongepowered.obfuscation.merge.MergeEngine;
import org.spongepowered.obfuscation.merge.operation.CustomMethodMergers;
import org.spongepowered.obfuscation.merge.operation.MatchDiscreteFields;
import org.spongepowered.obfuscation.merge.operation.MatchDiscreteMethods;
import org.spongepowered.obfuscation.merge.operation.MatchEnums;
import org.spongepowered.obfuscation.merge.operation.MatchInnerClasses;
import org.spongepowered.obfuscation.merge.operation.MatchMethodGroups;
import org.spongepowered.obfuscation.merge.operation.MatchReferences;
import org.spongepowered.obfuscation.merge.operation.MatchStringConstants;
import org.spongepowered.obfuscation.merge.operation.MergeInitializers;
import org.spongepowered.obfuscation.merge.operation.MergeMatchedFields;
import org.spongepowered.obfuscation.merge.operation.MergeMatchedMethods;
import org.spongepowered.obfuscation.merge.operation.MergeMatchedTypes;
import org.spongepowered.obfuscation.merge.operation.MergeSyntheticOverloads;
import org.spongepowered.obfuscation.merge.operation.VoteCollector;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
public class ObfuscationMapper {

    /** Recognized command line flags, keyed by their literal prefix. */
    private static final Map<String, Consumer<String>> flags = new HashMap<>();
    /** When set, decompiled ASTs are cached to disk and reused on later runs. */
    private static boolean is_cached = false;
    /** When set, old types that found no match are written to unmatched.txt. */
    private static boolean output_unmatched = false;
    /** Optional path of known-good mappings used to score the result. */
    private static String validation_mappings = null;
    /** Optional path of mappings used to seed the new mappings set. */
    private static String seed_mappings = null;
    /** Optional path of a previous run's mappings, used by the unknown mappers. */
    private static String prev_mappings = null;

    static {
        flags.put("--config=", (arg) -> {
            String config = arg.substring(9);
            Path config_path = Paths.get(".").resolve(config);
            ObfConfigManager.load(config_path);
            if (!Files.exists(config_path)) {
                ObfConfigManager.update();
            }
        });
        flags.put("--validation=", (arg) -> {
            validation_mappings = arg.substring(13);
        });
        flags.put("--seed=", (arg) -> {
            seed_mappings = arg.substring(7);
        });
        flags.put("--previous=", (arg) -> {
            prev_mappings = arg.substring(11);
        });
        flags.put("--cache", (arg) -> {
            is_cached = true;
        });
        flags.put("--output_unmatched", (arg) -> {
            output_unmatched = true;
        });
    }

    /**
     * Entry point. Decompiles (or loads cached ASTs of) the old and new jars,
     * merges the old mappings onto the new jar's source set, optionally
     * validates the result, and writes the resulting mappings.
     */
    public static void main(String[] args) throws IOException {
        if (args.length == 0) {
            System.out.println("Usage: java -jar ObfuscationMapper.jar old.jar old_mappings.srg new.jar output_mappings.srg");
            return;
        }
        LibraryConfiguration.quiet = true;
        LibraryConfiguration.parallel = false;

        // Positional arguments, in order; flags may appear anywhere.
        String old_jar = null;
        String old_mappings_dir = null;
        String new_jar = null;
        String output_mappings = null;
        int o = 0;
        outer: for (int i = 0; i < args.length; i++) {
            if (args[i].startsWith("-")) {
                for (String flag : flags.keySet()) {
                    if (args[i].startsWith(flag)) {
                        flags.get(flag).accept(args[i]);
                        continue outer;
                    }
                }
                System.err.println("Unknown flag: " + args[i]);
                // Bug fix: an unrecognized flag previously fell through to the
                // o++ below and silently consumed a positional argument slot.
                continue outer;
            } else if (o == 0) {
                old_jar = args[i];
            } else if (o == 1) {
                old_mappings_dir = args[i];
            } else if (o == 2) {
                new_jar = args[i];
            } else if (o == 3) {
                output_mappings = args[i];
            }
            o++;
        }
        if (new_jar == null) {
            System.out.println("Missing some args");
            System.out.println("Usage: java -jar ObfuscationMapper.jar old.jar old_mappings_dir new.jar");
            return;
        }
        if (output_mappings == null) {
            output_mappings = "output.srg";
        }

        Path root = Paths.get("");
        Path old_mappings_root = root.resolve(old_mappings_dir);
        if (!Files.exists(old_mappings_root)) {
            System.err.println("Unknown mappings: " + old_mappings_root.toAbsolutePath().toString());
            return;
        }
        MappingsSet old_mappings = MappingsIO.load(old_mappings_root);

        MappingsSet new_mappings = null;
        if (seed_mappings != null) {
            Path seed = root.resolve(seed_mappings);
            if (!Files.exists(seed)) {
                System.err.println("Unknown seed mappings: " + seed.toAbsolutePath().toString());
                return;
            }
            new_mappings = MappingsIO.load(seed);
        } else {
            new_mappings = new MappingsSet();
        }

        MappingsSet validation = loadOptionalMappings(root, validation_mappings, "Validation", "validation");
        MappingsSet previous = loadOptionalMappings(root, prev_mappings, "Previous", "previous");

        BaseDecompiler decompiler = Decompilers.JAVA;
        SourceSet old_sourceset = loadSourceSet(root, old_jar, decompiler, "older");
        SourceSet new_sourceset = loadSourceSet(root, new_jar, decompiler, "newer");

        MergeEngine engine = new MergeEngine(old_sourceset, old_mappings, new_sourceset, new_mappings);
        engine.addOperation(new MergeSyntheticOverloads());
        engine.addOperation(new MatchStringConstants());
        engine.addOperation(new MatchEnums());
        engine.addOperation(new MergeInitializers());
        engine.addOperation(new MatchReferences());
        engine.addOperation(new MatchDiscreteFields());
        engine.addOperation(new MatchMethodGroups());
        engine.addOperation(new MatchDiscreteMethods());
        engine.addOperation(new MatchInnerClasses());
        engine.addOperation(new MergeMatchedTypes());
        engine.addOperation(new CustomMethodMergers());
        engine.addOperation(new MergeMatchedMethods());
        engine.addOperation(new MergeMatchedFields());
        engine.addOperation(new VoteCollector());
        // Loop back to operation index 3 for as long as the last cycle
        // produced at least one change.
        engine.addOperation(MergeEngine.jumpTo(3, (e) -> {
            int ch = e.getChangesLastCycle();
            e.resetChanges();
            return ch > 0;
        }));
        engine.merge();

        // Count how much of the old mappings set is actually referenced, so
        // the coverage percentages below are meaningful.
        MappingUsageFinder usage = new MappingUsageFinder(old_mappings);
        old_sourceset.accept(usage);

        if (validation != null) {
            validate(new_mappings, validation, usage);
        } else {
            printStats(new_mappings, usage);
        }

        if (output_unmatched) {
            outputUnmatched(root, old_mappings, new_mappings, usage);
        }

        // Invent fresh names for types and members that found no counterpart,
        // reusing names from the previous run's mappings where possible.
        UnknownTypeMapper unknown_type = new UnknownTypeMapper(engine, new_mappings, previous);
        new_sourceset.accept(unknown_type);
        UnknownMemberMapper unknown = new UnknownMemberMapper(new_mappings, engine, previous);
        new_sourceset.accept(unknown);

        Path mappings_out = root.resolve(output_mappings);
        MappingsIO.write(mappings_out.toAbsolutePath(), new_mappings, unknown.getNext());
    }

    /**
     * Loads an optional auxiliary mappings set, returning null when the flag
     * was not given or when the file does not exist (with a warning).
     *
     * @param root     directory relative paths are resolved against
     * @param path_str flag value, or null when the flag was not supplied
     * @param desc     capitalized label for the "not found" message
     * @param desc_lc  lower-case label for the "Loading" message
     */
    private static MappingsSet loadOptionalMappings(Path root, String path_str, String desc, String desc_lc)
            throws IOException {
        if (path_str == null) {
            return null;
        }
        Path path = root.resolve(path_str);
        if (!Files.exists(path)) {
            System.err.println(desc + " mappings " + path_str + " not found");
            return null;
        }
        System.out.println("Loading " + desc_lc + " mappings");
        return MappingsIO.load(path);
    }

    /**
     * Loads the given jar into a SourceSet. When --cache is set a serialized
     * AST next to the working directory is reused if present, and written
     * after a fresh decompile. This replaces the two near-identical blocks
     * the original main() contained for the old and new jars.
     *
     * @param root       directory relative paths are resolved against
     * @param jar        jar (or class directory) to load
     * @param decompiler decompiler used when no cache is available
     * @param version    human readable label ("older" or "newer") for logging
     */
    private static SourceSet loadSourceSet(Path root, String jar, BaseDecompiler decompiler, String version)
            throws IOException {
        SourceSet sourceset = new SourceSet();
        Path serialized = root.resolve(jar.replace('/', '_').replace('\\', '_') + ".ast");
        if (is_cached && Files.exists(serialized)) {
            long start = System.nanoTime();
            // try-with-resources: the original never closed this stream.
            try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(serialized.toFile()))) {
                AstLoader.loadSources(sourceset, in);
            }
            long end = System.nanoTime();
            System.out.println("Loaded cached ast with " + sourceset.getAllClasses().size() + " classes from the " + version + " version");
            System.out.println("Loaded in " + ((end - start) / 1000000) + "ms");
        } else {
            long start = System.nanoTime();
            DirectoryWalker walker = new DirectoryWalker(root.resolve(jar));
            walker.walk(sourceset, decompiler);
            decompiler.flushTasks();
            long end = System.nanoTime();
            System.out.println("Loaded and decompiled " + sourceset.getAllClasses().size() + " classes from the " + version + " version");
            System.out.println("Loaded in " + ((end - start) / 1000000) + "ms");
            int failed = LibraryConfiguration.failed_method_count;
            int total = LibraryConfiguration.total_method_count;
            System.out.println("Failed to decompile " + failed + "/" + total + " (" + ((failed / (float) total) * 100) + "%)");
            LibraryConfiguration.failed_method_count = 0;
            LibraryConfiguration.total_method_count = 0;
            if (is_cached) {
                try (MessagePacker packer = new MessagePacker(new FileOutputStream(serialized.toFile()))) {
                    sourceset.writeTo(packer);
                }
            }
        }
        return sourceset;
    }

    /** Prints package/class/field/method mapping coverage relative to usage. */
    private static void printStats(MappingsSet new_mappings, MappingUsageFinder usage) {
        System.out.println("Mapped " + new_mappings.packagesCount() + " packages");
        float type_percent = (new_mappings.typeCount() / (float) usage.getSeenTypes()) * 100.0f;
        System.out.printf("Mapped %d/%d classes (%.2f%%)\n", new_mappings.typeCount(), usage.getSeenTypes(), type_percent);
        float field_percent = (new_mappings.fieldCount() / (float) usage.getSeenFields()) * 100.0f;
        System.out.printf("Mapped %d/%d fields (%.2f%%)\n", new_mappings.fieldCount(), usage.getSeenFields(), field_percent);
        float method_percent = (new_mappings.methodCount() / (float) usage.getSeenMethods()) * 100.0f;
        System.out.printf("Mapped %d/%d methods (%.2f%%)\n", new_mappings.methodCount(), usage.getSeenMethods(), method_percent);
    }

    /**
     * Compares the generated mappings against a known-good validation set,
     * printing each mismatch, the coverage statistics, the per-category error
     * counts, and the overall accuracy.
     */
    private static void validate(MappingsSet new_mappings, MappingsSet validation, MappingUsageFinder usage) {
        int type_validation_errors = 0;
        int method_validation_errors = 0;
        int field_validation_errors = 0;
        for (String mapped : new_mappings.getMappedTypes()) {
            String new_mapped = new_mappings.mapTypeSafe(mapped);
            String val_mapped = validation.mapType(mapped);
            if (!new_mapped.equals(val_mapped)) {
                System.out.println("Mapped " + mapped + " to " + new_mapped + " but should have been " + val_mapped);
                type_validation_errors++;
            }
        }
        for (String mapped : new_mappings.getMappedMethods()) {
            for (MethodMapping map : new_mappings.getMethods(mapped)) {
                String new_mapped = new_mappings.mapMethodSafe(map.getObfOwner(), map.getObf(), map.getObfSignature());
                String val_mapped = validation.mapMethodSafe(map.getObfOwner(), map.getObf(), map.getObfSignature());
                if (!new_mapped.equals(val_mapped)) {
                    System.out.println("Mapped method " + mapped + " to " + new_mapped + " but should have been " + val_mapped);
                    method_validation_errors++;
                }
            }
        }
        for (String mapped : new_mappings.getMappedFields()) {
            String new_mapped = new_mappings.mapField(mapped);
            String val_mapped = validation.mapField(mapped);
            if (!new_mapped.equals(val_mapped)) {
                System.out.println("Mapped field " + mapped + " to " + new_mapped + " but should have been " + val_mapped);
                field_validation_errors++;
            }
        }
        printStats(new_mappings, usage);
        System.out.println("Type validation errors: " + type_validation_errors);
        System.out.println("Field validation errors: " + field_validation_errors);
        System.out.println("Method validation errors: " + method_validation_errors);
        int total_mapped = new_mappings.typeCount() + new_mappings.fieldCount() + new_mappings.methodCount();
        int error_count = type_validation_errors + method_validation_errors + field_validation_errors;
        System.out.printf("Accuracy: %.2f%%\n", (1 - (error_count / (float) total_mapped)) * 100);
    }

    /**
     * Writes a sorted list of old (non-anonymous, actually used) types that
     * found no counterpart in the new mappings to unmatched.txt.
     */
    private static void outputUnmatched(Path root, MappingsSet old_mappings, MappingsSet new_mappings,
            MappingUsageFinder usage) throws IOException {
        List<String> unmatched_types = new ArrayList<>();
        for (String obf : old_mappings.getMappedTypes()) {
            if (!usage.sawType(obf)) {
                continue;
            }
            String mapped = old_mappings.mapType(obf);
            if (TypeHelper.isAnonClass(mapped)) {
                continue;
            }
            if (new_mappings.inverseType(mapped) == null) {
                unmatched_types.add(mapped);
            }
        }
        Collections.sort(unmatched_types);
        Path unmatched_out_path = root.resolve("unmatched.txt");
        System.out.println("Outputting unmatched types to " + unmatched_out_path.toAbsolutePath().toString());
        try (PrintWriter writer = new PrintWriter(unmatched_out_path.toFile())) {
            for (String type : unmatched_types) {
                writer.println(type);
            }
        }
    }
}
| |
/*******************************************************************************
* Copyright 2012 - Joakim Erdfelt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package net.erdfelt.android.sdkfido.ui;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.util.logging.Logger;
import javax.swing.ActionMap;
import javax.swing.ButtonGroup;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JTabbedPane;
import javax.swing.KeyStroke;
import javax.swing.LookAndFeel;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;
import javax.swing.UnsupportedLookAndFeelException;
import net.erdfelt.android.sdkfido.Build;
import net.erdfelt.android.sdkfido.ui.actions.ActionMapper;
import net.erdfelt.android.sdkfido.ui.actions.ActionTarget;
import net.erdfelt.android.sdkfido.ui.actions.KeyAction;
import net.erdfelt.android.sdkfido.ui.layout.GBC;
import net.erdfelt.android.sdkfido.ui.panels.ConsoleView;
import net.erdfelt.android.sdkfido.ui.panels.SdkFetchPanel;
import net.erdfelt.android.sdkfido.ui.panels.WorkDirPanel;
import org.apache.commons.lang.SystemUtils;
/**
 * Main application window for SDKFido: a tabbed pane (SDKs / Work Dir) above
 * a log console, with a menu bar for exit and look-and-feel selection.
 */
public class SdkFidoFrame extends JFrame {
    private static final long serialVersionUID = 1L;
    private static final Logger LOG = Logger.getLogger(SdkFidoFrame.class.getName());
    // Dispatches action commands (e.g. "exit") to @ActionTarget methods on this frame.
    private ActionMapper actionMapper;
    // Persists window size/location and handles window close events.
    private WindowHandler winhandler;
    // Console panel at the bottom of the frame; its logger is attached in setVisible().
    private ConsoleView console;
    public SdkFidoFrame() {
        LOG.info("SdkFidoFrame Start");
        initgui();
    }
    /**
     * Builds the entire UI. Ordering matters here: the persisted look and
     * feel is applied before any components are created, and pack() runs
     * before the preferred size is captured as the minimum size.
     */
    private void initgui() {
        setName("SdkFidoFrame");
        this.winhandler = new WindowHandler(this, true);
        this.winhandler.setPersistLocation(true);
        this.winhandler.setPersistSize(true);
        this.actionMapper = new ActionMapper(this);
        // Restore the last used look and feel, defaulting to cross-platform.
        String lnf = Prefs.getString("looknfeel", UIManager.getCrossPlatformLookAndFeelClassName());
        setLookAndFeel(lnf);
        setTitle("SDKFido - " + Build.getVersion());
        getContentPane().setLayout(new BorderLayout());
        enableExitKey();
        // Menu Bar
        setJMenuBar(createMainMenu());
        Container content = getContentPane();
        content.add(BorderLayout.CENTER, createBody());
        pack();
        Dimension minDim = getPreferredSize();
        setMinimumSize(minDim);
        setDefaultCloseOperation(DISPOSE_ON_CLOSE);
        this.winhandler.setSizePreferred(new Dimension(400, 400));
        addWindowListener(this.winhandler);
    }
    /** Creates the central body: tab pane on top, console filling the rest. */
    private Component createBody() {
        Container body = new Container();
        body.setLayout(new GridBagLayout());
        JTabbedPane tabs = new JTabbedPane();
        tabs.addTab("SDKs", createSdkPanel());
        tabs.addTab("Work Dir", createWorkDirPanel());
        body.add(tabs, new GBC().fillWide().margin(5, 5, 5, 5).endRow());
        body.add(createConsolePane(), new GBC().fillBoth().margin(0, 5, 5, 5).weightTall(1.0).endBoth());
        return body;
    }
    // Creates and remembers the console view (field is read later by setVisible()).
    private ConsoleView createConsolePane() {
        console = new ConsoleView();
        return console;
    }
    /** Creates the "Work Dir" tab content. */
    private Component createWorkDirPanel() {
        WorkDirPanel workDirPanel = new WorkDirPanel();
        return workDirPanel;
    }
    /** Creates the "SDKs" tab content. */
    private Component createSdkPanel() {
        SdkFetchPanel sdkPanel = new SdkFetchPanel();
        return sdkPanel;
    }
    /** Assembles the menu bar: File and View menus. */
    private JMenuBar createMainMenu() {
        JMenuBar mainMenu = new JMenuBar();
        mainMenu.add(createFileMenu());
        mainMenu.add(createViewMenu());
        return mainMenu;
    }
    /** Builds the File menu with a single Exit item routed through actionMapper. */
    private JMenu createFileMenu() {
        JMenu fileMenu = new JMenu("File");
        fileMenu.setMnemonic('f');
        JMenuItem fileExit = new JMenuItem("Exit");
        fileExit.setMnemonic('x');
        fileExit.setActionCommand("exit");
        fileExit.addActionListener(actionMapper);
        fileMenu.add(fileExit);
        return fileMenu;
    }
    /**
     * Builds the View menu with a radio-button group of every installed look
     * and feel; the currently active one is pre-selected.
     */
    private JMenu createViewMenu() {
        JMenu viewMenu = new JMenu("View");
        viewMenu.setMnemonic('v');
        JMenu lnfMenu = new JMenu("Look and Feel");
        lnfMenu.setMnemonic('f');
        ButtonGroup lnfGroup = new ButtonGroup();
        LookAndFeelInfo lnfs[] = UIManager.getInstalledLookAndFeels();
        String lnfCurrentName = null;
        LookAndFeel lnfCurrent = UIManager.getLookAndFeel();
        if (lnfCurrent != null) {
            lnfCurrentName = lnfCurrent.getClass().getName();
        }
        UISwitcher switcher = new UISwitcher();
        for (int i = 0; i < lnfs.length; i++) {
            JRadioButtonMenuItem lnfItem = new JRadioButtonMenuItem(lnfs[i].getName());
            lnfItem.addActionListener(switcher);
            // The action command carries the L&F class name to apply on click.
            lnfItem.setActionCommand(lnfs[i].getClassName());
            lnfGroup.add(lnfItem);
            lnfMenu.add(lnfItem);
            if (lnfs[i].getClassName().equals(lnfCurrentName)) {
                lnfGroup.setSelected(lnfItem.getModel(), true);
            }
        }
        viewMenu.add(lnfMenu);
        return viewMenu;
    }
    /**
     * Registers platform-appropriate quit shortcuts (Ctrl+F4 / Ctrl+Q on
     * Unix/Windows, Cmd+Q on Mac) that trigger the "exit" action.
     */
    private void enableExitKey() {
        InputMap rootInput = getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW);
        ActionMap rootAction = getRootPane().getActionMap();
        if (SystemUtils.IS_OS_UNIX || SystemUtils.IS_OS_WINDOWS) {
            rootInput.put(KeyStroke.getKeyStroke(KeyEvent.VK_F4, InputEvent.CTRL_DOWN_MASK), "exit");
            rootInput.put(KeyStroke.getKeyStroke(KeyEvent.VK_Q, InputEvent.CTRL_DOWN_MASK), "exit");
        }
        if (SystemUtils.IS_OS_MAC) {
            rootInput.put(KeyStroke.getKeyStroke(KeyEvent.VK_Q, InputEvent.META_DOWN_MASK), "exit");
        }
        rootAction.put("exit", new KeyAction(actionMapper, "exit"));
    }
    /** Action target for "exit": delegates shutdown to the window handler. */
    @ActionTarget(name = "exit")
    public void doExit(ActionEvent event) {
        winhandler.close();
    }
    /** Action target for "gitupdate": not implemented yet, only logged. */
    @ActionTarget(name = "gitupdate")
    public void doGitUpdate(ActionEvent event) {
        // TODO
        TODO("Implement Git Update Action");
    }
    // Logs a reminder for unimplemented functionality.
    private void TODO(String msg) {
        LOG.warning("!TODO! - " + msg);
    }
    // NOTE(review): attachLogger() runs on every visibility change, including
    // setVisible(false) — confirm this is intended.
    @Override
    public void setVisible(boolean b) {
        super.setVisible(b);
        console.attachLogger();
    }
    /**
     * Applies the given look and feel class, refreshes the component tree and
     * persists the choice. Failures are logged, not thrown.
     */
    public void setLookAndFeel(String uiclassname) {
        try {
            UIManager.setLookAndFeel(uiclassname);
            SwingUtilities.updateComponentTreeUI(this);
            Prefs.setString("looknfeel", uiclassname);
            Prefs.save();
        } catch (ClassNotFoundException e1) {
            LOG.warning("Unable to set Look and Feel (it is missing).");
        } catch (InstantiationException e1) {
            LOG.warning("Unable to set Look and Feel (cannot be instantiated by JRE).");
        } catch (IllegalAccessException e1) {
            LOG.warning("Unable to set Look and Feel (cannot be used by JRE).");
        } catch (UnsupportedLookAndFeelException e1) {
            LOG.warning("Unable to set Look and Feel (not supported by JRE).");
        }
    }
    /** Menu listener that switches the look and feel to the item's action command. */
    public class UISwitcher implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            if (e.getSource() instanceof JRadioButtonMenuItem) {
                setLookAndFeel(e.getActionCommand());
            }
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.authentication.NoHdfsAuthentication;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.StorageFormat;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.testing.TestingConnectorSession;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.stats.CounterStat;
import io.airlift.units.DataSize;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.util.Progressable;
import org.testng.annotations.Test;
import java.net.URI;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Executor;
import static com.facebook.presto.hive.BackgroundHiveSplitLoader.BucketSplitInfo.createBucketSplitInfo;
import static com.facebook.presto.hive.HiveBucketing.HiveBucket;
import static com.facebook.presto.hive.HiveColumnHandle.pathColumnHandle;
import static com.facebook.presto.hive.HiveTestUtils.SESSION;
import static com.facebook.presto.hive.HiveType.HIVE_INT;
import static com.facebook.presto.hive.HiveType.HIVE_STRING;
import static com.facebook.presto.hive.HiveUtil.getRegularColumnHandles;
import static com.facebook.presto.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static com.facebook.presto.spi.predicate.TupleDomain.withColumnDomains;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertThrows;
public class TestBackgroundHiveSplitLoader
{
    // Number of buckets used by the bucketed table fixture.
    private static final int BUCKET_COUNT = 2;
    private static final String SAMPLE_PATH = "hdfs://VOL1:9000/db_name/table_name/000000_0";
    private static final String SAMPLE_PATH_FILTERED = "hdfs://VOL1:9000/db_name/table_name/000000_1";
    // RETURNED_PATH is expected to survive path filtering; FILTERED_PATH is not.
    private static final Path RETURNED_PATH = new Path(SAMPLE_PATH);
    private static final Path FILTERED_PATH = new Path(SAMPLE_PATH_FILTERED);
    private static final Executor EXECUTOR = newCachedThreadPool(daemonThreadsNamed("test-%s"));
    // Path-column domain that matches only RETURNED_PATH.
    private static final TupleDomain<HiveColumnHandle> RETURNED_PATH_DOMAIN = withColumnDomains(
            ImmutableMap.of(
                    pathColumnHandle(),
                    Domain.singleValue(VARCHAR, utf8Slice(RETURNED_PATH.toString()))));
    // One file that matches the path domain and one that does not.
    private static final List<LocatedFileStatus> TEST_FILES = ImmutableList.of(
            locatedFileStatus(RETURNED_PATH),
            locatedFileStatus(FILTERED_PATH));
    private static final List<Column> PARTITION_COLUMNS = ImmutableList.of(
            new Column("partitionColumn", HIVE_INT, Optional.empty()));
    private static final Optional<HiveBucketProperty> BUCKET_PROPERTY = Optional.of(
            new HiveBucketProperty(ImmutableList.of("col1"), BUCKET_COUNT));
    // SIMPLE_TABLE is unpartitioned/unbucketed; PARTITIONED_TABLE is both.
    private static final Table SIMPLE_TABLE = table(ImmutableList.of(), Optional.empty());
    private static final Table PARTITIONED_TABLE = table(PARTITION_COLUMNS, BUCKET_PROPERTY);
@Test
public void testNoPathFilter()
throws Exception
{
BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
TEST_FILES,
TupleDomain.none());
HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader, TupleDomain.none());
backgroundHiveSplitLoader.start(hiveSplitSource);
assertEquals(drain(hiveSplitSource).size(), 2);
}
@Test
public void testPathFilter()
throws Exception
{
BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
TEST_FILES,
RETURNED_PATH_DOMAIN);
HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader, RETURNED_PATH_DOMAIN);
backgroundHiveSplitLoader.start(hiveSplitSource);
List<String> paths = drain(hiveSplitSource);
assertEquals(paths.size(), 1);
assertEquals(paths.get(0), RETURNED_PATH.toString());
}
@Test
public void testPathFilterOneBucketMatchPartitionedTable()
throws Exception
{
BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
TEST_FILES,
RETURNED_PATH_DOMAIN,
ImmutableList.of(
new HiveBucket(0, BUCKET_COUNT),
new HiveBucket(1, BUCKET_COUNT)),
PARTITIONED_TABLE,
Optional.empty());
HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader, RETURNED_PATH_DOMAIN);
backgroundHiveSplitLoader.start(hiveSplitSource);
List<String> paths = drain(hiveSplitSource);
assertEquals(paths.size(), 1);
assertEquals(paths.get(0), RETURNED_PATH.toString());
}
@Test
public void testPathFilterBucketedPartitionedTable()
throws Exception
{
BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
TEST_FILES,
RETURNED_PATH_DOMAIN,
ImmutableList.of(),
PARTITIONED_TABLE,
Optional.of(
new HiveBucketHandle(
getRegularColumnHandles(PARTITIONED_TABLE),
BUCKET_COUNT)));
HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader, RETURNED_PATH_DOMAIN);
backgroundHiveSplitLoader.start(hiveSplitSource);
List<String> paths = drain(hiveSplitSource);
assertEquals(paths.size(), 1);
assertEquals(paths.get(0), RETURNED_PATH.toString());
}
@Test
public void testEmptyFileWithNoBlocks()
throws Exception
{
BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
ImmutableList.of(locatedFileStatusWithNoBlocks(RETURNED_PATH)),
TupleDomain.none());
HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader, TupleDomain.none());
backgroundHiveSplitLoader.start(hiveSplitSource);
List<HiveSplit> splits = drainSplits(hiveSplitSource);
assertEquals(splits.size(), 1);
assertEquals(splits.get(0).getPath(), RETURNED_PATH.toString());
assertEquals(splits.get(0).getLength(), 0);
}
@Test
public void testNoHangIfPartitionIsOffline()
throws Exception
{
BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoaderOfflinePartitions();
HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader, TupleDomain.all());
backgroundHiveSplitLoader.start(hiveSplitSource);
assertThrows(RuntimeException.class, () -> drain(hiveSplitSource));
assertThrows(RuntimeException.class, () -> hiveSplitSource.isFinished());
}
/**
 * Drains every split from the source and returns just the split file paths.
 */
private static List<String> drain(HiveSplitSource source)
        throws Exception
{
    ImmutableList.Builder<String> paths = ImmutableList.builder();
    for (HiveSplit split : drainSplits(source)) {
        paths.add(split.getPath());
    }
    return paths.build();
}
/**
 * Pulls batches of up to 100 splits from the source until it reports completion.
 */
private static List<HiveSplit> drainSplits(HiveSplitSource source)
        throws Exception
{
    ImmutableList.Builder<HiveSplit> drained = ImmutableList.builder();
    while (!source.isFinished()) {
        for (Object split : source.getNextBatch(NOT_PARTITIONED, 100).get().getSplits()) {
            drained.add((HiveSplit) split);
        }
    }
    return drained.build();
}
/**
 * Convenience overload: builds a loader over a non-bucketed {@code SIMPLE_TABLE}
 * with no bucket filtering.
 *
 * @param files the files the directory lister will report
 * @param tupleDomain predicate used to prune splits
 */
private static BackgroundHiveSplitLoader backgroundHiveSplitLoader(
        List<LocatedFileStatus> files,
        TupleDomain<HiveColumnHandle> tupleDomain)
{
    return backgroundHiveSplitLoader(
            files,
            tupleDomain,
            ImmutableList.of(),
            SIMPLE_TABLE,
            Optional.empty());
}
/**
 * Builds a {@link BackgroundHiveSplitLoader} over the given files with a single
 * synthetic unnamed partition, a 1GB max split size, and two loader threads.
 *
 * @param files the files the directory lister will report
 * @param compactEffectivePredicate predicate used to prune splits
 * @param hiveBuckets buckets to filter on; empty for no bucket filtering
 * @param table the table definition (simple or partitioned)
 * @param bucketHandle bucket handle; empty for non-bucketed layouts
 */
private static BackgroundHiveSplitLoader backgroundHiveSplitLoader(
        List<LocatedFileStatus> files,
        TupleDomain<HiveColumnHandle> compactEffectivePredicate,
        List<HiveBucket> hiveBuckets,
        Table table,
        Optional<HiveBucketHandle> bucketHandle)
{
    // One synthetic partition; these tests only exercise file/bucket filtering,
    // not real partition enumeration.
    List<HivePartitionMetadata> hivePartitionMetadatas =
            ImmutableList.of(
                    new HivePartitionMetadata(
                            new HivePartition(new SchemaTableName("testSchema", "table_name"), ImmutableList.of()),
                            Optional.empty(),
                            ImmutableMap.of()));
    ConnectorSession connectorSession = new TestingConnectorSession(
            new HiveSessionProperties(new HiveClientConfig().setMaxSplitSize(new DataSize(1.0, GIGABYTE)), new OrcFileWriterConfig()).getSessionProperties());
    return new BackgroundHiveSplitLoader(
            table,
            hivePartitionMetadatas,
            compactEffectivePredicate,
            createBucketSplitInfo(bucketHandle, hiveBuckets),
            connectorSession,
            new TestingHdfsEnvironment(),
            new NamenodeStats(),
            new TestingDirectoryLister(files),
            EXECUTOR,
            2,      // NOTE(review): presumably loader concurrency — confirm against the ctor
            false); // NOTE(review): presumably recursive directory walking disabled — confirm
}
/**
 * Builds a loader whose partition iterable throws mid-iteration, simulating an
 * offline partition. Uses a direct (same-thread) executor so the failure
 * surfaces synchronously to the draining test.
 */
private static BackgroundHiveSplitLoader backgroundHiveSplitLoaderOfflinePartitions()
{
    ConnectorSession connectorSession = new TestingConnectorSession(
            new HiveSessionProperties(new HiveClientConfig().setMaxSplitSize(new DataSize(1.0, GIGABYTE)), new OrcFileWriterConfig()).getSessionProperties());
    return new BackgroundHiveSplitLoader(
            SIMPLE_TABLE,
            createPartitionMetadataWithOfflinePartitions(),
            TupleDomain.all(),
            createBucketSplitInfo(Optional.empty(), ImmutableList.of()),
            connectorSession,
            new TestingHdfsEnvironment(),
            new NamenodeStats(),
            new TestingDirectoryLister(TEST_FILES),
            directExecutor(),
            2,
            false);
}
/**
 * Returns partition metadata whose iterator yields one valid partition and then
 * throws, simulating a partition that has been marked offline. The failure is
 * raised lazily, on the second advance of the iterator — not by this method.
 *
 * <p>The redundant {@code throws RuntimeException} clause was removed: unchecked
 * exceptions need not be declared, and this method itself never throws.
 */
private static Iterable<HivePartitionMetadata> createPartitionMetadataWithOfflinePartitions()
{
    return () -> new AbstractIterator<HivePartitionMetadata>()
    {
        // This iterator is crafted to return a valid partition for the first calls to
        // hasNext() and next(), and then it should throw for the second call to hasNext()
        private int position = -1;

        @Override
        protected HivePartitionMetadata computeNext()
        {
            position++;
            switch (position) {
                case 0:
                    return new HivePartitionMetadata(
                            new HivePartition(new SchemaTableName("testSchema", "table_name"), ImmutableList.of()),
                            Optional.empty(),
                            ImmutableMap.of());
                case 1:
                    // Simulates hitting an offline partition while lazily loading splits.
                    throw new RuntimeException("OFFLINE");
                default:
                    return endOfData();
            }
        }
    };
}
/**
 * Creates an all-at-once {@link HiveSplitSource} fed by the given loader,
 * reading the database/table name from {@code SIMPLE_TABLE}.
 */
private static HiveSplitSource hiveSplitSource(
        BackgroundHiveSplitLoader backgroundHiveSplitLoader,
        TupleDomain<HiveColumnHandle> compactEffectivePredicate)
{
    return HiveSplitSource.allAtOnce(
            SESSION,
            SIMPLE_TABLE.getDatabaseName(),
            SIMPLE_TABLE.getTableName(),
            compactEffectivePredicate,
            1,      // NOTE(review): the two '1' arguments look like queue/batch limits kept
            1,      // minimal to exercise back-pressure — confirm against HiveSplitSource.allAtOnce
            new DataSize(32, MEGABYTE),
            backgroundHiveSplitLoader,
            EXECUTOR,
            new CounterStat());
}
/**
 * Builds a minimal managed table definition rooted at a fixed HDFS location,
 * with a single string data column and the given partition/bucket layout.
 *
 * @param partitionColumns partition columns; empty for an unpartitioned table
 * @param bucketProperty optional bucketing property
 */
private static Table table(
        List<Column> partitionColumns,
        Optional<HiveBucketProperty> bucketProperty)
{
    Table.Builder tableBuilder = Table.builder();
    tableBuilder.getStorageBuilder()
            .setStorageFormat(
                    StorageFormat.create(
                            "com.facebook.hive.orc.OrcSerde",
                            "org.apache.hadoop.hive.ql.io.RCFileInputFormat",
                            // NOTE(review): the output-format slot is also given the *input*
                            // format class. These tests never write data so it is unused,
                            // but confirm this is intentional rather than a copy/paste slip.
                            "org.apache.hadoop.hive.ql.io.RCFileInputFormat"))
            .setLocation("hdfs://VOL1:9000/db_name/table_name")
            .setSkewed(false)
            .setBucketProperty(bucketProperty)
            .setSorted(false);
    return tableBuilder
            .setDatabaseName("test_dbname")
            .setOwner("testOwner")
            .setTableName("test_table")
            .setTableType(TableType.MANAGED_TABLE.toString())
            .setDataColumns(ImmutableList.of(new Column("col1", HIVE_STRING, Optional.empty())))
            .setParameters(ImmutableMap.of())
            .setPartitionColumns(partitionColumns)
            .build();
}
/**
 * Builds a zero-length file status for {@code path} with one (empty) block
 * location, so the split loader treats it as a readable file.
 */
private static LocatedFileStatus locatedFileStatus(Path path)
{
    return new LocatedFileStatus(
            0L,    // length
            false, // isdir
            0,     // block replication
            0L,    // block size
            0L,    // modification time
            0L,    // access time
            null,  // permission
            null,  // owner
            null,  // group
            null,  // symlink
            path,
            new BlockLocation[] {new BlockLocation()});
}
/**
 * Like {@link #locatedFileStatus(Path)} but with an empty block-location array,
 * modeling an empty file with no HDFS blocks at all.
 */
private static LocatedFileStatus locatedFileStatusWithNoBlocks(Path path)
{
    return new LocatedFileStatus(
            0L,    // length
            false, // isdir
            0,     // block replication
            0L,    // block size
            0L,    // modification time
            0L,    // access time
            null,  // permission
            null,  // owner
            null,  // group
            null,  // symlink
            path,
            new BlockLocation[] {});
}
/**
 * {@link DirectoryLister} stub that ignores the file system and path and always
 * returns the fixed list of files supplied at construction.
 */
private static class TestingDirectoryLister
        implements DirectoryLister
{
    private final List<LocatedFileStatus> files;

    public TestingDirectoryLister(List<LocatedFileStatus> files)
    {
        this.files = files;
    }

    @Override
    public RemoteIterator<LocatedFileStatus> list(FileSystem fs, Path path)
    {
        // Adapt the list's plain Iterator to Hadoop's RemoteIterator interface.
        return new RemoteIterator<LocatedFileStatus>()
        {
            private final Iterator<LocatedFileStatus> iterator = files.iterator();

            @Override
            public boolean hasNext()
            {
                return iterator.hasNext();
            }

            @Override
            public LocatedFileStatus next()
            {
                return iterator.next();
            }
        };
    }
}
/**
 * {@link HdfsEnvironment} stub that hands out a {@link TestingHdfsFileSystem}
 * regardless of user, path, or configuration, keeping the tests off real HDFS.
 */
private static class TestingHdfsEnvironment
        extends HdfsEnvironment
{
    public TestingHdfsEnvironment()
    {
        super(
                new HiveHdfsConfiguration(new HdfsConfigurationUpdater(new HiveClientConfig())),
                new HiveClientConfig(),
                new NoHdfsAuthentication());
    }

    @Override
    public FileSystem getFileSystem(String user, Path path, Configuration configuration)
    {
        return new TestingHdfsFileSystem();
    }
}
/**
 * {@link FileSystem} stub whose every operation throws
 * {@link UnsupportedOperationException}: the code under test is expected to get
 * its file listings from {@link TestingDirectoryLister} and never touch the
 * file system directly. Any call landing here indicates a test bug.
 */
private static class TestingHdfsFileSystem
        extends FileSystem
{
    @Override
    public boolean delete(Path f, boolean recursive)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean rename(Path src, Path dst)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setWorkingDirectory(Path dir)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public FileStatus[] listStatus(Path f)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public FSDataOutputStream create(
            Path f,
            FsPermission permission,
            boolean overwrite,
            int bufferSize,
            short replication,
            long blockSize,
            Progressable progress)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean mkdirs(Path f, FsPermission permission)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public FSDataOutputStream append(Path f, int bufferSize, Progressable progress)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public FSDataInputStream open(Path f, int bufferSize)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public FileStatus getFileStatus(Path f)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Path getWorkingDirectory()
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public URI getUri()
    {
        throw new UnsupportedOperationException();
    }
}
}
| |
/**
* File FBSolver.java
*
* This file is part of the jSAM project 2014.
*
* Copyright 2014 Coen van Leeuwen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nl.coenvl.sam.solvers;
import java.util.ArrayList;
import nl.coenvl.sam.MailMan;
import nl.coenvl.sam.agents.LinkedAgent;
import nl.coenvl.sam.exceptions.InvalidValueException;
import nl.coenvl.sam.messages.HashMessage;
import nl.coenvl.sam.messages.Message;
import nl.coenvl.sam.variables.AssignmentMap;
import nl.coenvl.sam.variables.DiscreteVariable;
/**
* FBSolver
*
* @author leeuwencjv
* @version 0.1
* @since 19 mrt. 2014
*
*/
@Deprecated
public class FBSolver<V> implements Solver {

    // Message-type markers for the forward-bounding protocol.
    public static final String CPA_MSG = "FBSolver:CPA_MSG";
    public static final String NEW_SOLUTION = "FBSolver:NEW_SOLUTION";
    public static final String TERMINATE = "FBSolver:TERMINATE";

    // Best known value for this agent's variable, taken from the cheapest
    // complete solution broadcast so far; applied on TERMINATE.
    private V bestValue;

    // The current partial assignment (CPA) as this agent knows it.
    private final AssignmentMap<V> context;

    // Values of myVariable already tried for the current CPA branch.
    private final ArrayList<V> exploredValues;

    private final DiscreteVariable<V> myVariable;

    // Agent in a linked (chain) ordering: prev() is this agent's predecessor,
    // next() its successor; null next()/prev() marks the chain ends.
    private final LinkedAgent<? extends DiscreteVariable<V>, V> parent;

    // Cost accumulated by the agents earlier in the chain, received via CPA_MSG.
    private double pastCost;

    /* These are more implementation specific */
    // Cost of the best complete solution found so far; candidate assignments
    // with cost >= upperBound are pruned.
    private volatile double upperBound;

    /**
     * Creates a solver bound to the given linked agent and its variable.
     *
     * @param agent the agent whose variable this solver assigns
     */
    public FBSolver(final LinkedAgent<? extends DiscreteVariable<V>, V> agent) {
        this.parent = agent;
        this.myVariable = this.parent.getVariable();
        this.upperBound = Double.MAX_VALUE;
        this.exploredValues = new ArrayList<>();
        this.context = new AssignmentMap<>();
    }

    /**
     * Tries to extend the CPA with the cheapest unexplored value for this
     * agent's variable; backtracks when no value beats the current upper bound.
     */
    private void assign_CPA() {
        final AssignmentMap<V> pa = this.context.clone();
        V assignment = null;
        double paCost = Double.MAX_VALUE;
        // Scan every not-yet-explored value; keep the cheapest one under the bound.
        for (final V iterAssignment : this.myVariable) {
            if (this.exploredValues.contains(iterAssignment)) {
                continue;
            }
            pa.setAssignment(this.myVariable, iterAssignment);
            final double iterCost = this.pastCost + this.parent.getLocalCostIf(pa);
            if ((iterCost < paCost) && (iterCost < this.upperBound)) {
                paCost = iterCost;
                assignment = iterAssignment;
            }
        }
        if ((assignment == null) || (paCost >= this.upperBound)) {
            // No new solution found backtrack...
            this.backtrack();
        } else {
            // Assign this agent with the new value
            // myProblemContext.setValue(assignment);
            this.context.setAssignment(this.myVariable, assignment);
            this.exploredValues.add(assignment);

            // Forward the current assignment to the next child, or broadcast
            // new solution if there is none
            if (this.parent.next() == null) {
                // Last agent in the chain: the CPA is a complete solution.
                final Message msg = new HashMessage(this.myVariable.getID(), FBSolver.NEW_SOLUTION);
                msg.put("pa", this.context);
                msg.put("paCost", paCost);
                MailMan.broadCast(msg);

                this.bestValue = assignment;
                this.upperBound = paCost;
                // Keep exploring alternatives for a cheaper complete solution.
                this.backtrack();
            } else {
                final Message msg = new HashMessage(this.myVariable.getID(), FBSolver.CPA_MSG);
                msg.put("pa", this.context);
                msg.put("paCost", paCost);
                MailMan.sendMessage(this.parent.next(), msg);
            }
        }
    }

    /**
     * Hands control back to the predecessor, or terminates the whole run when
     * this agent is the first in the chain. Note the backtrack CPA_MSG carries
     * no "pa"/"paCost" payload, which the receiver's push() treats as a plain
     * backtrack request.
     */
    private void backtrack() {
        if (this.parent.prev() == null) {
            MailMan.broadCast(new HashMessage(this.myVariable.getID(), FBSolver.TERMINATE));
        } else {
            MailMan.sendMessage(this.parent.prev(), new HashMessage(this.myVariable.getID(), FBSolver.CPA_MSG));
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see nl.coenvl.sam.Agent#init()
     */
    @Override
    public void init() {
        // Only the head of the chain starts the search.
        if (this.parent.prev() == null) {
            this.assign_CPA();
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see org.anon.cocoa.Solver#push(org.anon.cocoa.Message)
     */
    @Override
    public void push(final Message msg) {
        // Based on the received message we do different stuff
        if (msg.getType().equals(FBSolver.CPA_MSG)) {
            // A CPA_MSG with a "pa" payload extends our context; without one it
            // is a backtrack request from the successor.
            if (msg.containsKey("pa")) {
                @SuppressWarnings("unchecked")
                final AssignmentMap<V> cpa = (AssignmentMap<V>) msg.get("pa");
                this.context.putAll(cpa);
            }

            // Check to see if it is a new branch we need to research instead of
            // backtrack
            if (msg.containsKey("paCost")) {
                this.pastCost = (Double) msg.get("paCost");
                this.context.removeAssignment(this.myVariable);
                this.exploredValues.clear();
            }

            final double cost = this.parent.getLocalCostIf(this.context);
            if (cost >= this.upperBound) {
                this.backtrack();
            } else {
                this.assign_CPA();
            }
        } else if (msg.getType().equals(FBSolver.TERMINATE)) {
            // Search is over: commit the best value seen so far.
            try {
                this.myVariable.setValue(this.bestValue);
            } catch (final InvalidValueException e) {
                e.printStackTrace();
            }
        } else if (msg.getType().equals(FBSolver.NEW_SOLUTION)) {
            // A complete solution was broadcast: adopt its value and tighten the bound.
            @SuppressWarnings("unchecked")
            final AssignmentMap<V> solution = (AssignmentMap<V>) msg.get("pa");
            this.bestValue = solution.getAssignment(this.myVariable);
            this.upperBound = (Double) msg.get("paCost");
        } else {
            System.err.println("Unexpected message of type " + msg.getType());
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see nl.coenvl.sam.Solver#reset()
     */
    @Override
    public void reset() {
        this.myVariable.clear();
        this.upperBound = Double.MAX_VALUE;
        this.exploredValues.clear();
        this.context.clear();
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.service.user;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Sets;
import com.mojang.authlib.GameProfile;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.management.BanEntry;
import net.minecraft.server.management.PlayerProfileCache;
import net.minecraft.server.management.ServerConfigurationManager;
import net.minecraft.server.management.UserListBans;
import net.minecraft.server.management.UserListBansEntry;
import net.minecraft.server.management.UserListWhitelist;
import net.minecraft.server.management.UserListWhitelistEntry;
import net.minecraft.world.storage.SaveHandler;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.entity.player.SpongeUser;
import org.spongepowered.common.interfaces.entity.player.IMixinEntityPlayerMP;
import org.spongepowered.common.world.DimensionManager;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Locale;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
 * Locates {@link User} data from the various places it can live: the in-memory
 * cache, online players, on-disk player data, and the whitelist/banlist.
 *
 * <p>Fixes applied in review: the player-data {@code FileInputStream} in
 * {@link #getFromStoredData} was never closed (resource leak), and
 * {@link #delete} short-circuited with {@code &&} so a failed player-data
 * delete skipped the whitelist/banlist removals.
 */
class UserDiscoverer {

    // Discovered users, evicted a day after last access.
    private static final Cache<UUID, User> userCache = CacheBuilder.newBuilder()
            .expireAfterAccess(1, TimeUnit.DAYS)
            .build();

    /**
     * Wraps the profile in a new {@link SpongeUser}, caches it, and returns it.
     *
     * @param profile The profile to create a user for
     * @return The newly created user
     */
    static User create(GameProfile profile) {
        User user = (User) new SpongeUser(profile);
        userCache.put(profile.getId(), user);
        return user;
    }

    /**
     * Searches for user data from a variety of places, in order of preference.
     * A user that has data in sponge may not necessarily have been online
     * before. A user added to the ban/whitelist that has not been on the server
     * before should be discover-able.
     *
     * @param uniqueId The user's UUID
     * @return The user data, or null if not found
     */
    static User findByUuid(UUID uniqueId) {
        User user = userCache.getIfPresent(uniqueId);
        if (user != null) {
            return user;
        }
        user = getOnlinePlayer(uniqueId);
        if (user != null) {
            return user;
        }
        user = getFromStoredData(uniqueId);
        if (user != null) {
            return user;
        }
        user = getFromWhitelist(uniqueId);
        if (user != null) {
            return user;
        }
        user = getFromBanlist(uniqueId);
        return user;
    }

    /**
     * Resolves a username to a user via the server's profile cache.
     *
     * @param username The name to look up (matched case-insensitively against the cache)
     * @return The user, or null if the name is unknown
     */
    static User findByUsername(String username) {
        PlayerProfileCache cache = MinecraftServer.getServer().getPlayerProfileCache();
        HashSet<String> names = Sets.newHashSet(cache.getUsernames());
        // NOTE(review): assumes cache.getUsernames() reports lowercase names — confirm.
        if (names.contains(username.toLowerCase(Locale.ROOT))) {
            GameProfile profile = cache.getGameProfileForUsername(username);
            if (profile != null) {
                return findByUuid(profile.getId());
            }
        }
        return null;
    }

    /**
     * Collects every known profile: cached users, on-disk player data files,
     * and whitelist/banlist entries.
     *
     * @return All known game profiles (deduplicated by set semantics)
     */
    static Collection<org.spongepowered.api.profile.GameProfile> getAllProfiles() {
        Set<org.spongepowered.api.profile.GameProfile> profiles = Sets.newHashSet();

        // Add all cached profiles
        for (User user : userCache.asMap().values()) {
            profiles.add(user.getProfile());
        }

        // Add all known profiles from the data files
        // Note: Uses the overworld's player data
        SaveHandler saveHandler = (SaveHandler) DimensionManager.getWorldFromDimId(0).getSaveHandler();
        String[] uuids = saveHandler.getAvailablePlayerDat();
        for (String playerUuid : uuids) {
            // Some mods store other files in the 'playerdata' folder, so
            // we need to ensure that the filename is a valid UUID
            if (playerUuid.split("-").length != 5) {
                continue;
            }
            GameProfile profile = MinecraftServer.getServer().getPlayerProfileCache().getProfileByUUID(UUID.fromString(playerUuid));
            if (profile != null) {
                profiles.add((org.spongepowered.api.profile.GameProfile) profile);
            }
        }

        // Add all whitelisted users
        UserListWhitelist whiteList = MinecraftServer.getServer().getConfigurationManager().getWhitelistedPlayers();
        for (UserListWhitelistEntry entry : whiteList.getValues().values()) {
            profiles.add((org.spongepowered.api.profile.GameProfile) entry.value);
        }

        // Add all banned users
        UserListBans banList = MinecraftServer.getServer().getConfigurationManager().getBannedPlayers();
        for (UserListBansEntry entry : banList.getValues().values()) {
            profiles.add((org.spongepowered.api.profile.GameProfile) entry.value);
        }
        return profiles;
    }

    /**
     * Deletes a user's stored data, whitelist entry, and banlist entry.
     *
     * @param uniqueId The user's UUID
     * @return true if every removal succeeded; false if the user is online or
     *         any removal failed
     */
    static boolean delete(UUID uniqueId) {
        if (getOnlinePlayer(uniqueId) != null) {
            // Don't delete online player's data
            return false;
        }
        // Attempt every removal even when an earlier one fails, so a failed
        // player-data delete does not leave stale whitelist/banlist entries
        // behind (a short-circuiting '&&' would skip the later steps).
        boolean success = deleteStoredPlayerData(uniqueId);
        success = deleteWhitelistEntry(uniqueId) && success;
        success = deleteBanlistEntry(uniqueId) && success;
        return success;
    }

    /**
     * Returns the user object of a currently connected player, caching it, or
     * null if the player is offline or the server has not started.
     */
    private static User getOnlinePlayer(UUID uniqueId) {
        ServerConfigurationManager confMgr = MinecraftServer.getServer().getConfigurationManager();
        if (confMgr == null) { // Server not started yet
            return null;
        }
        // Although the player itself could be returned here (as Player extends
        // User), a plugin is more likely to cache the User object and we don't
        // want the player entity to be cached.
        IMixinEntityPlayerMP player = (IMixinEntityPlayerMP) confMgr.getPlayerByUUID(uniqueId);
        if (player != null) {
            User user = player.getUserObject();
            userCache.put(uniqueId, user);
            return user;
        }
        return null;
    }

    /**
     * Loads a user from the on-disk player data, or null when no data file or
     * no server-side profile exists for the UUID.
     */
    private static User getFromStoredData(UUID uniqueId) {
        // Note: Uses the overworld's player data
        File dataFile = getPlayerDataFile(uniqueId);
        if (dataFile == null) {
            return null;
        }
        Optional<org.spongepowered.api.profile.GameProfile> profile = getProfileFromServer(uniqueId);
        if (!profile.isPresent()) {
            return null;
        }
        User user = create((GameProfile) profile.get());
        // try-with-resources: the stream was previously leaked on every call.
        try (FileInputStream in = new FileInputStream(dataFile)) {
            ((SpongeUser) user).readFromNbt(CompressedStreamTools.readCompressed(in));
        } catch (IOException e) {
            // Corrupt data is logged but the profile-backed user is still returned.
            SpongeImpl.getLogger().warn("Corrupt user file {}", dataFile, e);
        }
        return user;
    }

    /**
     * Fetches a profile from the server's profile service, returning empty on
     * failure or interruption (the error is logged).
     */
    private static Optional<org.spongepowered.api.profile.GameProfile> getProfileFromServer(UUID uuid) {
        CompletableFuture<org.spongepowered.api.profile.GameProfile> gameProfile = Sponge.getServer().getGameProfileManager().get(uuid);
        try {
            org.spongepowered.api.profile.GameProfile profile = gameProfile.get();
            if (profile != null) {
                return Optional.of(profile);
            } else {
                return Optional.empty();
            }
        } catch (InterruptedException | ExecutionException e) {
            SpongeImpl.getLogger().warn("Error while getting profile for {}", uuid, e);
            return Optional.empty();
        }
    }

    // Creates a user from the whitelist entry's profile, or null if not whitelisted.
    private static User getFromWhitelist(UUID uniqueId) {
        GameProfile profile = null;
        UserListWhitelist whiteList = MinecraftServer.getServer().getConfigurationManager().getWhitelistedPlayers();
        UserListWhitelistEntry whiteListData = whiteList.getEntry(new GameProfile(uniqueId, ""));
        if (whiteListData != null) {
            profile = whiteListData.value;
        }
        if (profile != null) {
            return create(profile);
        }
        return null;
    }

    // Creates a user from the banlist entry's profile, or null if not banned.
    private static User getFromBanlist(UUID uniqueId) {
        GameProfile profile = null;
        UserListBans banList = MinecraftServer.getServer().getConfigurationManager().getBannedPlayers();
        UserListBansEntry banData = banList.getEntry(new GameProfile(uniqueId, ""));
        if (banData != null) {
            profile = banData.value;
        }
        if (profile != null) {
            return create(profile);
        }
        return null;
    }

    // Returns the player's .dat file in the overworld's playerdata directory, or null if absent.
    private static File getPlayerDataFile(UUID uniqueId) {
        // Note: Uses the overworld's player data
        SaveHandler saveHandler = (SaveHandler) DimensionManager.getWorldFromDimId(0).getSaveHandler();
        String[] uuids = saveHandler.getAvailablePlayerDat();
        for (String playerUuid : uuids) {
            if (uniqueId.toString().equals(playerUuid)) {
                return new File(saveHandler.playersDirectory, playerUuid + ".dat");
            }
        }
        return null;
    }

    // Deletes the player's data file; true when no file exists or deletion succeeds.
    private static boolean deleteStoredPlayerData(UUID uniqueId) {
        File dataFile = getPlayerDataFile(uniqueId);
        if (dataFile != null) {
            try {
                return dataFile.delete();
            } catch (SecurityException e) {
                SpongeImpl.getLogger().warn("Unable to delete file {} due to a security error", dataFile, e);
                return false;
            }
        }
        return true;
    }

    // Removes any whitelist entry for the UUID; always reports success.
    private static boolean deleteWhitelistEntry(UUID uniqueId) {
        UserListWhitelist whiteList = MinecraftServer.getServer().getConfigurationManager().getWhitelistedPlayers();
        whiteList.removeEntry(new GameProfile(uniqueId, ""));
        return true;
    }

    // Removes any banlist entry for the UUID; always reports success.
    private static boolean deleteBanlistEntry(UUID uniqueId) {
        UserListBans banList = MinecraftServer.getServer().getConfigurationManager().getBannedPlayers();
        banList.removeEntry(new GameProfile(uniqueId, ""));
        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.iterators.system;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.accumulo.core.iteratorsImpl.system.DeletingIterator;
import org.apache.accumulo.core.iteratorsImpl.system.DeletingIterator.Behavior;
import org.apache.accumulo.core.iteratorsImpl.system.SortedMapIterator;
import org.apache.hadoop.io.Text;
import org.junit.Test;
/**
 * Tests for {@code DeletingIterator}: masking of entries at or below a delete
 * marker's timestamp, optional propagation of the marker itself, and the FAIL
 * behavior that rejects delete markers outright.
 */
public class DeletingIteratorTest {

    // No column-family filtering in any of these tests.
    private static final Collection<ByteSequence> EMPTY_COL_FAMS = new ArrayList<>();

    @Test
    public void test1() {
        Text colf = new Text("a");
        Text colq = new Text("b");
        Value dvOld = new Value("old");
        Value dvDel = new Value("old");
        Value dvNew = new Value("new");

        // Build: rows 000 and 001 each get five "old" entries at ts 0-4,
        // then row 000 gets a delete marker at ts 5, then both rows get five
        // "new" entries at ts 6-10 — 21 entries total (2*5 + 1 + 2*5).
        TreeMap<Key,Value> tm = new TreeMap<>();
        Key k;

        for (int i = 0; i < 2; i++) {
            for (long j = 0; j < 5; j++) {
                k = new Key(new Text(String.format("%03d", i)), colf, colq, j);
                tm.put(k, dvOld);
            }
        }

        k = new Key(new Text(String.format("%03d", 0)), colf, colq, 5);
        k.setDeleted(true);
        tm.put(k, dvDel);

        for (int i = 0; i < 2; i++) {
            for (long j = 6; j < 11; j++) {
                k = new Key(new Text(String.format("%03d", i)), colf, colq, j);
                tm.put(k, dvNew);
            }
        }

        assertEquals("Initial size was " + tm.size(), 21, tm.size());

        Text checkRow = new Text("000");
        // propagate = false: the marker and the five "old" entries it masks on
        // row 000 are all suppressed, leaving 15 entries.
        try {
            SortedKeyValueIterator<Key,Value> it =
                DeletingIterator.wrap(new SortedMapIterator(tm), false, Behavior.PROCESS);
            it.seek(new Range(), EMPTY_COL_FAMS, false);

            TreeMap<Key,Value> tmOut = new TreeMap<>();
            while (it.hasTop()) {
                tmOut.put(it.getTopKey(), it.getTopValue());
                it.next();
            }

            assertEquals("size after no propagation was " + tmOut.size(), 15, tmOut.size());

            // On the deleted row only "new" (ts > marker) entries survive.
            for (Entry<Key,Value> e : tmOut.entrySet()) {
                if (e.getKey().getRow().equals(checkRow)) {
                    byte[] b = e.getValue().get();
                    assertEquals('n', b[0]);
                    assertEquals('e', b[1]);
                    assertEquals('w', b[2]);
                }
            }
        } catch (IOException e) {
            fail();
        }

        // propagate = true: the marker itself is kept (16 entries) while the
        // entries it masks are still suppressed.
        try {
            SortedKeyValueIterator<Key,Value> it =
                DeletingIterator.wrap(new SortedMapIterator(tm), true, Behavior.PROCESS);
            it.seek(new Range(), EMPTY_COL_FAMS, false);

            TreeMap<Key,Value> tmOut = new TreeMap<>();
            while (it.hasTop()) {
                tmOut.put(it.getTopKey(), it.getTopValue());
                it.next();
            }

            assertEquals("size after propagation was " + tmOut.size(), 16, tmOut.size());

            for (Entry<Key,Value> e : tmOut.entrySet()) {
                if (e.getKey().getRow().equals(checkRow)) {
                    byte[] b = e.getValue().get();
                    if (e.getKey().isDeleted()) {
                        assertEquals('o', b[0]);
                        assertEquals('l', b[1]);
                        assertEquals('d', b[2]);
                    } else {
                        assertEquals('n', b[0]);
                        assertEquals('e', b[1]);
                        assertEquals('w', b[2]);
                    }
                }
            }
        } catch (IOException e) {
            fail();
        }
    }

    // seek test
    @Test
    public void test2() throws IOException {
        // One row with a delete marker at ts 2: v4/v3 (above it) visible, v1 masked.
        TreeMap<Key,Value> tm = new TreeMap<>();

        newKeyValue(tm, "r000", 4, false, "v4");
        newKeyValue(tm, "r000", 3, false, "v3");
        newKeyValue(tm, "r000", 2, true, "v2");
        newKeyValue(tm, "r000", 1, false, "v1");

        SortedKeyValueIterator<Key,Value> it =
            DeletingIterator.wrap(new SortedMapIterator(tm), false, Behavior.PROCESS);

        // SEEK two keys before delete
        it.seek(newRange("r000", 4), EMPTY_COL_FAMS, false);

        assertTrue(it.hasTop());
        assertEquals(newKey("r000", 4), it.getTopKey());
        assertEquals("v4", it.getTopValue().toString());

        it.next();

        assertTrue(it.hasTop());
        assertEquals(newKey("r000", 3), it.getTopKey());
        assertEquals("v3", it.getTopValue().toString());

        it.next();

        assertFalse(it.hasTop());

        // SEEK passed delete
        it.seek(newRange("r000", 1), EMPTY_COL_FAMS, false);

        assertFalse(it.hasTop());

        // SEEK to delete
        it.seek(newRange("r000", 2), EMPTY_COL_FAMS, false);

        assertFalse(it.hasTop());

        // SEEK right before delete
        it.seek(newRange("r000", 3), EMPTY_COL_FAMS, false);

        assertTrue(it.hasTop());
        assertEquals(newKey("r000", 3), it.getTopKey());
        assertEquals("v3", it.getTopValue().toString());

        it.next();

        assertFalse(it.hasTop());
    }

    // test delete with same timestamp as existing key
    @Test
    public void test3() throws IOException {
        // The marker at ts 2 also suppresses the entry with the identical timestamp.
        TreeMap<Key,Value> tm = new TreeMap<>();

        newKeyValue(tm, "r000", 3, false, "v3");
        newKeyValue(tm, "r000", 2, false, "v2");
        newKeyValue(tm, "r000", 2, true, "");
        newKeyValue(tm, "r000", 1, false, "v1");

        SortedKeyValueIterator<Key,Value> it =
            DeletingIterator.wrap(new SortedMapIterator(tm), false, Behavior.PROCESS);
        it.seek(new Range(), EMPTY_COL_FAMS, false);

        assertTrue(it.hasTop());
        assertEquals(newKey("r000", 3), it.getTopKey());
        assertEquals("v3", it.getTopValue().toString());

        it.next();

        assertFalse(it.hasTop());

        it.seek(newRange("r000", 2), EMPTY_COL_FAMS, false);

        assertFalse(it.hasTop());
    }

    // test range inclusiveness
    @Test
    public void test4() throws IOException {
        TreeMap<Key,Value> tm = new TreeMap<>();

        newKeyValue(tm, "r000", 3, false, "v3");
        newKeyValue(tm, "r000", 2, false, "v2");
        newKeyValue(tm, "r000", 2, true, "");
        newKeyValue(tm, "r000", 1, false, "v1");

        SortedKeyValueIterator<Key,Value> it =
            DeletingIterator.wrap(new SortedMapIterator(tm), false, Behavior.PROCESS);

        // Inclusive seek at ts 3 sees v3...
        it.seek(newRange("r000", 3), EMPTY_COL_FAMS, false);

        assertTrue(it.hasTop());
        assertEquals(newKey("r000", 3), it.getTopKey());
        assertEquals("v3", it.getTopValue().toString());

        it.next();

        assertFalse(it.hasTop());

        // ...while an exclusive seek at the same key skips it (everything below is masked).
        it.seek(newRange("r000", 3, false), EMPTY_COL_FAMS, false);

        assertFalse(it.hasTop());
    }

    @Test
    public void testFail() throws IOException {
        TreeMap<Key,Value> tm = new TreeMap<>();

        newKeyValue(tm, "r000", 3, false, "v3");
        newKeyValue(tm, "r000", 2, false, "v2");
        newKeyValue(tm, "r000", 2, true, "");
        newKeyValue(tm, "r000", 1, false, "v1");

        SortedKeyValueIterator<Key,Value> it =
            DeletingIterator.wrap(new SortedMapIterator(tm), false, Behavior.FAIL);
        it.seek(new Range(), EMPTY_COL_FAMS, false);

        try {
            while (it.hasTop()) {
                it.getTopKey();
                it.next();
            }
            fail();
        } catch (IllegalStateException e) {} // expected: FAIL behavior rejects delete markers
    }

    // Range starting at (row, ts) and running to the end of the table.
    private Range newRange(String row, long ts, boolean inclusive) {
        return new Range(newKey(row, ts), inclusive, null, true);
    }

    private Range newRange(String row, long ts) {
        return newRange(row, ts, true);
    }

    private Key newKey(String row, long ts) {
        return new Key(new Text(row), ts);
    }

    // Inserts a (row, ts) key with the given delete flag and value into tm.
    private void newKeyValue(TreeMap<Key,Value> tm, String row, long ts, boolean deleted,
        String val) {
        Key k = newKey(row, ts);
        k.setDeleted(deleted);
        tm.put(k, new Value(val));
    }
}
| |
package com.mikepenz.fastadapter.app;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.support.v7.widget.helper.ItemTouchHelper;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import com.mikepenz.fastadapter.FastAdapter;
import com.mikepenz.fastadapter.IAdapter;
import com.mikepenz.fastadapter.IItemAdapter;
import com.mikepenz.fastadapter.commons.adapters.FastItemAdapter;
import com.mikepenz.fastadapter.adapters.FooterAdapter;
import com.mikepenz.fastadapter.adapters.ItemAdapter.ItemFilterListener;
import com.mikepenz.fastadapter.app.items.SimpleItem;
import com.mikepenz.fastadapter_extensions.drag.ItemTouchCallback;
import com.mikepenz.fastadapter_extensions.drag.SimpleDragCallback;
import com.mikepenz.fastadapter_extensions.items.ProgressItem;
import com.mikepenz.fastadapter_extensions.scroll.EndlessRecyclerOnScrollListener;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.material_design_iconic_typeface_library.MaterialDesignIconic;
import com.mikepenz.materialize.MaterializeBuilder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Sample activity demonstrating a FastAdapter list with endless scrolling,
 * drag &amp; drop reordering, and toolbar search filtering.
 */
public class EndlessScrollListActivity extends AppCompatActivity implements ItemTouchCallback, ItemFilterListener {
    // The FastAdapter managing the "real" list items; kept so filtering and
    // state save/restore can reach it later.
    private FastItemAdapter<SimpleItem> fastItemAdapter;
    // Footer adapter that only ever holds the endless-scroll ProgressItem.
    private FooterAdapter<ProgressItem> footerAdapter;
    // Drag & drop support.
    private SimpleDragCallback touchCallback;
    private ItemTouchHelper touchHelper;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_sample);

        // BUG FIX: this call previously ran *before* super.onCreate()/setContentView(),
        // where findViewById(android.R.id.content) is not guaranteed to resolve and can
        // return null (NPE). Request the light status bar only after the view hierarchy
        // exists.
        findViewById(android.R.id.content).setSystemUiVisibility(
                findViewById(android.R.id.content).getSystemUiVisibility() | View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR);

        // Handle Toolbar
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);

        // Style our UI.
        new MaterializeBuilder().withActivity(this).build();

        // Create our FastAdapter which will manage everything.
        fastItemAdapter = new FastItemAdapter<>();
        fastItemAdapter.withSelectable(true);

        // Create our FooterAdapter which will manage the progress items.
        footerAdapter = new FooterAdapter<>();

        // Configure our fastAdapter: show a toast with the clicked item's name.
        fastItemAdapter.withOnClickListener(new FastAdapter.OnClickListener<SimpleItem>() {
            @Override
            public boolean onClick(View v, IAdapter<SimpleItem> adapter, SimpleItem item, int position) {
                Toast.makeText(v.getContext(), (item).name.getText(v.getContext()), Toast.LENGTH_LONG).show();
                return false;
            }
        });

        // Configure the itemAdapter's filter predicate.
        fastItemAdapter.withFilterPredicate(new IItemAdapter.Predicate<SimpleItem>() {
            @Override
            public boolean filter(SimpleItem item, CharSequence constraint) {
                // Return true if we should filter it out, false to keep it.
                return !item.name.getText().toLowerCase().contains(constraint.toString().toLowerCase());
            }
        });
        fastItemAdapter.getItemAdapter().withItemFilterListener(this);

        // Get our recyclerView and do basic setup.
        RecyclerView recyclerView = (RecyclerView) findViewById(R.id.rv);
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        recyclerView.setItemAnimator(new DefaultItemAnimator());
        recyclerView.setAdapter(footerAdapter.wrap(fastItemAdapter));
        recyclerView.addOnScrollListener(new EndlessRecyclerOnScrollListener(footerAdapter) {
            @Override
            public void onLoadMore(final int currentPage) {
                // Show a (non-clickable) progress item while the next page "loads".
                footerAdapter.clear();
                footerAdapter.add(new ProgressItem().withEnabled(false));
                // Simulate networking (2 seconds).
                Handler handler = new Handler();
                handler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        footerAdapter.clear();
                        for (int i = 1; i < 16; i++) {
                            fastItemAdapter.add(fastItemAdapter.getAdapterItemCount(), new SimpleItem().withName("Item " + i + " Page " + currentPage));
                        }
                    }
                }, 2000);
            }
        });

        // Fill with some sample data (load the first page here).
        List<SimpleItem> items = new ArrayList<>();
        for (int i = 1; i < 16; i++) {
            items.add(new SimpleItem().withName("Item " + i + " Page " + 1));
        }
        fastItemAdapter.add(items);

        // Add drag and drop for items.
        touchCallback = new SimpleDragCallback(this);
        touchHelper = new ItemTouchHelper(touchCallback); // Create ItemTouchHelper and pass with parameter the SimpleDragCallback
        touchHelper.attachToRecyclerView(recyclerView); // Attach ItemTouchHelper to RecyclerView

        // Restore selections (this has to be done after the items were added).
        fastItemAdapter.withSavedInstanceState(savedInstanceState);

        // Set the back arrow in the toolbar.
        // NOTE(review): setHomeButtonEnabled(false) alongside setDisplayHomeAsUpEnabled(true)
        // looks suspicious — confirm whether the home button should be enabled.
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setHomeButtonEnabled(false);
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        // Add the values which need to be saved from the adapter to the bundle.
        outState = fastItemAdapter.saveInstanceState(outState);
        super.onSaveInstanceState(outState);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle the click on the back arrow.
        switch (item.getItemId()) {
            case android.R.id.home:
                onBackPressed();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu items for use in the action bar.
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.search, menu);
        // Search icon.
        menu.findItem(R.id.search).setIcon(new IconicsDrawable(this, MaterialDesignIconic.Icon.gmi_search).color(Color.BLACK).actionBar());
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            final SearchView searchView = (SearchView) menu.findItem(R.id.search).getActionView();
            searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
                @Override
                public boolean onQueryTextSubmit(String s) {
                    // NOTE(review): drag is disabled unconditionally here, while
                    // onQueryTextChange re-enables it for an empty query — confirm intent.
                    touchCallback.setIsDragEnabled(false);
                    fastItemAdapter.filter(s);
                    return true;
                }

                @Override
                public boolean onQueryTextChange(String s) {
                    fastItemAdapter.filter(s);
                    // Only allow dragging while no filter is active.
                    touchCallback.setIsDragEnabled(TextUtils.isEmpty(s));
                    return true;
                }
            });
        } else {
            menu.findItem(R.id.search).setVisible(false);
        }
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean itemTouchOnMove(int oldPosition, int newPosition) {
        Collections.swap(fastItemAdapter.getAdapterItems(), oldPosition, newPosition); // change position
        fastItemAdapter.notifyAdapterItemMoved(oldPosition, newPosition);
        return true;
    }

    @Override
    public void itemsFiltered() {
        Toast.makeText(EndlessScrollListActivity.this, "filtered items count: " + fastItemAdapter.getItemCount(), Toast.LENGTH_SHORT).show();
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/v2/data.proto
package com.google.bigtable.v2;
/**
 * <pre>
 * Specifies (some of) the contents of a single row/column intersection of a
 * table.
 * </pre>
 *
 * Protobuf type {@code google.bigtable.v2.Column}
 */
public final class Column extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.bigtable.v2.Column)
ColumnOrBuilder {
// Use Column.newBuilder() to construct.
private Column(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance: empty qualifier, no cells.
private Column() {
qualifier_ = com.google.protobuf.ByteString.EMPTY;
cells_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
// Wire-format parsing constructor (invoked via PARSER). Reads tag/value pairs until
// end-of-stream (tag 0) and skips any field it does not recognize.
private Column(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
// Bit 0x00000002 tracks whether cells_ has been swapped for a mutable ArrayList.
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
// Tag 10 = field 1 ("qualifier"), wire type 2 (length-delimited bytes).
case 10: {
qualifier_ = input.readBytes();
break;
}
// Tag 18 = field 2 ("cells"), wire type 2 (embedded message).
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
cells_ = new java.util.ArrayList<com.google.bigtable.v2.Cell>();
mutable_bitField0_ |= 0x00000002;
}
cells_.add(
input.readMessage(com.google.bigtable.v2.Cell.parser(), extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the cells list even if parsing failed part-way through.
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
cells_ = java.util.Collections.unmodifiableList(cells_);
}
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_Column_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_Column_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.bigtable.v2.Column.class, com.google.bigtable.v2.Column.Builder.class);
}
private int bitField0_;
public static final int QUALIFIER_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString qualifier_;
/**
 * <pre>
 * The unique key which identifies this column within its family. This is the
 * same key that's used to identify the column in, for example, a RowFilter
 * which sets its `column_qualifier_regex_filter` field.
 * May contain any byte string, including the empty string, up to 16kiB in
 * length.
 * </pre>
 *
 * <code>optional bytes qualifier = 1;</code>
 */
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
public static final int CELLS_FIELD_NUMBER = 2;
private java.util.List<com.google.bigtable.v2.Cell> cells_;
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public java.util.List<com.google.bigtable.v2.Cell> getCellsList() {
return cells_;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public java.util.List<? extends com.google.bigtable.v2.CellOrBuilder>
getCellsOrBuilderList() {
return cells_;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public int getCellsCount() {
return cells_.size();
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.Cell getCells(int index) {
return cells_.get(index);
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.CellOrBuilder getCellsOrBuilder(
int index) {
return cells_.get(index);
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only non-default fields (proto3 semantics): an empty qualifier is omitted.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!qualifier_.isEmpty()) {
output.writeBytes(1, qualifier_);
}
for (int i = 0; i < cells_.size(); i++) {
output.writeMessage(2, cells_.get(i));
}
}
// Size computation mirrors writeTo; result is cached in memoizedSize (-1 = not computed).
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!qualifier_.isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, qualifier_);
}
for (int i = 0; i < cells_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, cells_.get(i));
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.bigtable.v2.Column)) {
return super.equals(obj);
}
com.google.bigtable.v2.Column other = (com.google.bigtable.v2.Column) obj;
boolean result = true;
result = result && getQualifier()
.equals(other.getQualifier());
result = result && getCellsList()
.equals(other.getCellsList());
return result;
}
// Hash is cached in memoizedHashCode (0 = not yet computed).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQualifier().hashCode();
if (getCellsCount() > 0) {
hash = (37 * hash) + CELLS_FIELD_NUMBER;
hash = (53 * hash) + getCellsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.bigtable.v2.Column parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.bigtable.v2.Column parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.v2.Column parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.bigtable.v2.Column parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.v2.Column parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.bigtable.v2.Column parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.bigtable.v2.Column parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.bigtable.v2.Column parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.bigtable.v2.Column parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.bigtable.v2.Column parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.bigtable.v2.Column prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * Specifies (some of) the contents of a single row/column intersection of a
 * table.
 * </pre>
 *
 * Protobuf type {@code google.bigtable.v2.Column}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.bigtable.v2.Column)
com.google.bigtable.v2.ColumnOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_Column_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_Column_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.bigtable.v2.Column.class, com.google.bigtable.v2.Column.Builder.class);
}
// Construct using com.google.bigtable.v2.Column.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getCellsFieldBuilder();
}
}
public Builder clear() {
super.clear();
qualifier_ = com.google.protobuf.ByteString.EMPTY;
if (cellsBuilder_ == null) {
cells_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
cellsBuilder_.clear();
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_Column_descriptor;
}
public com.google.bigtable.v2.Column getDefaultInstanceForType() {
return com.google.bigtable.v2.Column.getDefaultInstance();
}
public com.google.bigtable.v2.Column build() {
com.google.bigtable.v2.Column result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.bigtable.v2.Column buildPartial() {
com.google.bigtable.v2.Column result = new com.google.bigtable.v2.Column(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.qualifier_ = qualifier_;
if (cellsBuilder_ == null) {
// Transfer ownership of the cells list to the message: make it immutable and
// clear the "mutable" bit so later builder mutations re-copy it.
if (((bitField0_ & 0x00000002) == 0x00000002)) {
cells_ = java.util.Collections.unmodifiableList(cells_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.cells_ = cells_;
} else {
result.cells_ = cellsBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.bigtable.v2.Column) {
return mergeFrom((com.google.bigtable.v2.Column)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.bigtable.v2.Column other) {
if (other == com.google.bigtable.v2.Column.getDefaultInstance()) return this;
if (other.getQualifier() != com.google.protobuf.ByteString.EMPTY) {
setQualifier(other.getQualifier());
}
if (cellsBuilder_ == null) {
if (!other.cells_.isEmpty()) {
if (cells_.isEmpty()) {
// Adopt the other message's (immutable) list directly; the cleared bit
// forces a defensive copy before any future mutation.
cells_ = other.cells_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureCellsIsMutable();
cells_.addAll(other.cells_);
}
onChanged();
}
} else {
if (!other.cells_.isEmpty()) {
if (cellsBuilder_.isEmpty()) {
cellsBuilder_.dispose();
cellsBuilder_ = null;
cells_ = other.cells_;
bitField0_ = (bitField0_ & ~0x00000002);
cellsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getCellsFieldBuilder() : null;
} else {
cellsBuilder_.addAllMessages(other.cells_);
}
}
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.bigtable.v2.Column parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.bigtable.v2.Column) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
// Merge whatever was parsed before a failure so partial data is not lost.
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <pre>
 * The unique key which identifies this column within its family. This is the
 * same key that's used to identify the column in, for example, a RowFilter
 * which sets its `column_qualifier_regex_filter` field.
 * May contain any byte string, including the empty string, up to 16kiB in
 * length.
 * </pre>
 *
 * <code>optional bytes qualifier = 1;</code>
 */
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
 * <pre>
 * The unique key which identifies this column within its family. This is the
 * same key that's used to identify the column in, for example, a RowFilter
 * which sets its `column_qualifier_regex_filter` field.
 * May contain any byte string, including the empty string, up to 16kiB in
 * length.
 * </pre>
 *
 * <code>optional bytes qualifier = 1;</code>
 */
public Builder setQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
qualifier_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The unique key which identifies this column within its family. This is the
 * same key that's used to identify the column in, for example, a RowFilter
 * which sets its `column_qualifier_regex_filter` field.
 * May contain any byte string, including the empty string, up to 16kiB in
 * length.
 * </pre>
 *
 * <code>optional bytes qualifier = 1;</code>
 */
public Builder clearQualifier() {
qualifier_ = getDefaultInstance().getQualifier();
onChanged();
return this;
}
private java.util.List<com.google.bigtable.v2.Cell> cells_ =
java.util.Collections.emptyList();
// Copies cells_ into a fresh ArrayList unless bit 0x00000002 says it is already mutable.
private void ensureCellsIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
cells_ = new java.util.ArrayList<com.google.bigtable.v2.Cell>(cells_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.bigtable.v2.Cell, com.google.bigtable.v2.Cell.Builder, com.google.bigtable.v2.CellOrBuilder> cellsBuilder_;
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public java.util.List<com.google.bigtable.v2.Cell> getCellsList() {
if (cellsBuilder_ == null) {
return java.util.Collections.unmodifiableList(cells_);
} else {
return cellsBuilder_.getMessageList();
}
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public int getCellsCount() {
if (cellsBuilder_ == null) {
return cells_.size();
} else {
return cellsBuilder_.getCount();
}
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.Cell getCells(int index) {
if (cellsBuilder_ == null) {
return cells_.get(index);
} else {
return cellsBuilder_.getMessage(index);
}
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder setCells(
int index, com.google.bigtable.v2.Cell value) {
if (cellsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCellsIsMutable();
cells_.set(index, value);
onChanged();
} else {
cellsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder setCells(
int index, com.google.bigtable.v2.Cell.Builder builderForValue) {
if (cellsBuilder_ == null) {
ensureCellsIsMutable();
cells_.set(index, builderForValue.build());
onChanged();
} else {
cellsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder addCells(com.google.bigtable.v2.Cell value) {
if (cellsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCellsIsMutable();
cells_.add(value);
onChanged();
} else {
cellsBuilder_.addMessage(value);
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder addCells(
int index, com.google.bigtable.v2.Cell value) {
if (cellsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCellsIsMutable();
cells_.add(index, value);
onChanged();
} else {
cellsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder addCells(
com.google.bigtable.v2.Cell.Builder builderForValue) {
if (cellsBuilder_ == null) {
ensureCellsIsMutable();
cells_.add(builderForValue.build());
onChanged();
} else {
cellsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder addCells(
int index, com.google.bigtable.v2.Cell.Builder builderForValue) {
if (cellsBuilder_ == null) {
ensureCellsIsMutable();
cells_.add(index, builderForValue.build());
onChanged();
} else {
cellsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder addAllCells(
java.lang.Iterable<? extends com.google.bigtable.v2.Cell> values) {
if (cellsBuilder_ == null) {
ensureCellsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, cells_);
onChanged();
} else {
cellsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder clearCells() {
if (cellsBuilder_ == null) {
cells_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
cellsBuilder_.clear();
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public Builder removeCells(int index) {
if (cellsBuilder_ == null) {
ensureCellsIsMutable();
cells_.remove(index);
onChanged();
} else {
cellsBuilder_.remove(index);
}
return this;
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.Cell.Builder getCellsBuilder(
int index) {
return getCellsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.CellOrBuilder getCellsOrBuilder(
int index) {
if (cellsBuilder_ == null) {
return cells_.get(index); } else {
return cellsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public java.util.List<? extends com.google.bigtable.v2.CellOrBuilder>
getCellsOrBuilderList() {
if (cellsBuilder_ != null) {
return cellsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(cells_);
}
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.Cell.Builder addCellsBuilder() {
return getCellsFieldBuilder().addBuilder(
com.google.bigtable.v2.Cell.getDefaultInstance());
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public com.google.bigtable.v2.Cell.Builder addCellsBuilder(
int index) {
return getCellsFieldBuilder().addBuilder(
index, com.google.bigtable.v2.Cell.getDefaultInstance());
}
/**
 * <pre>
 * Must not be empty. Sorted in order of decreasing "timestamp_micros".
 * </pre>
 *
 * <code>repeated .google.bigtable.v2.Cell cells = 2;</code>
 */
public java.util.List<com.google.bigtable.v2.Cell.Builder>
getCellsBuilderList() {
return getCellsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; once created, cells_ is nulled and the
// builder becomes the single source of truth for the cells field.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.bigtable.v2.Cell, com.google.bigtable.v2.Cell.Builder, com.google.bigtable.v2.CellOrBuilder>
getCellsFieldBuilder() {
if (cellsBuilder_ == null) {
cellsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.bigtable.v2.Cell, com.google.bigtable.v2.Cell.Builder, com.google.bigtable.v2.CellOrBuilder>(
cells_,
((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
cells_ = null;
}
return cellsBuilder_;
}
// proto3: unknown fields are dropped, so these are deliberate no-ops.
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.bigtable.v2.Column)
}
// @@protoc_insertion_point(class_scope:google.bigtable.v2.Column)
private static final com.google.bigtable.v2.Column DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.bigtable.v2.Column();
}
public static com.google.bigtable.v2.Column getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Column>
PARSER = new com.google.protobuf.AbstractParser<Column>() {
public Column parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Column(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Column> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Column> getParserForType() {
return PARSER;
}
public com.google.bigtable.v2.Column getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import com.google.common.collect.Collections2;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactSkyKey;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FileValue;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFile;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFileFactory;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.TraversalRequest;
import com.google.devtools.build.lib.vfs.Dirent;
import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.lib.vfs.Symlinks;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/** A {@link SkyFunction} to build {@link RecursiveFilesystemTraversalValue}s. */
public final class RecursiveFilesystemTraversalFunction implements SkyFunction {
// NOTE(review): presumably used as control flow to signal that a Skyframe dependency
// was requested but not yet available, so compute() must return null — confirm against
// the (not fully visible) compute()/lookUpFileInfo implementations.
private static final class MissingDepException extends Exception {}
/** Base class for exceptions that {@link RecursiveFilesystemTraversalFunctionException} wraps. */
public abstract static class RecursiveFilesystemTraversalException extends Exception {
// Subclasses supply a fully-formatted, human-readable message.
protected RecursiveFilesystemTraversalException(String message) {
super(message);
}
}
/** Thrown when a generated directory's root-relative path conflicts with a package's path. */
public static final class GeneratedPathConflictException extends
    RecursiveFilesystemTraversalException {
  GeneratedPathConflictException(TraversalRequest traversal) {
    super(describeConflict(traversal));
  }

  /** Builds the error message for a conflicting generated directory. */
  private static String describeConflict(TraversalRequest traversal) {
    // Prefer the request's own errorInfo when available; otherwise dump the request itself.
    String details = traversal.errorInfo != null ? traversal.errorInfo : traversal.toString();
    String rootRelativePath =
        traversal.root.asRootedPath().getRootRelativePath().getPathString();
    return String.format(
        "Generated directory %s conflicts with package under the same path. "
            + "Additional info: %s",
        rootRelativePath,
        details);
  }
}
/**
 * Thrown when the traversal encounters a subdirectory with a BUILD file but is not allowed to
 * recurse into it. See {@code PackageBoundaryMode#REPORT_ERROR}.
 */
public static final class CannotCrossPackageBoundaryException extends
RecursiveFilesystemTraversalException {
// Message is expected to be pre-formatted by the caller.
CannotCrossPackageBoundaryException(String message) {
super(message);
}
}
/**
 * Thrown when a dangling symlink is attempted to be dereferenced.
 *
 * <p>Note: this class is not identical to the one in com.google.devtools.build.lib.view.fileset
 * and it's not easy to merge the two because of the dependency structure. The other one will
 * probably be removed along with the rest of the legacy Fileset code.
 */
public static final class DanglingSymlinkException extends RecursiveFilesystemTraversalException {
  /** Path of the symlink itself; never null or empty. */
  public final String path;
  /** The link target that could not be resolved; never null or empty. */
  public final String unresolvedLink;

  /**
   * @param path path of the dangling symlink
   * @param unresolvedLink the target the symlink points to
   * @throws IllegalArgumentException if either argument is null or empty
   */
  public DanglingSymlinkException(String path, String unresolvedLink) {
    super(
        String.format(
            "Found dangling symlink: %s, unresolved path: \"%s\"", path, unresolvedLink));
    Preconditions.checkArgument(path != null && !path.isEmpty());
    Preconditions.checkArgument(unresolvedLink != null && !unresolvedLink.isEmpty());
    this.path = path;
    this.unresolvedLink = unresolvedLink;
  }

  /** Returns the path of the dangling symlink. */
  public String getPath() {
    return path;
  }

  /** Returns the unresolved link target; accessor added for parity with {@link #getPath}. */
  public String getUnresolvedLink() {
    return unresolvedLink;
  }
}
/** Thrown when we encounter errors from underlying File operations */
public static final class FileOperationException extends RecursiveFilesystemTraversalException {
  public FileOperationException(String message) {
    super(message);
  }

  /**
   * Creates an exception that also records the underlying cause, so the original stack trace is
   * preserved for debugging (the message-only constructor drops it).
   *
   * <p>Uses {@link Throwable#initCause} because the superclass only exposes a message-taking
   * constructor.
   */
  public FileOperationException(String message, Throwable cause) {
    super(message);
    initCause(cause);
  }
}
/** Exception type thrown by {@link RecursiveFilesystemTraversalFunction#compute}. */
private static final class RecursiveFilesystemTraversalFunctionException extends
    SkyFunctionException {
  RecursiveFilesystemTraversalFunctionException(RecursiveFilesystemTraversalException e) {
    // Marked PERSISTENT: the error is not expected to go away on re-evaluation.
    super(e, Transience.PERSISTENT);
  }
}
/**
 * Computes a {@link RecursiveFilesystemTraversalValue} for the given {@link TraversalRequest}.
 *
 * <p>Stats the traversal root and dispatches on the result: non-existent (empty value, or a
 * dangling-symlink value), a plain file (single-file value), or a directory / symlink to one
 * (package-boundary checks followed by a recursive traversal of the children).
 *
 * @return the traversal value, or null when a Skyframe dependency was missing and this
 *     function must be restarted
 */
@Override
public SkyValue compute(SkyKey skyKey, Environment env)
    throws RecursiveFilesystemTraversalFunctionException, InterruptedException {
  TraversalRequest traversal = (TraversalRequest) skyKey.argument();
  try {
    // Stat the traversal root.
    FileInfo rootInfo = lookUpFileInfo(env, traversal);

    if (!rootInfo.type.exists()) {
      // May be a dangling symlink or a non-existent file. Handle gracefully.
      if (rootInfo.type.isSymlink()) {
        return resultForDanglingSymlink(traversal.root.asRootedPath(), rootInfo);
      } else {
        return RecursiveFilesystemTraversalValue.EMPTY;
      }
    }

    if (rootInfo.type.isFile()) {
      return resultForFileRoot(traversal.root.asRootedPath(), rootInfo);
    }

    // Otherwise the root is a directory or a symlink to one.
    PkgLookupResult pkgLookupResult = checkIfPackage(env, traversal, rootInfo);
    // checkIfPackage may have re-rooted the request; continue with its version.
    traversal = pkgLookupResult.traversal;

    if (pkgLookupResult.isConflicting()) {
      // The traversal was requested for an output directory whose root-relative path conflicts
      // with a source package. We can't handle that, bail out.
      throw new RecursiveFilesystemTraversalFunctionException(
          new GeneratedPathConflictException(traversal));
    } else if (pkgLookupResult.isPackage() && !traversal.skipTestingForSubpackage) {
      // The traversal was requested for a directory that defines a package.
      String msg =
          traversal.errorInfo
              + " crosses package boundary into package rooted at "
              + traversal.root.asRootedPath().getRootRelativePath().getPathString();
      switch (traversal.crossPkgBoundaries) {
        case CROSS:
          // We are free to traverse the subpackage but we need to display a warning.
          env.getListener().handle(Event.warn(null, msg));
          break;
        case DONT_CROSS:
          // We cannot traverse the subpackage and should skip it silently. Return empty results.
          return RecursiveFilesystemTraversalValue.EMPTY;
        case REPORT_ERROR:
          // We cannot traverse the subpackage and should complain loudly (display an error).
          throw new RecursiveFilesystemTraversalFunctionException(
              new CannotCrossPackageBoundaryException(msg));
        default:
          throw new IllegalStateException(traversal.toString());
      }
    }

    // We are free to traverse this directory.
    Collection<SkyKey> dependentKeys = createRecursiveTraversalKeys(env, traversal);
    // For generated roots the recursion is inlined rather than creating Skyframe nodes.
    return resultForDirectory(
        traversal,
        rootInfo,
        traverseChildren(env, dependentKeys, /*inline=*/ traversal.isRootGenerated));
  } catch (IOException e) {
    // NOTE(review): only the message survives here; the IOException's stack trace is dropped.
    throw new RecursiveFilesystemTraversalFunctionException(
        new FileOperationException("Error while traversing fileset: " + e.getMessage()));
  } catch (MissingDepException e) {
    // A dependency was missing; returning null makes Skyframe restart this function later.
    return null;
  }
}
// Tags are not used by this function; always returns null.
@Override
public String extractTag(SkyKey skyKey) {
  return null;
}
/** Immutable snapshot of the lookup/stat result for a single path. */
private static final class FileInfo {
  // What kind of filesystem entry the path refers to.
  final FileType type;
  // Opaque change-detection token (e.g. a FileArtifactValue, a FileStateValue, or a stat hash).
  final Object metadata;
  // Fully-resolved path; null when the entry does not exist.
  @Nullable final RootedPath realPath;
  // The literal symlink target; null when the entry is not a symlink.
  @Nullable final PathFragment unresolvedSymlinkTarget;

  FileInfo(
      FileType type,
      Object metadata,
      @Nullable RootedPath realPath,
      @Nullable PathFragment unresolvedSymlinkTarget) {
    this.type = Preconditions.checkNotNull(type);
    this.metadata = metadata;
    this.realPath = realPath;
    this.unresolvedSymlinkTarget = unresolvedSymlinkTarget;
  }

  @Override
  public String toString() {
    if (type.isSymlink()) {
      // NOTE(review): assumes unresolvedSymlinkTarget is non-null for every symlink type —
      // this holds for the instances built in lookUpFileInfo, but is not enforced here.
      return String.format("(%s: link_value=%s, real_path=%s)", type,
          unresolvedSymlinkTarget.getPathString(), realPath);
    } else {
      return String.format("(%s: real_path=%s)", type, realPath);
    }
  }
}
/**
 * Shared sentinel for a non-existent path. The metadata is an arbitrary boxed constant; only
 * its equals/hashCode behavior matters. Integer.valueOf replaces the deprecated
 * {@code new Integer(0)} constructor (deprecated since JDK 9) with the cached boxed value.
 */
private static final FileInfo NON_EXISTENT_FILE_INFO =
    new FileInfo(FileType.NONEXISTENT, Integer.valueOf(0), null, null);
/**
 * Looks up {@link FileInfo} for the traversal root.
 *
 * <p>For generated roots ({@code traversal.isRootGenerated}) the artifact metadata is fetched
 * from Skyframe and the filesystem is statted directly; for source roots a {@link FileValue}
 * dependency is requested instead.
 *
 * @throws MissingDepException if a Skyframe dependency is not yet available
 * @throws IOException on filesystem errors, or when a vanished target is not backed by a symlink
 */
private static FileInfo lookUpFileInfo(Environment env, TraversalRequest traversal)
    throws MissingDepException, IOException, InterruptedException {
  if (traversal.isRootGenerated) {
    FileArtifactValue fsVal = null;
    if (traversal.root.getOutputArtifact() != null) {
      // The root is a known output artifact: request its metadata from Skyframe.
      Artifact artifact = traversal.root.getOutputArtifact();
      SkyKey artifactKey = ArtifactSkyKey.key(artifact, true);
      SkyValue value = env.getValue(artifactKey);
      if (env.valuesMissing()) {
        throw new MissingDepException();
      }

      if (value instanceof FileArtifactValue) {
        fsVal = (FileArtifactValue) value;
      } else {
        // Any other value type is treated as "no file here".
        return NON_EXISTENT_FILE_INFO;
      }
    }
    RootedPath realPath = traversal.root.asRootedPath();
    if (traversal.strictOutputFiles) {
      Preconditions.checkNotNull(fsVal, "Strict Fileset output tree has null FileArtifactValue");
      // Strict mode: treat the root as a plain file; no symlink handling.
      return new FileInfo(FileType.FILE, fsVal, realPath, null);
    } else {
      // FileArtifactValue does not currently track symlinks. If it did, we could potentially
      // remove some of the filesystem operations we're doing here.
      Path path = traversal.root.asRootedPath().asPath();
      FileStatus noFollowStat = path.stat(Symlinks.NOFOLLOW);
      FileStatus followStat = path.statIfFound(Symlinks.FOLLOW);
      FileType type;
      PathFragment unresolvedLinkTarget = null;
      if (followStat == null) {
        // Following links found nothing, so this must be a dangling symlink; anything else
        // (e.g. a file deleted between the two stats) is an error.
        type = FileType.DANGLING_SYMLINK;
        if (!noFollowStat.isSymbolicLink()) {
          throw new IOException("Expected symlink for " + path + ", but got: " + noFollowStat);
        }
        unresolvedLinkTarget = path.readSymbolicLink();
      } else if (noFollowStat.isFile()) {
        type = FileType.FILE;
      } else if (noFollowStat.isDirectory()) {
        type = FileType.DIRECTORY;
      } else {
        // Neither file nor directory without following links: a live symlink. Resolve it.
        unresolvedLinkTarget = path.readSymbolicLink();
        realPath =
            RootedPath.toRootedPath(
                Root.absoluteRoot(path.getFileSystem()), path.resolveSymbolicLinks());
        type = followStat.isFile() ? FileType.SYMLINK_TO_FILE : FileType.SYMLINK_TO_DIRECTORY;
      }
      // Prefer the artifact metadata; fall back to a hash of the no-follow stat.
      return new FileInfo(
          type, fsVal != null ? fsVal : noFollowStat.hashCode(), realPath, unresolvedLinkTarget);
    }
  } else {
    // Stat the file.
    FileValue fileValue =
        (FileValue) env.getValueOrThrow(
            FileValue.key(traversal.root.asRootedPath()), IOException.class);

    if (env.valuesMissing()) {
      throw new MissingDepException();
    }
    if (fileValue.exists()) {
      // If it exists, it may either be a symlink or a file/directory.
      PathFragment unresolvedLinkTarget = null;
      FileType type;
      if (fileValue.isSymlink()) {
        unresolvedLinkTarget = fileValue.getUnresolvedLinkTarget();
        type = fileValue.isDirectory() ? FileType.SYMLINK_TO_DIRECTORY : FileType.SYMLINK_TO_FILE;
      } else {
        type = fileValue.isDirectory() ? FileType.DIRECTORY : FileType.FILE;
      }
      return new FileInfo(
          type, fileValue.realFileStateValue(), fileValue.realRootedPath(), unresolvedLinkTarget);
    } else {
      // If it doesn't exist, or it's a dangling symlink, we still want to handle that gracefully.
      return new FileInfo(
          fileValue.isSymlink() ? FileType.DANGLING_SYMLINK : FileType.NONEXISTENT,
          fileValue.realFileStateValue(),
          null,
          fileValue.isSymlink() ? fileValue.getUnresolvedLinkTarget() : null);
    }
  }
}
/** Outcome of the package lookup performed by {@link #checkIfPackage}. */
private static final class PkgLookupResult {
  private enum Type {
    CONFLICT, DIRECTORY, PKG
  }

  private final Type type;
  // The traversal request the caller should continue with (possibly re-rooted by the lookup).
  final TraversalRequest traversal;
  // The root's file info (possibly re-statted under the new root).
  final FileInfo rootInfo;

  /** Result for a generated directory that conflicts with a source package. */
  static PkgLookupResult conflict(TraversalRequest traversal, FileInfo rootInfo) {
    return new PkgLookupResult(Type.CONFLICT, traversal, rootInfo);
  }

  /** Result for a source or generated directory (not a package). */
  static PkgLookupResult directory(TraversalRequest traversal, FileInfo rootInfo) {
    return new PkgLookupResult(Type.DIRECTORY, traversal, rootInfo);
  }

  /** Result for a package, i.e. a directory with a BUILD file. */
  static PkgLookupResult pkg(TraversalRequest traversal, FileInfo rootInfo) {
    return new PkgLookupResult(Type.PKG, traversal, rootInfo);
  }

  private PkgLookupResult(Type type, TraversalRequest traversal, FileInfo rootInfo) {
    this.type = Preconditions.checkNotNull(type);
    this.traversal = Preconditions.checkNotNull(traversal);
    this.rootInfo = Preconditions.checkNotNull(rootInfo);
  }

  boolean isPackage() {
    return type == Type.PKG;
  }

  boolean isConflicting() {
    return type == Type.CONFLICT;
  }

  @Override
  public String toString() {
    return String.format("(%s: info=%s, traversal=%s)", type, rootInfo, traversal);
  }
}
/**
 * Checks whether the {@code traversal}'s path refers to a package directory.
 *
 * @return the result of the lookup; it contains potentially new {@link TraversalRequest} and
 *     {@link FileInfo} so the caller should use these instead of the old ones (this happens when
 *     a package is found, but under a different root than expected)
 * @throws MissingDepException if the package lookup (or the re-stat) is not yet available
 */
private static PkgLookupResult checkIfPackage(
    Environment env, TraversalRequest traversal, FileInfo rootInfo)
    throws MissingDepException, IOException, InterruptedException {
  // Only existing directories (or symlinks to them) can be packages.
  Preconditions.checkArgument(rootInfo.type.exists() && !rootInfo.type.isFile(),
      "{%s} {%s}", traversal, rootInfo);
  PackageLookupValue pkgLookup =
      (PackageLookupValue)
          getDependentSkyValue(env,
              PackageLookupValue.key(traversal.root.asRootedPath().getRootRelativePath()));

  if (pkgLookup.packageExists()) {
    if (traversal.isRootGenerated) {
      // The traversal's root was a generated directory, but its root-relative path conflicts with
      // an existing package.
      return PkgLookupResult.conflict(traversal, rootInfo);
    } else {
      // The traversal's root was a source directory and it defines a package.
      Root pkgRoot = pkgLookup.getRoot();
      if (!pkgRoot.equals(traversal.root.asRootedPath().getRoot())) {
        // However the root of this package is different from what we expected. stat() the real
        // BUILD file of that package.
        traversal = traversal.forChangedRootPath(pkgRoot);
        rootInfo = lookUpFileInfo(env, traversal);
        Verify.verify(rootInfo.type.exists(), "{%s} {%s}", traversal, rootInfo);
      }
      return PkgLookupResult.pkg(traversal, rootInfo);
    }
  } else {
    // The traversal's root was a directory (source or generated one), no package exists under the
    // same root-relative path.
    return PkgLookupResult.directory(traversal, rootInfo);
  }
}
/**
 * List the directory and create {@code SkyKey}s to request contents of its children recursively.
 *
 * <p>The returned keys are of type {@link SkyFunctions#RECURSIVE_FILESYSTEM_TRAVERSAL}.
 *
 * @throws MissingDepException if the directory-listing dependency is not yet available
 */
private static Collection<SkyKey> createRecursiveTraversalKeys(
    Environment env, TraversalRequest traversal)
    throws MissingDepException, InterruptedException, IOException {
  // Use the traversal's path, even if it's a symlink. The contents of the directory, as listed
  // in the result, must be relative to it.
  Iterable<Dirent> dirents;
  if (traversal.isRootGenerated) {
    // If we're dealing with an output file, read the directory directly instead of creating
    // filesystem nodes under the output tree.
    List<Dirent> direntsCollection =
        new ArrayList<>(
            traversal.root.asRootedPath().asPath().readdir(Symlinks.FOLLOW));
    // Sort for a deterministic child order.
    Collections.sort(direntsCollection);
    dirents = direntsCollection;
  } else {
    dirents = ((DirectoryListingValue) getDependentSkyValue(env,
        DirectoryListingValue.key(traversal.root.asRootedPath()))).getDirents();
  }

  List<SkyKey> result = new ArrayList<>();
  for (Dirent dirent : dirents) {
    RootedPath childPath =
        RootedPath.toRootedPath(
            traversal.root.asRootedPath().getRoot(),
            traversal.root.asRootedPath().getRootRelativePath().getRelative(dirent.getName()));
    TraversalRequest childTraversal = traversal.forChildEntry(childPath);
    // A TraversalRequest serves directly as its own SkyKey.
    result.add(childTraversal);
  }
  return result;
}
/**
 * Creates the result value for a dangling symlink.
 *
 * @param linkName path to the symbolic link
 * @param info the {@link FileInfo} associated with the link file; must describe a symlink whose
 *     target does not exist
 */
private static RecursiveFilesystemTraversalValue resultForDanglingSymlink(RootedPath linkName,
    FileInfo info) {
  boolean isDanglingLink = info.type.isSymlink() && !info.type.exists();
  Preconditions.checkState(isDanglingLink, "{%s} {%s}", linkName, info.type);
  ResolvedFile danglingLink =
      ResolvedFileFactory.danglingSymlink(linkName, info.unresolvedSymlinkTarget, info.metadata);
  return RecursiveFilesystemTraversalValue.of(danglingLink);
}
/**
 * Creates the result value for a regular file, or for a symlink that points to one.
 *
 * <p>A symlink may be direct (points to a file) or transitive (points at a direct or transitive
 * symlink).
 */
private static RecursiveFilesystemTraversalValue resultForFileRoot(RootedPath path,
    FileInfo info) {
  Preconditions.checkState(info.type.isFile() && info.type.exists(), "{%s} {%s}", path,
      info.type);
  if (info.type.isSymlink()) {
    ResolvedFile symlink =
        ResolvedFileFactory.symlinkToFile(
            info.realPath, path, info.unresolvedSymlinkTarget, info.metadata);
    return RecursiveFilesystemTraversalValue.of(symlink);
  }
  ResolvedFile regularFile = ResolvedFileFactory.regularFile(path, info.metadata);
  return RecursiveFilesystemTraversalValue.of(regularFile);
}
/**
 * Creates the result value for a directory root (or a symlink to a directory).
 *
 * @param subdirTraversals traversal values of this directory's children, whose transitive file
 *     sets are merged into the result
 */
private static RecursiveFilesystemTraversalValue resultForDirectory(TraversalRequest traversal,
    FileInfo rootInfo, Collection<RecursiveFilesystemTraversalValue> subdirTraversals) {
  // Collect transitive closure of files in subdirectories.
  NestedSetBuilder<ResolvedFile> paths = NestedSetBuilder.stableOrder();
  for (RecursiveFilesystemTraversalValue child : subdirTraversals) {
    paths.addTransitive(child.getTransitiveFiles());
  }
  ResolvedFile root;
  if (rootInfo.type.isSymlink()) {
    NestedSet<ResolvedFile> children = paths.build();
    // The symlink root's metadata hash is derived from the children (see hashDirectorySymlink),
    // and the root itself is added to the transitive set alongside them.
    root =
        ResolvedFileFactory.symlinkToDirectory(
            rootInfo.realPath,
            traversal.root.asRootedPath(),
            rootInfo.unresolvedSymlinkTarget,
            hashDirectorySymlink(children, rootInfo.metadata));
    paths = NestedSetBuilder.<ResolvedFile>stableOrder().addTransitive(children).add(root);
  } else {
    root = ResolvedFileFactory.directory(rootInfo.realPath);
  }
  return RecursiveFilesystemTraversalValue.of(root, paths.build());
}
/**
 * Computes a metadata hash for a directory symlink from its children's metadata plus its own.
 *
 * <p>If the root is a directory symlink, the associated FileStateValue does not change when the
 * linked directory's contents change, so it cannot serve as the ResolvedFile metadata by itself.
 * Instead we fold the children's metadata hashes together with the root's, using the polynomial
 * hashing scheme described in Effective Java, 2nd ed., Item 9.
 */
private static int hashDirectorySymlink(Iterable<ResolvedFile> children, Object metadata) {
  int hash = 0;
  for (ResolvedFile child : children) {
    hash = 31 * hash + child.getMetadata().hashCode();
  }
  return 31 * hash + metadata.hashCode();
}
/**
 * Fetches a single dependency value from Skyframe.
 *
 * @throws MissingDepException if the value is not yet available; the caller must unwind so
 *     Skyframe can restart the evaluation
 */
private static SkyValue getDependentSkyValue(Environment env, SkyKey key)
    throws MissingDepException, InterruptedException {
  SkyValue value = env.getValue(key);
  if (env.valuesMissing()) {
    throw new MissingDepException();
  }
  return value;
}
/**
 * Requests Skyframe to compute the dependent values and returns them.
 *
 * <p>The keys must all be {@link SkyFunctions#RECURSIVE_FILESYSTEM_TRAVERSAL} keys.
 *
 * @param inline if true, evaluate each child by calling {@link #compute} directly instead of
 *     creating Skyframe nodes (used when the root is generated)
 * @throws MissingDepException if any child value is not yet available
 */
private Collection<RecursiveFilesystemTraversalValue> traverseChildren(
    Environment env, Iterable<SkyKey> keys, boolean inline)
    throws MissingDepException, InterruptedException,
    RecursiveFilesystemTraversalFunctionException {
  Map<SkyKey, SkyValue> values;
  if (inline) {
    // Don't create Skyframe nodes for a recursive traversal over the output tree.
    // Instead, inline the recursion in the top-level request.
    values = new HashMap<>();
    for (SkyKey depKey : keys) {
      values.put(depKey, compute(depKey, env));
    }
  } else {
    values = env.getValues(keys);
  }
  if (env.valuesMissing()) {
    // Inlined compute() calls may have returned null for missing deps; this check guards the
    // cast below against those nulls as well as against missing env.getValues results.
    throw new MissingDepException();
  }

  return Collections2.transform(values.values(), RecursiveFilesystemTraversalValue.class::cast);
}
/** Type information about the filesystem entry residing at a path. */
enum FileType {
  /** A regular file. */
  FILE {
    @Override boolean isFile() { return true; }
    @Override boolean exists() { return true; }
    @Override public String toString() { return "<f>"; }
  },
  /**
   * A symlink to a regular file.
   *
   * <p>The symlink may be direct (points to a non-symlink (here a file)) or it may be transitive
   * (points to a direct or transitive symlink).
   */
  SYMLINK_TO_FILE {
    @Override boolean isFile() { return true; }
    @Override boolean isSymlink() { return true; }
    @Override boolean exists() { return true; }
    @Override public String toString() { return "<lf>"; }
  },
  /** A directory. */
  DIRECTORY {
    @Override boolean isDirectory() { return true; }
    @Override boolean exists() { return true; }
    @Override public String toString() { return "<d>"; }
  },
  /**
   * A symlink to a directory.
   *
   * <p>The symlink may be direct (points to a non-symlink (here a directory)) or it may be
   * transitive (points to a direct or transitive symlink).
   */
  SYMLINK_TO_DIRECTORY {
    @Override boolean isDirectory() { return true; }
    @Override boolean isSymlink() { return true; }
    @Override boolean exists() { return true; }
    @Override public String toString() { return "<ld>"; }
  },
  /** A dangling symlink, i.e. one whose target is known not to exist. */
  DANGLING_SYMLINK {
    // Deliberately unanswerable: a dangling link resolves to neither a file nor a directory.
    @Override boolean isFile() { throw new UnsupportedOperationException(); }
    @Override boolean isDirectory() { throw new UnsupportedOperationException(); }
    @Override boolean isSymlink() { return true; }
    @Override public String toString() { return "<l?>"; }
  },
  /** A path that does not exist or should be ignored. */
  NONEXISTENT {
    @Override public String toString() { return "<?>"; }
  };

  // Conservative defaults; each constant overrides only the predicates that apply to it.
  boolean isFile() { return false; }
  boolean isDirectory() { return false; }
  boolean isSymlink() { return false; }
  boolean exists() { return false; }
  @Override public abstract String toString();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.