repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
canyinghao/CanRecyclerView
canrecyclerview/src/main/java/com/canyinghao/canrecyclerview/CanLinearLayoutManager.java
1644
package com.canyinghao.canrecyclerview; import android.content.Context; import androidx.recyclerview.widget.RecyclerView; /** * Created by canyinghao on 15/12/17.. * Copyright 2016 canyinghao * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public class CanLinearLayoutManager extends LinearLayoutManagerFix{ private static final int DEFAULT_EXTRA_LAYOUT_SPACE = 600; private int extraLayoutSpace = -1; public CanLinearLayoutManager(Context context) { super(context); } public CanLinearLayoutManager(Context context, int extraLayoutSpace) { super(context); this.extraLayoutSpace = extraLayoutSpace; } public CanLinearLayoutManager(Context context, int orientation, boolean reverseLayout) { super(context, orientation, reverseLayout); } public void setExtraLayoutSpace(int extraLayoutSpace) { this.extraLayoutSpace = extraLayoutSpace; } @Override protected int getExtraLayoutSpace(RecyclerView.State state) { if (extraLayoutSpace > 0) { return extraLayoutSpace; } return DEFAULT_EXTRA_LAYOUT_SPACE; } }
apache-2.0
sai-pullabhotla/catatumbo
src/main/java/com/jmethods/catatumbo/TransactionalTask.java
1380
/*
 * Copyright 2016 Sai Pullabhotla.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.jmethods.catatumbo;

/**
 * Interface for Callbacks to run Datastore operations within a Transaction.
 *
 * <p>Implementations supply the unit of work; the {@link EntityManager} owns the
 * transaction lifecycle (begin/commit/rollback) around the callback.
 *
 * @author Sai Pullabhotla
 * @param <T>
 *          the result type of this {@code TransactionalTask}.
 * @see EntityManager#executeInTransaction(TransactionalTask)
 *
 */
@FunctionalInterface
public interface TransactionalTask<T> {

  /**
   * Executes the task. After the execute method finishes normally, the transaction will be
   * committed by the {@link EntityManager}. If the execute method throws any exception, the
   * transaction will be rolled back.
   *
   * @param transaction
   *          the transaction to read from/write to the Cloud Datastore.
   * @return the result of execution
   */
  T execute(DatastoreTransaction transaction);
}
apache-2.0
dickschoeller/gedbrowser
gedbrowser-renderer/src/main/java/org/schoellerfamily/gedbrowser/renderer/PlaceInfo.java
3545
package org.schoellerfamily.gedbrowser.renderer;

import org.geojson.LngLatAlt;

/**
 * Immutable value object describing a place pin and its map viewport.
 *
 * @author Dick Schoeller
 */
public final class PlaceInfo {
    /** The place name to put on the map. */
    private final String placeName;

    /** The location to put the pin. */
    private final LngLatAlt location;

    /** Southwest corner of the bounding box. */
    private final LngLatAlt southwest;

    /** Northeast corner of the bounding box. */
    private final LngLatAlt northeast;

    /**
     * Builds a PlaceInfo from raw coordinates. Missing (null) coordinates are
     * stored as NaN, and the viewport corners are NaN unless both coordinates
     * are present, in which case they are offset by a fixed margin.
     *
     * @param placeName the place name to put on the map
     * @param latitude the latitude to put the pin
     * @param longitude the longitude to put the pin
     */
    public PlaceInfo(final String placeName, final Double latitude,
            final Double longitude) {
        this.placeName = placeName;
        // Null coordinates are represented as NaN in the pin location.
        final double lat = (latitude == null) ? Double.NaN : latitude;
        final double lng = (longitude == null) ? Double.NaN : longitude;
        location = new LngLatAlt(lng, lat);
        if (latitude == null || longitude == null) {
            // No usable viewport without both coordinates.
            southwest = new LngLatAlt(Double.NaN, Double.NaN);
            northeast = new LngLatAlt(Double.NaN, Double.NaN);
        } else {
            // Fixed margin, in degrees, around the pin for the viewport box.
            final double confidence = .01;
            southwest = new LngLatAlt(longitude - confidence,
                    latitude - confidence);
            northeast = new LngLatAlt(longitude + confidence,
                    latitude + confidence);
        }
    }

    /**
     * Builds a PlaceInfo from pre-computed location and viewport corners.
     *
     * @param placeName the name of the place
     * @param location the location of the pin
     * @param southwest viewport southwest
     * @param northeast viewport northeast
     */
    public PlaceInfo(final String placeName, final LngLatAlt location,
            final LngLatAlt southwest, final LngLatAlt northeast) {
        this.placeName = placeName;
        this.location = location;
        this.southwest = southwest;
        this.northeast = northeast;
    }

    /**
     * @return the place name
     */
    public String getPlaceName() {
        return placeName;
    }

    /**
     * @return the location
     */
    public LngLatAlt getLocation() {
        return location;
    }

    /**
     * @return the southwest corner of the viewport
     */
    public LngLatAlt getSouthwest() {
        return southwest;
    }

    /**
     * @return the northeast corner of the viewport
     */
    public LngLatAlt getNortheast() {
        return northeast;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        // A null place name gets the short form with no viewport corners.
        if (placeName == null) {
            return String.format(
                    "{ \"placeName\":null, "
                    + "\"latitude\":%4.6f, \"longitude\":%4.6f }",
                    location.getLatitude(), location.getLongitude());
        }
        return String.format(
                "{ \"placeName\":\"%s\", "
                + "\"latitude\":%4.6f, \"longitude\":%4.6f, "
                + "\"southwest\": { \"latitude\":%4.6f,"
                + " \"longitude\":%4.6f }, "
                + "\"northeast\": { \"latitude\":%4.6f,"
                + " \"longitude\":%4.6f } }",
                placeName,
                location.getLatitude(), location.getLongitude(),
                southwest.getLatitude(), southwest.getLongitude(),
                northeast.getLatitude(), northeast.getLongitude());
    }
}
apache-2.0
charles-cooper/idylfin
src/com/opengamma/analytics/financial/model/finitedifference/CrankNicolsonFiniteDifferenceSOR.java
12885
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.model.finitedifference;

import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.Validate;

import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.surface.Surface;

/**
 * Crank-Nicolson scheme using SOR algorithm to solve the matrix system at each time step.
 * <b>Note</b> this is for testing purposes and is not recommended for actual use. Use ThetaMethodFiniteDifference for production.
 */
public class CrankNicolsonFiniteDifferenceSOR implements ConvectionDiffusionPDESolver {

  // Implicit/explicit mixing weight: 0 = fully explicit, 0.5 = Crank-Nicolson, 1 = fully implicit.
  private final double _theta;

  /**
   * Sets up a standard Crank-Nicolson scheme
   */
  public CrankNicolsonFiniteDifferenceSOR() {
    _theta = 0.5;
  }

  /**
   * Sets up a scheme that is the weighted average of an explicit and an implicit scheme
   * @param theta The weight. theta = 0 - fully explicit, theta = 0.5 - Crank-Nicolson, theta = 1.0 - fully implicit
   */
  public CrankNicolsonFiniteDifferenceSOR(final double theta) {
    Validate.isTrue(theta >= 0 && theta <= 1.0, "theta must be in the range 0 to 1");
    _theta = theta;
  }

  // Convenience overload: no free (early-exercise) boundary.
  @SuppressWarnings("deprecation")
  @Override
  public PDEResults1D solve(final ZZConvectionDiffusionPDEDataBundle pdeData, final int tSteps, final int xSteps, final double tMax, final BoundaryCondition lowerBoundary,
      final BoundaryCondition upperBoundary) {
    return solve(pdeData, tSteps, xSteps, tMax, lowerBoundary, upperBoundary, null);
  }

  // Unpacks the (deprecated) data bundle and delegates to the coefficient-based overload.
  @SuppressWarnings("deprecation")
  @Override
  public PDEResults1D solve(final ZZConvectionDiffusionPDEDataBundle pdeData, final int tSteps, final int xSteps, final double tMax, final BoundaryCondition lowerBoundary,
      final BoundaryCondition upperBoundary, final Surface<Double, Double, Double> freeBoundary) {
    return solve(pdeData.getCoefficients(), pdeData.getInitialCondition(), tSteps, xSteps, tMax, lowerBoundary, upperBoundary, freeBoundary);
  }

  // Not supported by this test implementation.
  @Override
  public PDEResults1D solve(PDE1DDataBundle<ConvectionDiffusionPDE1DCoefficients> pdeData) {
    throw new NotImplementedException();
  }

  // Convenience overload: no free boundary.
  @Override
  public PDEResults1D solve(ConvectionDiffusionPDE1DStandardCoefficients pdeData, Function1D<Double, Double> initialCondition, int tSteps, int xSteps, double tMax,
      BoundaryCondition lowerBoundary, BoundaryCondition upperBoundary) {
    return solve(pdeData, initialCondition, tSteps, xSteps, tMax, lowerBoundary, upperBoundary, null);
  }

  /**
   * Solves the 1-D convection-diffusion PDE on a uniform time/space grid, using theta-weighted
   * time stepping and SOR (omega = 1, i.e. Gauss-Seidel) to solve the linear system each step.
   * If {@code freeBoundary} is non-null, the SOR update is floored at the free-boundary surface
   * (projected SOR, as used for American-style obstacles).
   */
  @Override
  public PDEResults1D solve(ConvectionDiffusionPDE1DStandardCoefficients pdeData, Function1D<Double, Double> initialCondition, int tSteps, int xSteps, double tMax,
      BoundaryCondition lowerBoundary, BoundaryCondition upperBoundary, Surface<Double, Double, Double> freeBoundary) {

    // simple test code - doesn't use a PDEGrid1D
    final PDEGrid1D grid = new PDEGrid1D(tSteps + 1, xSteps + 1, tMax, lowerBoundary.getLevel(), upperBoundary.getLevel());

    // Uniform step sizes and the recurring ratios dt/dx^2 and dt/dx.
    final double dt = tMax / (tSteps);
    final double dx = (upperBoundary.getLevel() - lowerBoundary.getLevel()) / (xSteps);
    final double dtdx2 = dt / dx / dx;
    final double dtdx = dt / dx;

    final double[] f = new double[xSteps + 1]; // current solution values
    final double[] x = new double[xSteps + 1]; // space grid points
    final double[] q = new double[xSteps + 1]; // right-hand side of the linear system
    final double[][] m = new double[xSteps + 1][xSteps + 1]; // dense (tridiagonal + boundary rows) system matrix

    // double[] coefficients = new double[3];

    double currentX = lowerBoundary.getLevel();
    double a, b, c, aa, bb, cc;

    // Initial condition sampled on the space grid.
    for (int i = 0; i <= xSteps; i++) {
      currentX = lowerBoundary.getLevel() + i * dx;
      x[i] = currentX;
      final double value = initialCondition.evaluate(currentX);
      f[i] = value;
    }

    double t = 0.0;

    for (int n = 0; n < tSteps; n++) {
      t += dt;

      // Interior nodes: explicit part (at t - dt) goes into q, implicit part (at t) into m.
      for (int i = 1; i < xSteps; i++) {
        a = pdeData.getA(t - dt, x[i]);
        b = pdeData.getB(t - dt, x[i]);
        c = pdeData.getC(t - dt, x[i]);

        double rho = a;
        // Disabled exponential-fitting correction for convection-dominated problems:
        // double bdx = (b * dx / 2);
        // if (Math.abs(bdx) > 10 * Math.abs(a)) {
        // rho = Math.abs(bdx);
        // } else if (Math.abs(a) > 10 * Math.abs(bdx)) {
        // rho = a;
        // } else {
        // rho = bdx / Math.tanh(bdx / a);
        // }

        aa = (1 - _theta) * (-dtdx2 * rho + 0.5 * dtdx * b);
        bb = 1 + (1 - _theta) * (2 * dtdx2 * rho - dt * c);
        cc = (1 - _theta) * (-dtdx2 * rho - 0.5 * dtdx * b);
        q[i] = aa * f[i - 1] + bb * f[i] + cc * f[i + 1];

        // TODO could store these
        a = pdeData.getA(t, x[i]);
        b = pdeData.getB(t, x[i]);
        c = pdeData.getC(t, x[i]);
        rho = a;
        // bdx = (b * dx / 2);
        // if (Math.abs(bdx) > 10 * Math.abs(a)) {
        // rho = Math.abs(bdx);
        // } else if (Math.abs(a) > 10 * Math.abs(bdx)) {
        // rho = a;
        // } else {
        // rho = bdx / Math.tanh(bdx / a);
        // }

        aa = (-dtdx2 * rho + 0.5 * dtdx * b);
        bb = (2 * dtdx2 * rho - dt * c);
        cc = (-dtdx2 * rho - 0.5 * dtdx * b);
        m[i][i - 1] = -_theta * aa;
        m[i][i] = 1 - _theta * bb;
        m[i][i + 1] = -_theta * cc;
      }

      // First/last rows of m and q come from the boundary conditions.
      double[] temp = lowerBoundary.getLeftMatrixCondition(pdeData, grid, t);
      for (int k = 0; k < temp.length; k++) {
        m[0][k] = temp[k];
      }
      temp = upperBoundary.getLeftMatrixCondition(pdeData, grid, t);
      for (int k = 0; k < temp.length; k++) {
        m[xSteps][xSteps - k] = temp[k];
      }

      temp = lowerBoundary.getRightMatrixCondition(pdeData, grid, t);
      double sum = 0;
      for (int k = 0; k < temp.length; k++) {
        sum += temp[k] * f[k];
      }
      q[0] = sum + lowerBoundary.getConstant(pdeData, t);

      temp = upperBoundary.getRightMatrixCondition(pdeData, grid, t);
      sum = 0;
      for (int k = 0; k < temp.length; k++) {
        sum += temp[k] * f[xSteps - k];
      }
      q[xSteps] = sum + upperBoundary.getConstant(pdeData, t);

      // SOR
      // omega = 1 makes this plain Gauss-Seidel; iterate until the relative
      // squared correction is tiny. Updates f in place.
      final double omega = 1.0;
      double scale = 1.0;
      double errorSqr = Double.POSITIVE_INFINITY;
      while (errorSqr / (scale + 1e-10) > 1e-18) {
        errorSqr = 0.0;
        scale = 0.0;
        for (int j = 0; j <= xSteps; j++) {
          sum = 0;
          for (int k = 0; k <= xSteps; k++) {
            sum += m[j][k] * f[k];
          }
          double correction = omega / m[j][j] * (q[j] - sum);
          if (freeBoundary != null) {
            // Projected SOR: never let the solution fall below the free boundary.
            correction = Math.max(correction, freeBoundary.getZValue(t, x[j]) - f[j]);
          }
          errorSqr += correction * correction;
          f[j] += correction;
          scale += f[j] * f[j];
        }
      }
    }

    return new PDETerminalResults1D(grid, f);
  }

  /**
   * Variant of the solver for a caller-supplied (possibly non-uniform) time and space grid.
   * Same theta-weighted stepping and projected-SOR solve as the uniform-grid overload, but with
   * per-interval dt/dx and three-point finite-difference weights for unequal spacing.
   */
  @SuppressWarnings("deprecation")
  public PDEResults1D solve(final ZZConvectionDiffusionPDEDataBundle pdeData, final double[] timeGrid, final double[] spaceGrid, final BoundaryCondition lowerBoundary,
      final BoundaryCondition upperBoundary, final Surface<Double, Double, Double> freeBoundary) {
    Validate.notNull(pdeData, "pde data");
    final PDEGrid1D grid = new PDEGrid1D(timeGrid, spaceGrid);
    final int tNodes = timeGrid.length;
    final int xNodes = spaceGrid.length;
    Validate.isTrue(tNodes > 1, "need at least 2 time nodes");
    Validate.isTrue(xNodes > 2, "need at least 3 space nodes");

    // TODO would like more sophistication that simply checking to the grid is consistent with the boundary level
    Validate.isTrue(Math.abs(spaceGrid[0] - lowerBoundary.getLevel()) < 1e-7, "space grid not consistent with boundary level");
    Validate.isTrue(Math.abs(spaceGrid[xNodes - 1] - upperBoundary.getLevel()) < 1e-7, "space grid not consistent with boundary level");

    // Per-interval step sizes; both grids must be strictly increasing.
    final double[] dt = new double[tNodes - 1];
    for (int n = 0; n < tNodes - 1; n++) {
      dt[n] = timeGrid[n + 1] - timeGrid[n];
      Validate.isTrue(dt[n] > 0, "time steps must be increasing");
    }
    final double[] dx = new double[xNodes - 1];
    for (int i = 0; i < xNodes - 1; i++) {
      dx[i] = spaceGrid[i + 1] - spaceGrid[i];
      Validate.isTrue(dx[i] > 0, "space steps must be increasing");
    }

    final double[] f = new double[xNodes]; // current solution values
    final double[] q = new double[xNodes]; // right-hand side
    final double[][] m = new double[xNodes][xNodes]; // system matrix

    double a, b, c, aa, bb, cc;

    // Initial condition sampled on the supplied space grid.
    for (int i = 0; i < xNodes; i++) {
      f[i] = pdeData.getInitialValue(spaceGrid[i]);
    }

    for (int n = 1; n < tNodes; n++) {

      // Interior nodes: non-uniform three-point weights; explicit part into q, implicit into m.
      for (int i = 1; i < xNodes - 1; i++) {
        a = pdeData.getA(timeGrid[n - 1], spaceGrid[i]);
        b = pdeData.getB(timeGrid[n - 1], spaceGrid[i]);
        c = pdeData.getC(timeGrid[n - 1], spaceGrid[i]);

        aa = (1 - _theta) * dt[n - 1] * (-2 / dx[i - 1] / (dx[i - 1] + dx[i]) * a + dx[i] / dx[i - 1] / (dx[i - 1] + dx[i]) * b);
        bb = 1 + (1 - _theta) * dt[n - 1] * (2 / dx[i - 1] / dx[i] * a - (dx[i] - dx[i - 1]) / dx[i - 1] / dx[i] * b - c);
        cc = (1 - _theta) * dt[n - 1] * (-2 / dx[i] / (dx[i - 1] + dx[i]) * a - dx[i - 1] / dx[i] / (dx[i - 1] + dx[i]) * b);
        q[i] = aa * f[i - 1] + bb * f[i] + cc * f[i + 1];

        // TODO could store these
        a = pdeData.getA(timeGrid[n], spaceGrid[i]);
        b = pdeData.getB(timeGrid[n], spaceGrid[i]);
        c = pdeData.getC(timeGrid[n], spaceGrid[i]);

        aa = dt[n - 1] * (-2 / dx[i - 1] / (dx[i - 1] + dx[i]) * a + dx[i] / dx[i - 1] / (dx[i - 1] + dx[i]) * b);
        bb = dt[n - 1] * (2 / dx[i - 1] / dx[i] * a - (dx[i] - dx[i - 1]) / dx[i - 1] / dx[i] * b - c);
        cc = dt[n - 1] * (-2 / dx[i] / (dx[i - 1] + dx[i]) * a - dx[i - 1] / dx[i] / (dx[i - 1] + dx[i]) * b);
        m[i][i - 1] = -_theta * aa;
        m[i][i] = 1 - _theta * bb;
        m[i][i + 1] = -_theta * cc;
      }

      // Boundary rows of m and q.
      double[] temp = lowerBoundary.getLeftMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
      for (int k = 0; k < temp.length; k++) {
        m[0][k] = temp[k];
      }
      temp = upperBoundary.getLeftMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
      for (int k = 0; k < temp.length; k++) {
        m[xNodes - 1][xNodes - 1 - k] = temp[k];
      }

      // debug
      // m[xNodes - 1][xNodes - 3] = 2 / dx[xNodes - 3] / (dx[xNodes - 3] + dx[xNodes - 2]);
      // m[xNodes - 1][xNodes - 2] = -2 / dx[xNodes - 3] / dx[xNodes - 2];
      // m[xNodes - 1][xNodes - 1] = 2 / dx[xNodes - 2] / (dx[xNodes - 3] + dx[xNodes - 2]);

      temp = lowerBoundary.getRightMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
      double sum = 0;
      for (int k = 0; k < temp.length; k++) {
        sum += temp[k] * f[k];
      }
      q[0] = sum + lowerBoundary.getConstant(pdeData.getCoefficients(), timeGrid[n]);

      // TODO need to change how boundary are calculated - dx[0] wrong for non-constant grid
      temp = upperBoundary.getRightMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
      sum = 0;
      for (int k = 0; k < temp.length; k++) {
        sum += temp[k] * f[xNodes - 1 - k];
      }
      q[xNodes - 1] = sum + upperBoundary.getConstant(pdeData.getCoefficients(), timeGrid[n]);

      // SOR
      // Same projected Gauss-Seidel iteration as the uniform-grid solver.
      final double omega = 1.0;
      double scale = 1.0;
      double errorSqr = Double.POSITIVE_INFINITY;
      while (errorSqr / (scale + 1e-10) > 1e-18) {
        errorSqr = 0.0;
        scale = 0.0;
        for (int j = 0; j < xNodes; j++) {
          sum = 0;
          for (int k = 0; k < xNodes; k++) {
            sum += m[j][k] * f[k];
          }
          double correction = omega / m[j][j] * (q[j] - sum);
          if (freeBoundary != null) {
            correction = Math.max(correction, freeBoundary.getZValue(timeGrid[n], spaceGrid[j]) - f[j]);
          }
          errorSqr += correction * correction;
          f[j] += correction;
          scale += f[j] * f[j];
        }
      }
    }

    return new PDETerminalResults1D(grid, f);
  }

  // Intentionally unsupported; see exception message.
  @SuppressWarnings("deprecation")
  @Override
  public PDEResults1D solve(final ZZConvectionDiffusionPDEDataBundle pdeData, final PDEGrid1D grid, final BoundaryCondition lowerBoundary, final BoundaryCondition upperBoundary) {
    throw new NotImplementedException("This is a simple test implimentation of Crank-Nicolson. If you do what to run this scheme, use ThetaMethodFiniteDifference with theta = 0.5");
  }

  // Intentionally unsupported; see exception message.
  @SuppressWarnings("deprecation")
  @Override
  public PDEResults1D solve(final ZZConvectionDiffusionPDEDataBundle pdeData, final PDEGrid1D grid, final BoundaryCondition lowerBoundary, final BoundaryCondition upperBoundary,
      final Surface<Double, Double, Double> freeBoundary) {
    throw new NotImplementedException("This is a simple test implimentation of Crank-Nicolson. If you do what to run this scheme, use ThetaMethodFiniteDifference with theta = 0.5");
  }
}
apache-2.0
eveliotc/fragments
Fragments/src/main/java/info/evelio/fragments/HelloFragment.java
457
package info.evelio.fragments;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

/**
 * Minimal fragment whose entire UI is the static {@code fragment_hello} layout.
 */
public class HelloFragment extends Fragment {

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    // Inflate without attaching; the fragment framework adds the view to the container.
    final View root = inflater.inflate(R.layout.fragment_hello, container, false);
    return root;
  }
}
apache-2.0
junhaozhou/old-driver
app/src/main/java/com/littlechoc/olddriver/ui/view/CustomNavigationView.java
3702
package com.littlechoc.olddriver.ui.view; import android.content.Context; import android.content.Intent; import android.support.annotation.NonNull; import android.support.design.widget.NavigationView; import android.support.v4.widget.DrawerLayout; import android.util.AttributeSet; import android.view.Gravity; import android.view.MenuItem; import android.view.View; import com.littlechoc.olddriver.R; import com.littlechoc.olddriver.obd.reader.activity.ConfigActivity; import com.littlechoc.olddriver.obd.reader.activity.MainActivity; import com.littlechoc.olddriver.ui.BluetoothActivity; import com.littlechoc.olddriver.ui.HistoryActivity; import com.littlechoc.olddriver.ui.SettingActivity; /** * @author Junhao Zhou 2017/3/19 */ public class CustomNavigationView extends NavigationView implements NavigationView.OnNavigationItemSelectedListener, DrawerLayout.DrawerListener { private static final int ACTION_NONE = 0; private static final int ACTION_HISTORY = 1; private static final int ACTION_BLUETOOTH = 2; private static final int ACTION_SETTING = 3; private static final int ACTION_LIB_CONFIG = 9; private static final int ACTION_LIB_MAIN = 10; private View headerView; private DrawerLayout drawerLayout; private int action = ACTION_NONE; public CustomNavigationView(Context context) { this(context, null); } public CustomNavigationView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public CustomNavigationView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(); } private void init() { setNavigationItemSelectedListener(this); headerView = getHeaderView(0); drawerLayout = (DrawerLayout) getParent(); } public void setDrawerLayout(DrawerLayout drawerLayout) { this.drawerLayout = drawerLayout; } @Override public boolean onNavigationItemSelected(@NonNull MenuItem item) { switch (item.getItemId()) { case R.id.menu_history: closeDrawer(); action = ACTION_HISTORY; return true; case R.id.menu_bluetooth: closeDrawer(); 
action = ACTION_BLUETOOTH; return true; case R.id.menu_setting: closeDrawer(); action = ACTION_SETTING; return true; case R.id.lib_config: closeDrawer(); action = ACTION_LIB_CONFIG; return true; case R.id.lib_main: closeDrawer(); action = ACTION_LIB_MAIN; return true; } return false; } private void closeDrawer() { if (drawerLayout != null) { drawerLayout.closeDrawer(Gravity.START); drawerLayout.removeDrawerListener(this); drawerLayout.addDrawerListener(this); } } @Override public void onDrawerSlide(View drawerView, float slideOffset) { } @Override public void onDrawerOpened(View drawerView) { } @Override public void onDrawerClosed(View drawerView) { if (ACTION_BLUETOOTH == action) { getContext().startActivity(new Intent(getContext(), BluetoothActivity.class)); } else if (ACTION_HISTORY == action) { getContext().startActivity(new Intent(getContext(), HistoryActivity.class)); } else if (ACTION_SETTING == action) { getContext().startActivity(new Intent(getContext(), SettingActivity.class)); } else if (ACTION_LIB_CONFIG == action) { getContext().startActivity(new Intent(getContext(), ConfigActivity.class)); } else if (ACTION_LIB_MAIN == action) { getContext().startActivity(new Intent(getContext(), MainActivity.class)); } action = ACTION_NONE; } @Override public void onDrawerStateChanged(int newState) { } }
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-lexmodelbuilding/src/main/java/com/amazonaws/services/lexmodelbuilding/model/BuiltinSlotTypeMetadata.java
9478
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.lexmodelbuilding.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Provides information about a built in slot type.
 * </p>
 * <p>
 * Generated code — mutable POJO with fluent {@code withXxx} setters.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lex-models-2017-04-19/BuiltinSlotTypeMetadata" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BuiltinSlotTypeMetadata implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * A unique identifier for the built-in slot type. To find the signature for a slot type, see <a href=
     * "https://developer.amazon.com/public/solutions/alexa/alexa-skills-kit/docs/built-in-intent-ref/slot-type-reference"
     * >Slot Type Reference</a> in the <i>Alexa Skills Kit</i>.
     * </p>
     */
    private String signature;
    /**
     * <p>
     * A list of target locales for the slot.
     * </p>
     */
    private java.util.List<String> supportedLocales;

    /**
     * Sets the unique identifier (signature) of the built-in slot type.
     *
     * @param signature
     *        A unique identifier for the built-in slot type.
     */
    public void setSignature(String signature) {
        this.signature = signature;
    }

    /**
     * Returns the unique identifier (signature) of the built-in slot type.
     *
     * @return the slot type signature
     */
    public String getSignature() {
        return this.signature;
    }

    /**
     * Fluent variant of {@link #setSignature(String)}.
     *
     * @param signature
     *        A unique identifier for the built-in slot type.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public BuiltinSlotTypeMetadata withSignature(String signature) {
        setSignature(signature);
        return this;
    }

    /**
     * Returns the target locales for the slot (may be null; the internal list is exposed directly).
     *
     * @return A list of target locales for the slot.
     * @see Locale
     */
    public java.util.List<String> getSupportedLocales() {
        return supportedLocales;
    }

    /**
     * Replaces the target locales for the slot. A null argument clears the list; otherwise the
     * collection is defensively copied.
     *
     * @param supportedLocales
     *        A list of target locales for the slot.
     * @see Locale
     */
    public void setSupportedLocales(java.util.Collection<String> supportedLocales) {
        if (supportedLocales == null) {
            this.supportedLocales = null;
            return;
        }

        this.supportedLocales = new java.util.ArrayList<String>(supportedLocales);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSupportedLocales(java.util.Collection)} or {@link #withSupportedLocales(java.util.Collection)} if you
     * want to override the existing values.
     * </p>
     *
     * @param supportedLocales
     *        A list of target locales for the slot.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Locale
     */
    public BuiltinSlotTypeMetadata withSupportedLocales(String... supportedLocales) {
        if (this.supportedLocales == null) {
            setSupportedLocales(new java.util.ArrayList<String>(supportedLocales.length));
        }
        for (String ele : supportedLocales) {
            this.supportedLocales.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setSupportedLocales(java.util.Collection)} (replaces, does not append).
     *
     * @param supportedLocales
     *        A list of target locales for the slot.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Locale
     */
    public BuiltinSlotTypeMetadata withSupportedLocales(java.util.Collection<String> supportedLocales) {
        setSupportedLocales(supportedLocales);
        return this;
    }

    /**
     * Appends the string form of each {@code Locale} enum value to the existing list (if any).
     *
     * @param supportedLocales
     *        A list of target locales for the slot.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see Locale
     */
    public BuiltinSlotTypeMetadata withSupportedLocales(Locale... supportedLocales) {
        java.util.ArrayList<String> supportedLocalesCopy = new java.util.ArrayList<String>(supportedLocales.length);
        for (Locale value : supportedLocales) {
            supportedLocalesCopy.add(value.toString());
        }
        if (getSupportedLocales() == null) {
            setSupportedLocales(supportedLocalesCopy);
        } else {
            getSupportedLocales().addAll(supportedLocalesCopy);
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSignature() != null)
            sb.append("Signature: ").append(getSignature()).append(",");
        if (getSupportedLocales() != null)
            sb.append("SupportedLocales: ").append(getSupportedLocales());
        sb.append("}");
        return sb.toString();
    }

    /** Field-by-field equality on signature and supportedLocales (generated style). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof BuiltinSlotTypeMetadata == false)
            return false;
        BuiltinSlotTypeMetadata other = (BuiltinSlotTypeMetadata) obj;
        if (other.getSignature() == null ^ this.getSignature() == null)
            return false;
        if (other.getSignature() != null && other.getSignature().equals(this.getSignature()) == false)
            return false;
        if (other.getSupportedLocales() == null ^ this.getSupportedLocales() == null)
            return false;
        if (other.getSupportedLocales() != null && other.getSupportedLocales().equals(this.getSupportedLocales()) == false)
            return false;
        return true;
    }

    /** Hash code consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getSignature() == null) ? 0 : getSignature().hashCode());
        hashCode = prime * hashCode + ((getSupportedLocales() == null) ? 0 : getSupportedLocales().hashCode());
        return hashCode;
    }

    /** Shallow clone via Object.clone(); the supportedLocales list is shared with the clone. */
    @Override
    public BuiltinSlotTypeMetadata clone() {
        try {
            return (BuiltinSlotTypeMetadata) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    /** Delegates wire marshalling to the generated transform marshaller. */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.lexmodelbuilding.model.transform.BuiltinSlotTypeMetadataMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
apache-2.0
songshu198907/LeetPractise
src/main/java/design_pattern/command/ConfigOperator.java
361
package design_pattern.command; import java.io.Serializable; /** * Created by songheng on 9/12/16. */ public class ConfigOperator implements Serializable { public void insert(String args) { System.out.println("增加新节点 :" + args); } public void modify(String args) { System.out.println("修改节点:" + args); } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-glue/src/main/java/com/amazonaws/services/glue/model/DeleteTableRequest.java
9680
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Glue {@code DeleteTable} operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/DeleteTable" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteTableRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** ID of the Data Catalog; defaults to the caller's AWS account ID when absent. */
    private String catalogId;

    /** Catalog database holding the table; entirely lowercase for Hive compatibility. */
    private String databaseName;

    /** Name of the table to delete; entirely lowercase for Hive compatibility. */
    private String name;

    /** Transaction ID at which to delete the table contents. */
    private String transactionId;

    /** @param catalogId Data Catalog ID (account ID is used when none is provided) */
    public void setCatalogId(String catalogId) {
        this.catalogId = catalogId;
    }

    /** @return the Data Catalog ID, or {@code null} if unset */
    public String getCatalogId() {
        return this.catalogId;
    }

    /**
     * Fluent variant of {@link #setCatalogId(String)}.
     *
     * @return this request, for call chaining
     */
    public DeleteTableRequest withCatalogId(String catalogId) {
        this.catalogId = catalogId;
        return this;
    }

    /** @param databaseName catalog database containing the table (lowercase) */
    public void setDatabaseName(String databaseName) {
        this.databaseName = databaseName;
    }

    /** @return the catalog database name, or {@code null} if unset */
    public String getDatabaseName() {
        return this.databaseName;
    }

    /**
     * Fluent variant of {@link #setDatabaseName(String)}.
     *
     * @return this request, for call chaining
     */
    public DeleteTableRequest withDatabaseName(String databaseName) {
        this.databaseName = databaseName;
        return this;
    }

    /** @param name name of the table to delete (lowercase) */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the table name, or {@code null} if unset */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @return this request, for call chaining
     */
    public DeleteTableRequest withName(String name) {
        this.name = name;
        return this;
    }

    /** @param transactionId transaction ID at which to delete the table contents */
    public void setTransactionId(String transactionId) {
        this.transactionId = transactionId;
    }

    /** @return the transaction ID, or {@code null} if unset */
    public String getTransactionId() {
        return this.transactionId;
    }

    /**
     * Fluent variant of {@link #setTransactionId(String)}.
     *
     * @return this request, for call chaining
     */
    public DeleteTableRequest withTransactionId(String transactionId) {
        this.transactionId = transactionId;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * The layout ("{Field: value,...}") matches the SDK's generated format exactly.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getCatalogId() != null) {
            sb.append("CatalogId: ").append(getCatalogId()).append(",");
        }
        if (getDatabaseName() != null) {
            sb.append("DatabaseName: ").append(getDatabaseName()).append(",");
        }
        if (getName() != null) {
            sb.append("Name: ").append(getName()).append(",");
        }
        if (getTransactionId() != null) {
            sb.append("TransactionId: ").append(getTransactionId());
        }
        return sb.append("}").toString();
    }

    /** Null-tolerant field equality used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DeleteTableRequest)) {
            // Also covers obj == null.
            return false;
        }
        DeleteTableRequest that = (DeleteTableRequest) obj;
        return fieldEquals(getCatalogId(), that.getCatalogId())
                && fieldEquals(getDatabaseName(), that.getDatabaseName())
                && fieldEquals(getName(), that.getName())
                && fieldEquals(getTransactionId(), that.getTransactionId());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (null contributes 0) as the generated
        // code, so hash values are unchanged.
        int hashCode = 1;
        for (Object field : new Object[] { getCatalogId(), getDatabaseName(), getName(), getTransactionId() }) {
            hashCode = 31 * hashCode + (field == null ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public DeleteTableRequest clone() {
        // AmazonWebServiceRequest implements Cloneable; a shallow copy is
        // sufficient since all fields are immutable Strings.
        return (DeleteTableRequest) super.clone();
    }
}
apache-2.0
moritalous/LineBot2
src/main/java/forest/rice/field/k/linebot/function01/beyblade/ChampManager.java
1822
package forest.rice.field.k.linebot.function01.beyblade;

import java.io.IOException;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import forest.rice.field.k.linebot.function01.beyblade.champ.Champ;
import okhttp3.FormBody;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;

/**
 * Fetches the Beyblade champion table from takaratomy.co.jp and maps the
 * JSON response onto a {@link Champ} object.
 */
public class ChampManager {

    private static final String URL = "https://beyblade.takaratomy.co.jp/_champ_g4";

    // OkHttpClient owns its own connection pool and dispatcher threads; the
    // OkHttp docs recommend sharing one instance rather than building a new
    // client per request.
    private static final OkHttpClient CLIENT = new OkHttpClient();

    // ObjectMapper is thread-safe once configured and expensive to create.
    private static final ObjectMapper MAPPER =
            new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    /**
     * Posts the champion query and parses the response.
     *
     * @return the parsed {@link Champ}, or {@code null} if the request or the
     *         JSON mapping failed (the error is printed to stderr)
     */
    public Champ getChamp() {
        // The endpoint speaks the jQuery DataTables server-side protocol;
        // these parameters request every row (iDisplayLength = -1) for all
        // prefectures, sorted by column 1 ascending.
        RequestBody body = new FormBody.Builder().add("sEcho", "1").add("iColumns", "3").add("sColumns", "")
                .add("iDisplayStart", "0").add("iDisplayLength", "-1").add("mDataProp_0", "0")
                .add("mDataProp_1", "1").add("mDataProp_2", "2").add("iSortCol_0", "1").add("sSortDir_0", "asc")
                .add("iSortingCols", "1").add("bSortable_0", "true").add("bSortable_1", "true")
                .add("bSortable_2", "false").add("prefecture", "all").build();

        okhttp3.Request request = new okhttp3.Request.Builder().url(URL).post(body).build();

        // try-with-resources guarantees the response (and its body stream) is
        // closed even when reading or JSON parsing throws.
        try (okhttp3.Response response = CLIENT.newCall(request).execute()) {
            String bodyString = response.body().string();
            return createResult(bodyString);
        } catch (IOException e) {
            // JsonParseException and JsonMappingException are IOException
            // subclasses, so this single handler replaces the three original
            // catch blocks with identical behavior.
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Maps the raw JSON payload onto a {@link Champ}, ignoring any properties
     * the model class does not declare.
     */
    protected static Champ createResult(String json) throws JsonParseException, JsonMappingException, IOException {
        return MAPPER.readValue(json, Champ.class);
    }
}
apache-2.0
actframework/actframework
legacy-testapp/src/main/java/testapp/endpoint/InheritedOption.java
284
package testapp.endpoint;

import act.cli.Command;
import act.cli.JsonView;
import org.osgl.util.C;

import java.util.Map;

// Test fixture: checks that a CLI option declared on SingleOption is still
// usable from a subclass command.
public class InheritedOption extends SingleOption {

    // Registered as CLI command "foo2"; the returned map is rendered as JSON.
    // NOTE(review): returns a raw Map — presumably Map<String, ?>; the raw
    // type is kept as-is here, confirm against the framework's rendering
    // expectations before adding generics.
    @Command("foo2")
    @JsonView
    public Map fooOnly() {
        // Expose only the inherited "foo" option value under the key "foo".
        return C.Map("foo", foo());
    }
}
apache-2.0
apache/incubator-johnzon
johnzon-mapper/src/test/java/org/apache/johnzon/mapper/GenericsTest.java
1848
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.johnzon.mapper; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import org.junit.Test; import org.superbiz.Model; public class GenericsTest { @Test public void typeVariableMultiLevel() { final String input = "{\"aalist\":[{\"detail\":\"something2\",\"name\":\"Na2\"}]," + "\"childA\":{\"detail\":\"something\",\"name\":\"Na\"},\"childB\":{}}"; final Mapper mapper = new MapperBuilder().setAttributeOrder(String::compareTo).build(); final Model model = mapper.readObject(input, Model.class); assertNotNull(model.getChildA()); assertNotNull(model.getChildB()); assertNotNull(model.getAalist()); assertEquals("something", model.getChildA().detail); assertEquals("Na", model.getChildA().name); assertEquals(1, model.getAalist().size()); assertEquals("something2", model.getAalist().iterator().next().detail); assertEquals(input, mapper.writeObjectAsString(model)); } }
apache-2.0
multi-os-engine/moe-core
moe.apple/moe.platform.ios/src/main/java/apple/metalperformanceshaders/MPSMatrixCopyToImage.java
9472
package apple.metalperformanceshaders;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.metal.protocol.MTLCommandBuffer;
import apple.metal.protocol.MTLDevice;
import apple.metal.struct.MTLOrigin;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * MPSMatrixCopyToImage
 * <p>
 * The MPSMatrixCopyToImage copies matrix data to a MPSImage.
 * The operation is the reverse of MPSImageCopyToMatrix.
 * <p>
 * NOTE(review): generated MOE/NatJ binding for the Objective-C class of the
 * same name — method names, annotation values ({@code @Selector}, mappers)
 * and signatures mirror the native API and must not be edited by hand.
 */
@Generated
@Library("MetalPerformanceShaders")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class MPSMatrixCopyToImage extends MPSKernel {
    // Registers this binding with the NatJ runtime when the class is loaded.
    static {
        NatJ.register();
    }

    // Wraps an existing native instance; used by the binding machinery.
    @Generated
    protected MPSMatrixCopyToImage(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native MPSMatrixCopyToImage alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native MPSMatrixCopyToImage allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    /**
     * [@property] dataLayout
     * <p>
     * The data layout to use
     * <p>
     * Returns the data layout. When copying from a MPSMatrix to a MPSImage, this
     * describes the order in which the image values are to be stored in the buffer associated
     * with the MPSMatrix.
     * Default: MPSDataLayoutFeatureChannelsxHeightxWidth
     */
    @Generated
    @Selector("dataLayout")
    @NUInt
    public native long dataLayout();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    /**
     * Encode a kernel that copies a MPSMatrix to a MPSImage into a command buffer
     * using a MTLComputeCommandEncoder.
     * <p>
     * The kernel copies feature channels from sourceMatrix to the destinationImage.
     * The kernel will not begin to execute until
     * after the command buffer has been enqueued and committed.
     * <p>
     * NOTE: The sourceMatrix.dataType must match the feature channel data type in destinationImage.
     *
     * @param commandBuffer    A valid MTLCommandBuffer.
     * @param sourceMatrix     A valid MPSMatrix or MPSTemporaryMatrix object describing the source matrix.
     * @param destinationImage A valid MPSImage describing the image to copy to.
     */
    @Generated
    @Selector("encodeToCommandBuffer:sourceMatrix:destinationImage:")
    public native void encodeToCommandBufferSourceMatrixDestinationImage(
            @Mapped(ObjCObjectMapper.class) MTLCommandBuffer commandBuffer, MPSMatrix sourceMatrix,
            MPSImage destinationImage);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native MPSMatrixCopyToImage init();

    @Generated
    @Selector("initWithCoder:")
    public native MPSMatrixCopyToImage initWithCoder(NSCoder aDecoder);

    /**
     * NSSecureCoding compatability
     * <p>
     * While the standard NSSecureCoding/NSCoding method
     * -initWithCoder: should work, since the file can't
     * know which device your data is allocated on, we
     * have to guess and may guess incorrectly. To avoid
     * that problem, use initWithCoder:device instead.
     *
     * @param aDecoder The NSCoder subclass with your serialized MPSKernel
     * @param device   The MTLDevice on which to make the MPSKernel
     * @return A new MPSKernel object, or nil if failure.
     */
    @Generated
    @Selector("initWithCoder:device:")
    public native MPSMatrixCopyToImage initWithCoderDevice(NSCoder aDecoder,
            @Mapped(ObjCObjectMapper.class) Object device);

    @Generated
    @Selector("initWithDevice:")
    public native MPSMatrixCopyToImage initWithDevice(@Mapped(ObjCObjectMapper.class) Object device);

    /**
     * Initialize a MPSMatrixCopyToImage object on a device
     *
     * @param device     The device the kernel will run on
     * @param dataLayout The data layout
     * @return A valid MPSMatrixCopyToImage object or nil, if failure.
     */
    @Generated
    @Selector("initWithDevice:dataLayout:")
    public native MPSMatrixCopyToImage initWithDeviceDataLayout(@Mapped(ObjCObjectMapper.class) MTLDevice device,
            @NUInt long dataLayout);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native MPSMatrixCopyToImage new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    /**
     * [@property] sourceMatrixBatchIndex
     * <p>
     * The index of the source matrix in the batch. This property is
     * modifiable and defaults to 0 at initialization time.
     */
    @Generated
    @Selector("setSourceMatrixBatchIndex:")
    public native void setSourceMatrixBatchIndex(@NUInt long value);

    /**
     * [@property] sourceMatrixOrigin
     * <p>
     * The origin, relative to [0, 0] in the source matrix.
     * This property is modifiable and defaults
     * to [0, 0] at initialization time. If a different origin is desired
     * then this should be modified prior to encoding the kernel. The z
     * value must be 0.
     */
    @Generated
    @Selector("setSourceMatrixOrigin:")
    public native void setSourceMatrixOrigin(@ByValue MTLOrigin value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    /**
     * [@property] sourceMatrixBatchIndex
     * <p>
     * The index of the source matrix in the batch. This property is
     * modifiable and defaults to 0 at initialization time.
     */
    @Generated
    @Selector("sourceMatrixBatchIndex")
    @NUInt
    public native long sourceMatrixBatchIndex();

    /**
     * [@property] sourceMatrixOrigin
     * <p>
     * The origin, relative to [0, 0] in the source matrix.
     * This property is modifiable and defaults
     * to [0, 0] at initialization time. If a different origin is desired
     * then this should be modified prior to encoding the kernel. The z
     * value must be 0.
     */
    @Generated
    @Selector("sourceMatrixOrigin")
    @ByValue
    public native MTLOrigin sourceMatrixOrigin();

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    // Instance-level bridge to the static class method, required by the
    // NSSecureCoding protocol mapping.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
apache-2.0
minthubk/tankz
tankz/src/com/tankz/systems/player/PlayerTankMovementSystem.java
7776
package com.tankz.systems.player; import com.artemis.Component; import com.artemis.ComponentMapper; import com.artemis.Entity; import com.artemis.EntityProcessingSystem; import com.artemis.utils.TrigLUT; import com.badlogic.gdx.Input.Keys; import com.badlogic.gdx.InputProcessor; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.physics.box2d.Body; import com.tankz.components.BrainPlayer; import com.tankz.components.Maneuver; import com.tankz.components.Physics; import com.tankz.components.TurnFactor; import com.tankz.components.Velocity; //TODO: Move the input processor to a different system? Make PlayerTankMovementSystem generic. public class PlayerTankMovementSystem extends EntityProcessingSystem implements InputProcessor { private static final float MAX_REV_VELOCITY = 10f; private static final float MAX_FWD_VELOCITY = 20f; private static final float TURN_THRUST = 20f; private static final float MAX_TURN_VELOCITY = 625f; // this affects the turning radius private static float FWD_ACCEL_FACTOR = 200.0f; private static float REV_ACCEL_FACTOR = 100.0f; protected ComponentMapper<Velocity> velocityMapper; protected ComponentMapper<TurnFactor> turnFactorMapper; protected ComponentMapper<Physics> physicsMapper; protected ComponentMapper<Maneuver> maneuverMapper; private Entity player; private float mVelocity; public PlayerTankMovementSystem() { super(BrainPlayer.class); } /** * This provides sub-classes the ability to indicate which components they are interested in... * * @param requiredType * @param otherTypes */ public PlayerTankMovementSystem(Class<? extends Component> requiredType, Class<? extends Component>... 
otherTypes) { super(requiredType, otherTypes); } @Override public void initialize() { velocityMapper = new ComponentMapper<Velocity>(Velocity.class, world); turnFactorMapper = new ComponentMapper<TurnFactor>(TurnFactor.class, world); physicsMapper = new ComponentMapper<Physics>(Physics.class, world); maneuverMapper = new ComponentMapper<Maneuver>(Maneuver.class, world); } @Override protected void process(Entity e) { updatePlayer(e); } @Override protected boolean checkProcessing() { return true; } protected void updatePlayer(Entity e) { Velocity v = velocityMapper.get(e); TurnFactor tf = turnFactorMapper.get(e); Physics c = physicsMapper.get(e); Maneuver m = maneuverMapper.get(e); // get the maneuvers for this entity... updateMoving(e,c,v,m); updateTurning(c, v, tf, m, world.getDelta()); } private void updateMoving(Entity e, Physics physics, Velocity v, Maneuver m) { if (m.getForward()) { float angle = physics.getRotationInRadians(); float ax = (TrigLUT.cos(angle)); float ay = (TrigLUT.sin(angle)); mVelocity = physics.getBody().getLinearVelocity().len(); if (mVelocity < MAX_FWD_VELOCITY) { float mass = physics.getBody().getMass(); physics.getBody().applyForceToCenter(mass*ax*FWD_ACCEL_FACTOR,mass*ay*FWD_ACCEL_FACTOR); } } else if (mVelocity > 0) { mVelocity = 0; } if (m.getReverse()) { float angle = physics.getRotationInRadians(); float ax = (TrigLUT.cos(angle)); float ay = (TrigLUT.sin(angle)); mVelocity = physics.getBody().getLinearVelocity().len(); if (physics.getBody().getLinearVelocity().len() < MAX_REV_VELOCITY) { float mass = physics.getBody().getMass(); physics.getBody().applyForceToCenter(-mass*ax*REV_ACCEL_FACTOR,-mass*ay*REV_ACCEL_FACTOR); } mVelocity = -mVelocity; // reverse } else if (mVelocity < 0) { mVelocity += world.getDelta() * 1f; if (mVelocity > 0) { mVelocity = 0; } } v.setVelocity(mVelocity); } private void updateTurning(Physics physics, Velocity v, TurnFactor tf, Maneuver m, int delta) { float turnFactor = tf.getFactor(); if (m.getRight()) { 
turnFactor += delta * TURN_THRUST; if (turnFactor > MAX_TURN_VELOCITY) { turnFactor = MAX_TURN_VELOCITY; } m.setTurning(true); } else if (m.getLeft()) { turnFactor -= delta * TURN_THRUST; if (turnFactor < -MAX_TURN_VELOCITY) { turnFactor = -MAX_TURN_VELOCITY; } m.setTurning(true); } if (!m.getRight() && !m.getLeft() && m.getTurning()) { if (turnFactor > 0) { turnFactor -= delta * TURN_THRUST; if (turnFactor <= 0) { turnFactor = 0; m.setTurning(false); } } else { turnFactor += delta * TURN_THRUST; if (turnFactor >= 0) { turnFactor = 0; m.setTurning(false); } } } if (m.getTurning()) { // turning factor force is proportional to speed Vector2 velocity = physics.getBody().getLinearVelocity(); updateRotating(physics.getBody(),turnFactor*velocity.len2(),m); } tf.setFactor(turnFactor); } private void updateRotating(Body b, float factor, Maneuver m) { if (m.getRight() || m.getLeft()) { if (m.getReverse()) { b.applyTorque(factor); } else { b.applyTorque(-factor); } } } @Override public boolean keyDown(int key) { ensurePlayerEntity(); if (player == null) { return false; } Maneuver m = maneuverMapper.get(player); if (key == Keys.W) { m.setForward(true); return true; } else if (key == Keys.S) { m.setReverse(true); return true; } else if (key == Keys.A) { m.setLeft(true); return true; } else if (key == Keys.D) { m.setRight(true); return true; } return false; } @Override public boolean keyUp(int key) { ensurePlayerEntity(); if (player == null) { return false; } Maneuver m = maneuverMapper.get(player); if (key == Keys.W) { m.setForward(false); return true; } else if (key == Keys.S) { m.setReverse(false); return true; } else if (key == Keys.A) { m.setLeft(false); return true; } else if (key == Keys.D) { m.setRight(false); return true; } return false; } private void ensurePlayerEntity() { if (player == null || !player.isActive()) { player = world.getTagManager().getEntity("PLAYER"); } } @Override public boolean keyTyped(char character) { return false; } @Override public boolean 
touchDown(int x, int y, int pointer, int button) { return false; } @Override public boolean touchUp(int x, int y, int pointer, int button) { return false; } @Override public boolean touchDragged(int x, int y, int pointer) { return false; } @Override public boolean touchMoved(int x, int y) { return false; } @Override public boolean scrolled(int amount) { return false; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-rds/src/main/java/com/amazonaws/services/rds/model/transform/OptionGroupQuotaExceededExceptionUnmarshaller.java
1633
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.rds.model.transform; import org.w3c.dom.Node; import javax.annotation.Generated; import com.amazonaws.AmazonServiceException; import com.amazonaws.transform.StandardErrorUnmarshaller; import com.amazonaws.services.rds.model.OptionGroupQuotaExceededException; @Generated("com.amazonaws:aws-java-sdk-code-generator") public class OptionGroupQuotaExceededExceptionUnmarshaller extends StandardErrorUnmarshaller { public OptionGroupQuotaExceededExceptionUnmarshaller() { super(OptionGroupQuotaExceededException.class); } @Override public AmazonServiceException unmarshall(Node node) throws Exception { // Bail out if this isn't the right error code that this // marshaller understands String errorCode = parseErrorCode(node); if (errorCode == null || !errorCode.equals("OptionGroupQuotaExceededFault")) return null; OptionGroupQuotaExceededException e = (OptionGroupQuotaExceededException) super.unmarshall(node); return e; } }
apache-2.0
alexp82/spring-rest-book
src/main/java/com/keba/rest/poll/domain/Vote.java
508
package com.keba.rest.poll.domain; import javax.persistence.*; @Entity public class Vote { @Id @GeneratedValue @Column(name = "VOTE_ID") private Long id; @ManyToOne @JoinColumn(name = "OPTION_ID") private Option option; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Option getOption() { return option; } public void setOption(Option option) { this.option = option; } }
apache-2.0
vivantech/kc_fixes
src/main/java/org/kuali/kra/award/printing/xmlstream/AwardTemplateXmlStream.java
21902
/* * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.award.printing.xmlstream; import noNamespace.*; import noNamespace.ContactType; import noNamespace.ReportTermDetailsType.MailCopies; import noNamespace.TemplateDocument.Template; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlbeans.XmlObject; import org.kuali.kra.award.home.*; import org.kuali.kra.award.paymentreports.Frequency; import org.kuali.kra.award.paymentreports.FrequencyBase; import org.kuali.kra.award.paymentreports.Report; import org.kuali.kra.award.paymentreports.ReportClass; import org.kuali.kra.award.printing.AwardPrintType; import org.kuali.kra.bo.KraPersistableBusinessObjectBase; import org.kuali.kra.bo.Rolodex; import org.kuali.kra.bo.Sponsor; import org.kuali.kra.bo.SponsorTerm; import org.kuali.kra.document.ResearchDocumentBase; import org.kuali.kra.printing.util.PrintingUtils; import org.kuali.kra.printing.xmlstream.XmlStream; import org.kuali.rice.core.api.datetime.DateTimeService; import org.kuali.rice.krad.service.BusinessObjectService; import java.sql.Date; import java.util.*; /** * This class generates XML that conforms with the XSD related to Award Template * Report. The data for XML is derived from {@link ResearchDocumentBase} and * {@link Map} of details passed to the class. 
* * */ public class AwardTemplateXmlStream implements XmlStream { private static final Log LOG = LogFactory.getLog(AwardTemplateXmlStream.class); private BusinessObjectService businessObjectService = null; private DateTimeService dateTimeService = null; private static final String SCHOOL_NAME = "SCHOOL_NAME"; private static final String SCHOOL_ACRONYM = "SCHOOL_ACRONYM"; private String previousDescription=""; /** * This method generates XML for Award template Report. It uses data passed * in {@link ResearchDocumentBase} for populating the XML nodes. The XMl * once generated is returned as {@link XmlObject} * * @param printableBusinessObject * using which XML is generated * @param reportParameters * parameters related to XML generation * @return {@link XmlObject} representing the XML */ public Map<String, XmlObject> generateXmlStream( KraPersistableBusinessObjectBase printableBusinessObject, Map<String, Object> reportParameters) { Map<String, XmlObject> awardTemplateXmlStream = new HashMap<String, XmlObject>(); AwardTemplate awardTemplate = (AwardTemplate) printableBusinessObject; TemplateDocument templateDocument = TemplateDocument.Factory.newInstance(); if (awardTemplate != null) { templateDocument.setTemplate(getTemplate(awardTemplate)); } awardTemplateXmlStream.put(AwardPrintType.AWARD_TEMPLATE.getAwardPrintType(), templateDocument); return awardTemplateXmlStream; } /* * This method will set the values to template elements and finally returns * the template xml object. 
*/ private Template getTemplate(AwardTemplate awardTemplate) { Template template = Template.Factory.newInstance(); awardTemplate.refreshNonUpdateableReferences(); template.setTemplateMaster(getTemplateMaster(awardTemplate)); template.setSchoolInfo(getSchoolInfoType()); template.setCommentArray(getCommentType(awardTemplate)); template.setContactArray(getContactType(awardTemplate)); template.setTermArray(getTerms(awardTemplate)); template.setReportArray(getReportTermTypes(awardTemplate)); return template; } private TemplateMasterData getTemplateMaster(AwardTemplate awardTemplate) { TemplateMasterData templateMasterData = TemplateMasterData.Factory.newInstance(); templateMasterData.setCurrentDate(getDateTimeService().getCurrentCalendar()); templateMasterData.setDescription(awardTemplate.getDescription()); templateMasterData.setTemplateCode(awardTemplate.getTemplateCode()); if(awardTemplate.getBasisOfPaymentCode()!=null){ BasisPaymentType basisPayment = templateMasterData.addNewBasisPayment(); AwardBasisOfPayment awardBasisOfPayment = awardTemplate.getAwardBasisOfPayment(); if(awardBasisOfPayment!=null){ basisPayment.setBasisPaymentCode(awardBasisOfPayment.getBasisOfPaymentCode()); basisPayment.setBasisPaymentDesc(awardBasisOfPayment.getDescription()); } } if(awardTemplate.getCompetingRenewalPrpslDueCode()!=null){ CompetingRenewalType competingRenewal = templateMasterData.addNewCompetingRenewal(); competingRenewal.setCompetingRenewalCode(awardTemplate.getCompetingRenewalPrpslDueCode()); } if(awardTemplate.getNonCompetingContPrpslDueCode()!=null){ NonCompetingContType nonCompetingCont = templateMasterData.addNewNonCompetingCont(); nonCompetingCont.setNonCompetingContCode(awardTemplate.getNonCompetingContPrpslDueCode()); } if(awardTemplate.getMethodOfPaymentCode()!=null){ AwardMethodOfPayment awardMethodOfPayment = awardTemplate.getAwardMethodOfPayment(); PaymentMethodType paymentMethod = templateMasterData.addNewPaymentMethod(); 
paymentMethod.setPaymentMethodCode(awardMethodOfPayment.getMethodOfPaymentCode()); paymentMethod.setPaymentMethodDesc(awardMethodOfPayment.getDescription()); } if(awardTemplate.getPrimeSponsorCode()!=null){ SponsorType sponsorType = templateMasterData.addNewPrimeSponsor(); Sponsor sponsor = awardTemplate.getPrimeSponsor(); sponsorType.setSponsorCode(sponsor.getSponsorCode()); sponsorType.setSponsorName(sponsor.getSponsorName()); } if(awardTemplate.getStatusCode()!=null){ TemplateStatusType templateStatus = templateMasterData.addNewTemplateStatus(); AwardStatus awardTemplateStatus = awardTemplate.getAwardTemplateStatus(); templateStatus.setStatusCode(Integer.parseInt(awardTemplateStatus.getStatusCode())); templateStatus.setStatusDesc(awardTemplateStatus.getDescription()); } return templateMasterData; } private TermType[] getTerms(AwardTemplate awardTemplate) { List<TermType> termTypes = new ArrayList<TermType>(); for (AwardTemplateTerm awardTemplateTerm : awardTemplate.getAwardSponsorTerms()) { TermType termType = TermType.Factory.newInstance(); setTermDetails(termType,awardTemplateTerm); termTypes.add(termType); } return termTypes.toArray(new TermType[0]); } private void setTermDetails(TermType termType,AwardTemplateTerm awardTemplateTerm) { awardTemplateTerm.refreshNonUpdateableReferences(); SponsorTerm sponsorTerm = awardTemplateTerm.getSponsorTerm(); if(sponsorTerm!=null){ if(previousDescription.equals("")||!previousDescription.equals(awardTemplateTerm.getSponsorTerm().getSponsorTermType().getDescription())){ termType.setDescription(awardTemplateTerm.getSponsorTerm().getSponsorTermType().getDescription()); previousDescription=awardTemplateTerm.getSponsorTerm().getSponsorTermType().getDescription(); } TermDetailsType termDetails = termType.addNewTermDetails(); termDetails.setTermCode(Integer.parseInt(sponsorTerm.getSponsorTermCode())); termDetails.setTermDescription(sponsorTerm.getDescription()); } } /* * This method will set the values to report term types 
elements and finally * returns the report term type.It iterates over the Award Template Report * Term. */ private ReportTermType[] getReportTermTypes(AwardTemplate awardTemplate) { List<ReportTermType> reportTermTypes = new ArrayList<ReportTermType>(); ReportTermType reportTermType = null; for (AwardTemplateReportTerm awardTemplateReportTerm : awardTemplate .getTemplateReportTerms()) { reportTermType = ReportTermType.Factory.newInstance(); awardTemplateReportTerm.refreshNonUpdateableReferences(); ReportClass reportClass = awardTemplateReportTerm.getReportClass(); if (reportClass != null && reportClass.getDescription() != null) { reportTermType.setDescription(reportClass.getDescription()); } reportTermType.setReportTermDetailsArray(getReportTermDetails(awardTemplateReportTerm)); reportTermTypes.add(reportTermType); } return reportTermTypes.toArray(new ReportTermType[0]); } /* * This method will set the values to report term details elements and * finally return the array of the report term details type.It iterates over * the */ private ReportTermDetailsType[] getReportTermDetails(AwardTemplateReportTerm awardTemplateReportTerm) { List<ReportTermDetailsType> reportTermDetailsTypes = new ArrayList<ReportTermDetailsType>(); ReportTermDetailsType reportTermDetailsType = null; reportTermDetailsType = ReportTermDetailsType.Factory.newInstance(); Date dueDate = awardTemplateReportTerm.getDueDate(); if (dueDate != null) { reportTermDetailsType.setDueDate(dateTimeService.getCalendar(dueDate)); } setFrequencyBaseDetails(awardTemplateReportTerm, reportTermDetailsType); setFrequencyDetails(awardTemplateReportTerm, reportTermDetailsType); setOspDistributionDetails(awardTemplateReportTerm,reportTermDetailsType); setReportClassDetails(awardTemplateReportTerm, reportTermDetailsType); setReportDetails(awardTemplateReportTerm, reportTermDetailsType); reportTermDetailsType.setMailCopiesArray(getMailCopies(awardTemplateReportTerm)); reportTermDetailsTypes.add(reportTermDetailsType); 
return reportTermDetailsTypes.toArray(new ReportTermDetailsType[0]); } /* * This method will set the values to report term details of report * elements. */ private void setReportDetails( AwardTemplateReportTerm awardTemplateReportTerm, ReportTermDetailsType reportTermDetailsType) { Report report = awardTemplateReportTerm.getReport(); String reportCode = report.getReportCode(); String reportDescription = report.getDescription(); if (reportCode != null) { reportTermDetailsType.setReportCode(Integer.valueOf(reportCode)); } if (reportDescription != null) { reportTermDetailsType.setReportCodeDesc(reportDescription); } } /* * This method will set the values to report term details of report class * elements. */ private void setReportClassDetails( AwardTemplateReportTerm awardTemplateReportTerm, ReportTermDetailsType reportTermDetailsType) { ReportClass reportClass = awardTemplateReportTerm.getReportClass(); String reportClassCode = reportClass.getReportClassCode(); String reportClassDescription = reportClass.getDescription(); if (reportClassCode != null) { reportTermDetailsType.setReportClassCode(Integer .valueOf(reportClassCode)); } if (reportClassDescription != null) { reportTermDetailsType.setReportCodeDesc(reportClassDescription); } } /* * This method will set the values to report term details of OSP * distribution elements. */ private void setOspDistributionDetails( AwardTemplateReportTerm awardTemplateReportTerm, ReportTermDetailsType reportTermDetailsType) { String ospDistributionCode = awardTemplateReportTerm .getOspDistributionCode(); if (ospDistributionCode != null) { reportTermDetailsType.setOSPDistributionCode(Integer .valueOf(ospDistributionCode)); } Distribution distribution = awardTemplateReportTerm.getDistribution(); String ospDescription = distribution.getDescription(); if (ospDescription != null) { reportTermDetailsType.setOSPDistributionDesc(ospDescription); } } /* * This method will set the values to report term details of frequency * elements. 
*/ private void setFrequencyDetails( AwardTemplateReportTerm awardTemplateReportTerm, ReportTermDetailsType reportTermDetailsType) { Frequency frequency = awardTemplateReportTerm.getFrequency(); String frequencyCode = frequency.getFrequencyCode(); String desription = frequency.getDescription(); if (frequencyCode != null) { reportTermDetailsType.setFrequencyCode(Integer .valueOf(frequencyCode)); } if (desription != null) { reportTermDetailsType.setFrequencyCodeDesc(desription); } } /* * This method will set the values to report term details of frequency base * elements. */ private void setFrequencyBaseDetails( AwardTemplateReportTerm awardTemplateReportTerm, ReportTermDetailsType reportTermDetailsType) { String frequencyBaseCode = awardTemplateReportTerm.getFrequencyBaseCode(); if (frequencyBaseCode != null) { reportTermDetailsType.setFrequencyBaseCode(Integer .valueOf(frequencyBaseCode)); } awardTemplateReportTerm.refreshNonUpdateableReferences(); FrequencyBase frequencyBase = awardTemplateReportTerm.getFrequencyBase(); if(frequencyBase!=null){ String description = frequencyBase.getDescription(); if (description != null) { reportTermDetailsType.setFrequencyBaseDesc(description); } } } /* * This method will set the values to mail copies elements and finally * return the array of mail copies.It iterates over the award template * report term recipient. 
*/ private MailCopies[] getMailCopies( AwardTemplateReportTerm awardTemplateReportTerm) { List<MailCopies> mailCopiesList = new ArrayList<MailCopies>(); MailCopies mailCopies = null; for (AwardTemplateReportTermRecipient awardTemplateReportTermRecipient : awardTemplateReportTerm .getAwardTemplateReportTermRecipients()) { mailCopies = MailCopies.Factory.newInstance(); awardTemplateReportTermRecipient.refreshNonUpdateableReferences(); org.kuali.kra.award.home.ContactType contactType = awardTemplateReportTermRecipient.getContactType(); if(contactType!=null){ String contactTypeCode = contactType.getContactTypeCode(); String contactTypeDescription = contactType.getDescription(); if (contactTypeCode != null) { mailCopies.setContactTypeCode(Integer.valueOf(contactTypeCode)); } if (contactTypeDescription != null) { mailCopies.setContactTypeDesc(contactTypeDescription); } } Integer numberofmailCopies = awardTemplateReportTermRecipient.getNumberOfCopies(); if (numberofmailCopies != null) { mailCopies.setNumberOfCopies(String.valueOf(numberofmailCopies)); } Integer rolodexid = awardTemplateReportTermRecipient.getRolodexId(); if (rolodexid != null) { mailCopies.setRolodexId(String.valueOf(rolodexid)); } mailCopiesList.add(mailCopies); } return mailCopiesList.toArray(new MailCopies[0]); } /* * This method will set the values to contact type elements and finally * return the contact type array. From AwardTemplate get the list of * AwardTemplateContact and iterates over it. 
*/ private ContactType[] getContactType(AwardTemplate awardTemplate) { List<ContactType> contactTypes = new ArrayList<ContactType>(); ContactType contactType = null; for (AwardTemplateContact awardTemplateContact : awardTemplate .getTemplateContacts()) { contactType = ContactType.Factory.newInstance(); awardTemplateContact.refreshNonUpdateableReferences(); org.kuali.kra.award.home.ContactType type = awardTemplateContact .getContactType(); String contactTypeCode = null; String description =null; if (type != null) { contactTypeCode = type.getContactTypeCode(); description= type.getDescription(); } if (contactTypeCode != null) { contactType .setContactTypeCode(Integer.valueOf(contactTypeCode)); } if (description != null) { contactType.setContactTypeDesc(description); } setRolodexDetails(contactType,awardTemplateContact); contactTypes.add(contactType); } return contactTypes.toArray(new ContactType[0]); } private void setRolodexDetails(ContactType contactType, AwardTemplateContact awardTemplateContact) { if(awardTemplateContact.getRolodexId()!=null){ Rolodex rolodex = awardTemplateContact.getRolodex(); if(rolodex!=null){ RolodexDetailsType rolodexDetails = contactType.addNewRolodexDetails(); rolodexDetails.setAddress1(rolodex.getAddressLine1()); rolodexDetails.setAddress2(rolodex.getAddressLine2()); rolodexDetails.setAddress3(rolodex.getAddressLine3()); rolodexDetails.setCity(rolodex.getCity()); rolodexDetails.setComments(rolodex.getComments()); rolodexDetails.setCountryCode(rolodex.getCountryCode()); rolodexDetails.setCountryDescription(rolodex.getCountryCode()); rolodexDetails.setCounty(rolodex.getCounty()); rolodexDetails.setEmail(rolodex.getEmailAddress()); rolodexDetails.setFax(rolodex.getFaxNumber()); rolodexDetails.setFirstName(rolodex.getFirstName()); rolodexDetails.setLastName(rolodex.getLastName()); rolodexDetails.setMiddleName(rolodex.getMiddleName()); rolodexDetails.setOrganization(rolodex.getOrganization()); 
rolodexDetails.setOwnedByUnit(rolodex.getOwnedByUnit()); if(rolodex.getUnit()!=null){ rolodexDetails.setOwnedByUnitName(rolodex.getUnit().getUnitName()); } rolodexDetails.setPhoneNumber(rolodex.getPhoneNumber()); rolodexDetails.setPostalCode(rolodex.getPostalCode()); rolodexDetails.setPrefix(rolodex.getPrefix()); rolodexDetails.setRolodexId(rolodex.getRolodexId().toString()); rolodexDetails.setSponsorCode(rolodex.getSponsorCode()); if(rolodex.getSponsor()!=null){ rolodexDetails.setSponsorName(rolodex.getSponsor().getSponsorName()); } rolodexDetails.setStateCode(rolodex.getState()); rolodexDetails.setStateDescription(rolodex.getSponsorCode()); rolodexDetails.setSuffix(rolodex.getSuffix()); rolodexDetails.setTitle(rolodex.getTitle()); } } } /* * This method will set the values to comment type elements and finally * return the comment type array. From AwardTemplate get the list of * AwardTemplateComment and iterates over it. */ private CommentType[] getCommentType(AwardTemplate awardTemplate) { List<CommentType> commentTypes = new ArrayList<CommentType>(); List<AwardTemplateComment> templateComments = awardTemplate .getTemplateComments(); CommentType commentType = null; ArrayList templateCommentList=new ArrayList(); HashMap<String,String> templateCommentHm=new HashMap<String,String>(); for (AwardTemplateComment awardTemplateComment : templateComments) { String commentTypeCode = awardTemplateComment.getCommentTypeCode(); AwardTemplate template = awardTemplateComment.getTemplate(); String description = null; if (template != null) { awardTemplateComment.refreshReferenceObject("commentType"); description= awardTemplateComment.getCommentType().getDescription(); templateCommentList.add(description); } String comments = awardTemplateComment.getComments(); if(comments!=null && description!=null){ templateCommentHm.put(description, comments); } } Collections.sort(templateCommentList); for (int templateComment=0;templateComment<templateCommentList.size();templateComment++){ 
if(templateCommentHm.containsKey(templateCommentList.get(templateComment))==true){ commentType = CommentType.Factory.newInstance(); String comments=(String)templateCommentHm.get(templateCommentList.get(templateComment)); String description =templateCommentList.get(templateComment).toString(); commentType.setDescription(description); commentType.setComments(comments); } commentTypes.add(commentType); } return commentTypes.toArray(new CommentType[0]); } /* * This method will set the values to school info attributes and finally * returns SchoolInfoType XmlObject */ private SchoolInfoType getSchoolInfoType() { SchoolInfoType schoolInfoType = SchoolInfoType.Factory.newInstance(); String schoolName = getAwardParameterValue(SCHOOL_NAME); String schoolAcronym = getAwardParameterValue(SCHOOL_ACRONYM); if (schoolName != null) { schoolInfoType.setSchoolName(schoolName); } if (schoolAcronym != null) { schoolInfoType.setAcronym(schoolAcronym); } return schoolInfoType; } public BusinessObjectService getBusinessObjectService() { return businessObjectService; } public void setBusinessObjectService( BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } public DateTimeService getDateTimeService() { return dateTimeService; } public void setDateTimeService(DateTimeService dateTimeService) { this.dateTimeService = dateTimeService; } private String getAwardParameterValue(String param) { String value = null; try { value = PrintingUtils.getParameterValue(param); } catch (Exception e) { LOG.error(e.getMessage(), e); } return value; } }
apache-2.0
pombredanne/mortar-api-java
src/main/java/com/mortardata/api/v2/Describes.java
6173
/* * Copyright 2013 Mortar Data Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mortardata.api.v2; import com.google.api.client.http.HttpRequest; import com.google.api.client.util.Key; import java.io.IOException; import java.util.HashMap; import java.util.Map; /** * Run and fetch describe requests from the Mortar API. * * @see <a href="http://help.mortardata.com/reference/api/api_version_2" target="_blank"> * http://help.mortardata.com/reference/api/api_version_2</a> */ public class Describes { private API api; /** * Construct a Describes V2 API. * * @param api API client */ public Describes(API api) { this.api = api; } /** * Run a Pig DESCRIBE operation. 
* * @param alias Pig alias to describe * @param gitRef version of code (git hash) to use * @param projectName Mortar project to use * @param pigScriptName Pigscript to use (without path or extension) * @return describe_id ID of the describe that was requested * @throws IOException if unable to run describe on API */ public String postDescribe(String alias, String gitRef, String projectName, String pigScriptName) throws IOException { HashMap<String, String> arguments = new HashMap<String, String>(); arguments.put("alias", alias); arguments.put("git_ref", gitRef); arguments.put("project_name", projectName); arguments.put("pigscript_name", pigScriptName); HttpRequest request = this.api.buildHttpPostRequest("describes", arguments); return (String) request.execute().parseAs(HashMap.class).get("describe_id"); } /** * Get the results of a Pig DESCRIBE operation. * * @param describeId ID of the describe * @return requested DescribeResult * @throws IOException if describe does not exist or unable to fetch from the API */ public DescribeResult getDescribe(String describeId) throws IOException { HttpRequest request = this.api.buildHttpGetRequest("describes/" + describeId); return request.execute().parseAs(DescribeResult.class); } /** * Get the results of a Pig DESCRIBE operation. * * @param describeId ID of the describe * @param excludeResult whether to exclude the result field (default: false) * @return requested DescribeResult * @throws IOException if describe does not exist or unable to fetch from the API */ public DescribeResult getDescribe(String describeId, boolean excludeResult) throws IOException { HttpRequest request = this.api.buildHttpGetRequest("describes/" + describeId + "?exclude_result=" + excludeResult); return request.execute().parseAs(DescribeResult.class); } /** * Result of a Pig DESCRIBE. 
*/ public static class DescribeResult { @Key("project_name") private String projectName; @Key("alias") private String alias; @Key("git_ref") private String gitRef; @Key("script_name") private String scriptName; @Key("describe_id") private String describeId; @Key("status_code") private String statusCode; @Key("status_description") private String statusDescription; @Key("web_result_url") private String webResultUrl; @Key("result") private Map<String, Object> result; /** * Name of the Mortar project for the describe. */ public String getProjectName() { return projectName; } /** * Pig alias described. */ public String getAlias() { return alias; } /** * Git hash or branch at which describe was run. */ public String getGitRef() { return gitRef; } /** * Name of the script that was described. */ public String getScriptName() { return scriptName; } /** * ID of the describe. */ public String getDescribeId() { return describeId; } /** * Describe status code. */ public TaskStatus getStatusCode() { return TaskStatus.getEnum(statusCode); } /** * Describe status code original string. */ public String getStatusCodeString() { return statusCode; } /** * Full description of describe status. */ public String getStatusDescription() { return statusDescription; } /** * URL to view describe results. */ public String getWebResultUrl() { return webResultUrl; } /** * Describe results. */ public Map<String, Object> getResult() { return result; } @Override public String toString() { return "DescribeResult [" + "projectName='" + projectName + '\'' + ", alias='" + alias + '\'' + ", gitRef='" + gitRef + '\'' + ", scriptName='" + scriptName + '\'' + ", describeId='" + describeId + '\'' + ", statusCode=" + statusCode + ", statusDescription='" + statusDescription + '\'' + ", webResultUrl='" + webResultUrl + '\'' + ", result=" + result + ']'; } } }
apache-2.0
panelion/incubator-stratos
components/org.apache.stratos.cloud.controller/src/main/java/org/apache/stratos/cloud/controller/pojo/Registrant.java
3076
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.cloud.controller.pojo; /** * Upon a new subscription, Stratos Manager would send this POJO. * @author nirmal * */ public class Registrant { private String clusterId; private String tenantRange; private String hostName; private String cartridgeType; private String payload; private Properties properties; private String autoScalerPolicyName; private String deploymentPolicyName; public String getTenantRange() { return tenantRange; } public void setTenantRange(String tenantRange) { this.tenantRange = tenantRange; } public String getHostName() { return hostName; } public void setHostName(String hostName) { this.hostName = hostName; } public Properties getProperties() { return properties; } public void setProperties(Properties properties) { this.properties = properties; } public String getAutoScalerPolicyName() { return autoScalerPolicyName; } public void setAutoScalerPolicyName(String autoScalerPolicyName) { this.autoScalerPolicyName = autoScalerPolicyName; } public String getClusterId() { return clusterId; } public void setClusterId(String clusterId) { this.clusterId = clusterId; } public String getCartridgeType() { return cartridgeType; } public void 
setCartridgeType(String cartridgeType) { this.cartridgeType = cartridgeType; } public String getPayload() { return payload; } public void setPayload(String payload) { this.payload = payload; } public String getDeploymentPolicyName() { return deploymentPolicyName; } public void setDeploymentPolicyName(String deploymentPolicyName) { this.deploymentPolicyName = deploymentPolicyName; } @Override public String toString() { return "Registrant [clusterId=" + clusterId + ", tenantRange=" + tenantRange + ", hostName=" + hostName + ", cartridgeType=" + cartridgeType + ", properties=" + properties + ", autoScalerPolicyName=" + autoScalerPolicyName + ", deploymentPolicyName=" + deploymentPolicyName + "]"; } }
apache-2.0
gspandy/divconq
divconq.core/src/main/java/divconq/lang/CountDownCallback.java
1877
/* ************************************************************************ # # DivConq # # http://divconq.com/ # # Copyright: # Copyright 2014 eTimeline, LLC. All rights reserved. # # License: # See the license.txt file in the project's top-level directory for details. # # Authors: # * Andy White # ************************************************************************ */ package divconq.lang; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import divconq.lang.op.OperationCallback; import divconq.lang.op.OperationResult; public class CountDownCallback { protected AtomicInteger count = null; protected OperationCallback callback = null; protected ReentrantLock cdlock = new ReentrantLock(); // TODO try StampedLock public CountDownCallback(int count, OperationCallback callback) { this.count = new AtomicInteger(count); this.callback = callback; } public int countDown() { this.cdlock.lock(); try { int res = this.count.decrementAndGet(); if (res < 0) res = 0; if (res == 0) this.callback.complete(); return res; } finally { this.cdlock.unlock(); } } public int countDown(OperationResult res) { this.cdlock.lock(); try { // we should use the SubContext approach to capture messages //this.callback.copyMessages(res); return this.countDown(); } finally { this.cdlock.unlock(); } } public int increment() { return this.count.incrementAndGet(); } public int increment(int amt) { return this.count.addAndGet(amt); } /* TODO remove if possible public void setContext(OperationContext taskContext) { this.callback.setContext(taskContext); } */ public int value() { return this.count.intValue(); } }
apache-2.0
robovm/robovm-samples
ios-no-ib/tabster-no-ib/src/main/java/org/robovm/samples/tabster/ui/SubLevelViewController.java
3498
/* * Copyright (C) 2014 RoboVM AB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Portions of this code is based on Apple Inc's Tabster sample (v1.6) * which is copyright (C) 2011-2014 Apple Inc. */ package org.robovm.samples.tabster.ui; import java.util.Arrays; import java.util.List; import org.robovm.apple.foundation.NSIndexPath; import org.robovm.apple.uikit.UITableView; import org.robovm.apple.uikit.UITableViewCell; import org.robovm.apple.uikit.UITableViewCellAccessoryType; import org.robovm.apple.uikit.UITableViewCellSelectionStyle; import org.robovm.apple.uikit.UITableViewCellStyle; import org.robovm.apple.uikit.UITableViewController; import org.robovm.apple.uikit.UITableViewDataSourceAdapter; import org.robovm.apple.uikit.UITableViewDelegateAdapter; public class SubLevelViewController extends UITableViewController { private String currentSelectionTitle; private List<String> dataList; private ModalViewController modalViewController; public SubLevelViewController () { dataList = Arrays.asList("Feature 1", "Feature 2"); UITableView tableView = getTableView(); tableView.setAlwaysBounceVertical(true); tableView.setDelegate(new UITableViewDelegateAdapter() { @Override public void didSelectRow (UITableView tableView, NSIndexPath indexPath) { tableView.deselectRow(indexPath, false); } @Override public void accessoryButtonTapped (UITableView tableView, NSIndexPath indexPath) { modalViewController.setOwningViewController(SubLevelViewController.this); UITableViewCell cell = 
tableView.getCellForRow(indexPath); currentSelectionTitle = cell.getTextLabel().getText(); presentViewController(modalViewController, true, null); } }); tableView.setDataSource(new UITableViewDataSourceAdapter() { @Override public long getNumberOfRowsInSection (UITableView tableView, long section) { return dataList.size(); } @Override public UITableViewCell getCellForRow (UITableView tableView, NSIndexPath indexPath) { final String identifier = "cellID2"; UITableViewCell cell = tableView.dequeueReusableCell(identifier); if (cell == null) { cell = new UITableViewCell(UITableViewCellStyle.Default, identifier); cell.setAccessoryType(UITableViewCellAccessoryType.DetailDisclosureButton); cell.setSelectionStyle(UITableViewCellSelectionStyle.Blue); } cell.getTextLabel().setText(dataList.get((int)indexPath.getRow())); return cell; } }); modalViewController = new ModalViewController(); } public String getCurrentSelectionTitle () { return currentSelectionTitle; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-redshiftdataapi/src/main/java/com/amazonaws/services/redshiftdataapi/model/transform/BatchExecuteStatementRequestMarshaller.java
4048
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.redshiftdataapi.model.transform; import java.util.List; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.redshiftdataapi.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * BatchExecuteStatementRequestMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class BatchExecuteStatementRequestMarshaller { private static final MarshallingInfo<String> CLUSTERIDENTIFIER_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ClusterIdentifier").build(); private static final MarshallingInfo<String> DATABASE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("Database").build(); private static final MarshallingInfo<String> DBUSER_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("DbUser").build(); private static final MarshallingInfo<String> SECRETARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("SecretArn").build(); private static final MarshallingInfo<List> SQLS_BINDING = MarshallingInfo.builder(MarshallingType.LIST).marshallLocation(MarshallLocation.PAYLOAD) 
.marshallLocationName("Sqls").build(); private static final MarshallingInfo<String> STATEMENTNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("StatementName").build(); private static final MarshallingInfo<Boolean> WITHEVENT_BINDING = MarshallingInfo.builder(MarshallingType.BOOLEAN) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("WithEvent").build(); private static final BatchExecuteStatementRequestMarshaller instance = new BatchExecuteStatementRequestMarshaller(); public static BatchExecuteStatementRequestMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(BatchExecuteStatementRequest batchExecuteStatementRequest, ProtocolMarshaller protocolMarshaller) { if (batchExecuteStatementRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(batchExecuteStatementRequest.getClusterIdentifier(), CLUSTERIDENTIFIER_BINDING); protocolMarshaller.marshall(batchExecuteStatementRequest.getDatabase(), DATABASE_BINDING); protocolMarshaller.marshall(batchExecuteStatementRequest.getDbUser(), DBUSER_BINDING); protocolMarshaller.marshall(batchExecuteStatementRequest.getSecretArn(), SECRETARN_BINDING); protocolMarshaller.marshall(batchExecuteStatementRequest.getSqls(), SQLS_BINDING); protocolMarshaller.marshall(batchExecuteStatementRequest.getStatementName(), STATEMENTNAME_BINDING); protocolMarshaller.marshall(batchExecuteStatementRequest.getWithEvent(), WITHEVENT_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
awsdocs/aws-doc-sdk-examples
javav2/example_code/secretsmanager/src/main/java/com/example/secrets/UpdateSecret.java
2745
//snippet-sourcedescription:[UpdateSecret.java demonstrates how to update a secret for AWS Secrets Manager.] //snippet-keyword:[AWS SDK for Java v2] //snippet-keyword:[Code Sample] //snippet-service:[AWS Secrets Manager] //snippet-sourcetype:[full-example] //snippet-sourcedate:[09/27/2021] //snippet-sourceauthor:[scmacdon-AWS] /* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package com.example.secrets; //snippet-start:[secretsmanager.java2.update_secret.import] import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient; import software.amazon.awssdk.services.secretsmanager.model.SecretsManagerException; import software.amazon.awssdk.services.secretsmanager.model.UpdateSecretRequest; //snippet-end:[secretsmanager.java2.update_secret.import] /** * To run this AWS code example, ensure that you have setup your development environment, including your AWS credentials. * * For information, see this documentation topic: * *https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html */ public class UpdateSecret { public static void main(String[] args) { final String USAGE = "\n" + "Usage:\n" + " <secretName> <secretValue>\n\n" + "Where:\n" + " secretName - the name of the secret (for example, tutorials/MyFirstSecret). \n"+ " secretValue - the secret value that is updated. 
\n"; if (args.length < 2) { System.out.println(USAGE); System.exit(1); } String secretName = args[0]; String secretValue = args[1]; Region region = Region.US_EAST_1; SecretsManagerClient secretsClient = SecretsManagerClient.builder() .region(region) .build(); updateMySecret(secretsClient, secretName, secretValue); secretsClient.close(); } //snippet-start:[secretsmanager.java2.update_secret.main] public static void updateMySecret(SecretsManagerClient secretsClient, String secretName, String secretValue) { try { UpdateSecretRequest secretRequest = UpdateSecretRequest.builder() .secretId(secretName) .secretString(secretValue) .build(); secretsClient.updateSecret(secretRequest); } catch (SecretsManagerException e) { System.err.println(e.awsErrorDetails().errorMessage()); System.exit(1); } } //snippet-end:[secretsmanager.java2.update_secret.main] }
apache-2.0
o19s/elasticsearch-learning-to-rank
src/main/java/com/o19s/es/ltr/rest/RestStoreManager.java
5209
package com.o19s.es.ltr.rest; import com.o19s.es.ltr.action.ListStoresAction; import com.o19s.es.ltr.feature.store.index.IndexFeatureStore; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.BaseRestHandler; import java.io.IOException; import java.util.List; import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; public class RestStoreManager extends FeatureStoreBaseRestHandler { @Override public String getName() { return "Manage the LtR store"; } @Override public List<Route> routes() { return unmodifiableList(asList( new Route(RestRequest.Method.PUT, "/_ltr/{store}"), new Route(RestRequest.Method.PUT, "/_ltr"), new Route(RestRequest.Method.POST, "/_ltr/{store}"), new Route(RestRequest.Method.POST, "/_ltr"), new Route(RestRequest.Method.DELETE, "/_ltr/{store}"), new Route(RestRequest.Method.DELETE, "/_ltr"), new Route(RestRequest.Method.GET, "/_ltr"), new Route(RestRequest.Method.GET, "/_ltr/{store}") )); } /** * Prepare the request for execution. Implementations should consume all request params before * returning the runnable for actual execution. Unconsumed params will immediately terminate * execution of the request. However, some params are only used in processing the response; * implementations can override {@link BaseRestHandler#responseParams()} to indicate such * params. 
* * @param request the request to execute * @param client client for executing actions on the local node * @return the action to execute * @throws IOException if an I/O exception occurred parsing the request and preparing for execution */ @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { String indexName = indexName(request); if (request.method() == RestRequest.Method.PUT) { if (request.hasParam("store")) { IndexFeatureStore.validateFeatureStoreName(request.param("store")); } return createIndex(client, indexName); } else if (request.method() == RestRequest.Method.POST) { if (request.hasParam("store")) { IndexFeatureStore.validateFeatureStoreName(request.param("store")); } throw new IllegalArgumentException("Updating a feature store is not yet supported."); } else if (request.method() == RestRequest.Method.DELETE) { return deleteIndex(client, indexName); } else { assert request.method() == RestRequest.Method.GET; // XXX: ambiguous api if (request.hasParam("store")) { return getStore(client, indexName); } return listStores(client); } } RestChannelConsumer listStores(NodeClient client) { return (channel) -> new ListStoresAction.ListStoresActionBuilder(client).execute( new RestToXContentListener<>(channel) ); } RestChannelConsumer getStore(NodeClient client, String indexName) { return (channel) -> client.admin().indices().prepareExists(indexName) .execute(new RestBuilderListener<IndicesExistsResponse>(channel) { @Override public RestResponse buildResponse( IndicesExistsResponse indicesExistsResponse, XContentBuilder builder ) throws Exception { builder.startObject() .field("exists", indicesExistsResponse.isExists()) .endObject() .close(); return new BytesRestResponse( indicesExistsResponse.isExists() ? 
RestStatus.OK : RestStatus.NOT_FOUND, builder ); } }); } RestChannelConsumer createIndex(NodeClient client, String indexName) { return (channel) -> client.admin().indices() .create(IndexFeatureStore.buildIndexRequest(indexName), new RestToXContentListener<>(channel)); } RestChannelConsumer deleteIndex(NodeClient client, String indexName) { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName); return (channel) -> client.admin().indices().delete(deleteIndexRequest, new RestToXContentListener<>(channel)); } }
apache-2.0
TheFinnishSocialInsuranceInstitution/KantaCDA-API
KantaCDA-API/src/main/java/fi/kela/kanta/cda/ReseptinKorjausKasaaja.java
24544
package fi.kela.kanta.cda; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Properties; import java.util.TimeZone; import javax.xml.bind.JAXBException; import org.hl7.v3.CD; import org.hl7.v3.CE; import org.hl7.v3.POCDMT000040Author; import org.hl7.v3.POCDMT000040ClinicalDocument; import org.hl7.v3.POCDMT000040Component3; import org.hl7.v3.POCDMT000040Component4; import org.hl7.v3.POCDMT000040Component5; import org.hl7.v3.POCDMT000040Entry; import org.hl7.v3.POCDMT000040Reference; import org.hl7.v3.ST; import org.hl7.v3.XActRelationshipDocument; import org.hl7.v3.XActRelationshipExternalReference; import fi.kela.kanta.cda.validation.ReseptinKorjausValidoija; import fi.kela.kanta.to.AmmattihenkiloTO; import fi.kela.kanta.to.LaakemaarayksenKorjausTO; import fi.kela.kanta.to.LaakemaaraysTO; import fi.kela.kanta.util.JaxbUtil; public class ReseptinKorjausKasaaja extends ReseptiKasaaja { LaakemaaraysTO alkuperainenLaakemaarays; LaakemaarayksenKorjausTO korjaus; ReseptinKorjausValidoija validoija; protected static final String VIRHE_KORJAUS_NULL = "Laakemaarayksen korjaus ei saa olla null."; protected static final String VIRHE_KORJAAJA_NULL = "Laakemaarayksen korjaaja ei saa olla null."; protected static final String VIRHE_KORJAUKSEN_SYY_KOODI_NULL_TAI_TYHJA = "Laakemaarayksen korjauksen syy koodi ei saa olla null tai tyhja."; protected static final String VIRHE_KORJAUKSEN_PERUSTELU_NULL_TAI_TYHJA = "Laakemaarayksen korjauksen perustelu ei saa olla null tai tyhja."; protected static final String VIRHE_ALKUPERAINEN_LAAKEMAARAYS_NULL = "Alkuperäinen lääkemääräys ei saa olla null."; protected static final String VIRHE_ALKUPERAISEN_LAAKEMAARAYKSEN_MAARAYSPAIVA_NULL = "Alkuperäisen lääkemääräyksen 'määräyspäivä' ei saa olla null."; protected static final String VIRHE_ALKUPERAISEN_LAAKEMAARAYKSEN_OID_NULL = "Alkuperäisen lääkemääräyksen 'oid' pitää löytyä."; protected static final String 
VIRHE_ALKUPERAISEN_LAAKEMAARAYKSEN_SETID_NULL = "Alkuperäisen lääkemääräyksen 'setid' pitää löytyä."; public ReseptinKorjausKasaaja(Properties properties, LaakemaarayksenKorjausTO korjaus, LaakemaaraysTO alkuperainenLaakemaarays) { super(properties); this.korjaus = korjaus; this.alkuperainenLaakemaarays = alkuperainenLaakemaarays; validoija = new ReseptinKorjausValidoija(korjaus, alkuperainenLaakemaarays); } /** * Täyttää LaakemaarayksenKorjausTOn tyhjät kentät alkuperäisestä lääkemääräyksestä. Asettaa myös korjauksen oidin, * setIdn, version ja alkuperainenOit tiedot. * * @param korjaus * LaakemaarayksenKorjausTO johon poimittavat tiedot sijoitetaan. * @param alkuperainenLaakemaarays * LaakemaaraysTO josta tiedot poimitaan. */ protected void paivataKorjaus() { if ( null == alkuperainenLaakemaarays ) { return; } korjaus.setAlkuperainenOid(alkuperainenLaakemaarays.getOid()); korjaus.setSetId(alkuperainenLaakemaarays.getSetId()); korjaus.setVersio(alkuperainenLaakemaarays.getVersio()); korjaus.setAlkuperainenCdaTyyppi(alkuperainenLaakemaarays.getCdaTyyppi()); if ( null == korjaus.getMaarayspaiva() ) { korjaus.setMaarayspaiva(alkuperainenLaakemaarays.getMaarayspaiva()); } if ( null == korjaus.getReseptintyyppi() ) { korjaus.setReseptintyyppi(alkuperainenLaakemaarays.getReseptintyyppi()); } if ( null == korjaus.getPakkauksienLukumaara() ) { korjaus.setPakkauksienLukumaara(alkuperainenLaakemaarays.getPakkauksienLukumaara()); } if ( null == korjaus.getLaakkeenKokonaismaaraValue() ) { korjaus.setLaakkeenKokonaismaaraValue(alkuperainenLaakemaarays.getLaakkeenKokonaismaaraValue()); } if ( null == korjaus.getLaakkeenKokonaismaaraUnit() ) { korjaus.setLaakkeenKokonaismaaraUnit(alkuperainenLaakemaarays.getLaakkeenKokonaismaaraUnit()); } if ( null == korjaus.getAjalleMaaratynReseptinAlkuaika() ) { korjaus.setAjalleMaaratynReseptinAlkuaika(alkuperainenLaakemaarays.getAjalleMaaratynReseptinAlkuaika()); } if ( null == korjaus.getAjalleMaaratynReseptinAikamaaraValue() ) { 
korjaus.setAjalleMaaratynReseptinAikamaaraValue( alkuperainenLaakemaarays.getAjalleMaaratynReseptinAikamaaraValue()); } if ( null == korjaus.getAjalleMaaratynReseptinAikamaaraUnit() ) { korjaus.setAjalleMaaratynReseptinAikamaaraUnit( alkuperainenLaakemaarays.getAjalleMaaratynReseptinAikamaaraUnit()); } if ( null == korjaus.getIterointiTeksti() ) { korjaus.setIterointiTeksti(alkuperainenLaakemaarays.getIterointiTeksti()); } if ( null == korjaus.getIterointienMaara() ) { korjaus.setIterointienMaara(alkuperainenLaakemaarays.getIterointienMaara()); } if ( null == korjaus.getIterointienValiValue() ) { korjaus.setIterointienValiValue(alkuperainenLaakemaarays.getIterointienValiValue()); } if ( null == korjaus.getIterointienValiUnit() ) { korjaus.setIterointienValiUnit(alkuperainenLaakemaarays.getIterointienValiUnit()); } if ( null == korjaus.getValmiste() ) { korjaus.setValmiste(alkuperainenLaakemaarays.getValmiste()); } if ( null == korjaus.getApteekissaValmistettavaLaake() ) { korjaus.setApteekissaValmistettavaLaake(alkuperainenLaakemaarays.getApteekissaValmistettavaLaake()); } if ( null == korjaus.getLaaketietokannanUlkopuolinenValmiste() ) { korjaus.setLaaketietokannanUlkopuolinenValmiste( alkuperainenLaakemaarays.getLaaketietokannanUlkopuolinenValmiste()); } if ( null == korjaus.getTyonantaja() ) { korjaus.setTyonantaja(alkuperainenLaakemaarays.getTyonantaja()); } if ( null == korjaus.getVakuutuslaitos() ) { korjaus.setVakuutuslaitos(alkuperainenLaakemaarays.getVakuutuslaitos()); } if ( null == korjaus.getAmmattihenkilo() ) { korjaus.setAmmattihenkilo(alkuperainenLaakemaarays.getAmmattihenkilo()); } if ( null == korjaus.getPotilas() ) { korjaus.setPotilas(alkuperainenLaakemaarays.getPotilas()); } if ( null == korjaus.isApteekissaValmistettavaLaake() ) { korjaus.setApteekissaValmistettavaLaake(alkuperainenLaakemaarays.isApteekissaValmistettavaLaake()); } if ( null == korjaus.isAnnosteluPelkastaanTekstimuodossa() ) { korjaus.setAnnosteluPelkastaanTekstimuodossa( 
alkuperainenLaakemaarays.isAnnosteluPelkastaanTekstimuodossa()); } if ( null == korjaus.getAnnostusohje() ) { korjaus.setAnnostusohje(alkuperainenLaakemaarays.getAnnostusohje()); } if ( null == korjaus.isSICmerkinta() ) { korjaus.setSICmerkinta(alkuperainenLaakemaarays.isSICmerkinta()); } if(null == korjaus.isAnnostusTarvittaessa()) { korjaus.setAnnostusTarvittaessa(alkuperainenLaakemaarays.isAnnostusTarvittaessa()); } if (null == korjaus.getAnnosajaksonPituus()) { korjaus.setAnnosajaksonPituus(alkuperainenLaakemaarays.getAnnosajaksonPituus()); } if (korjaus.getAnnokset().isEmpty()) { korjaus.getAnnokset().addAll(alkuperainenLaakemaarays.getAnnokset()); } if ( null == korjaus.isLaakevaihtokielto() ) { korjaus.setLaakevaihtokielto(alkuperainenLaakemaarays.isLaakevaihtokielto()); } if ( null == korjaus.getKayttotarkoitusTeksti() ) { korjaus.setKayttotarkoitusTeksti(alkuperainenLaakemaarays.getKayttotarkoitusTeksti()); } if ( null == korjaus.getAlle12VuotiaanPainoValue() ) { korjaus.setAlle12VuotiaanPainoValue(alkuperainenLaakemaarays.getAlle12VuotiaanPainoValue()); } if ( null == korjaus.getAlle12VuotiaanPainoUnit() ) { korjaus.setAlle12VuotiaanPainoUnit(alkuperainenLaakemaarays.getAlle12VuotiaanPainoUnit()); } if ( null == korjaus.isAnnosjakelu() ) { korjaus.setAnnosjakelu(alkuperainenLaakemaarays.isAnnosjakelu()); } if ( null == korjaus.getAnnosjakeluTeksti() ) { korjaus.setAnnosjakeluTeksti(alkuperainenLaakemaarays.getAnnosjakeluTeksti()); } if ( korjaus.getHoitolajit().isEmpty() ) { korjaus.getHoitolajit().addAll(alkuperainenLaakemaarays.getHoitolajit()); } if ( null == korjaus.getViestiApteekille() ) { korjaus.setViestiApteekille(alkuperainenLaakemaarays.getViestiApteekille()); } if ( null == korjaus.getErillisselvitys() ) { korjaus.setErillisselvitys(alkuperainenLaakemaarays.getErillisselvitys()); } if ( null == korjaus.getErillisselvitysteksti() ) { korjaus.setErillisselvitysteksti(alkuperainenLaakemaarays.getErillisselvitysteksti()); } if ( null == 
korjaus.getPotilaanTunnistaminen() ) { korjaus.setPotilaanTunnistaminen(alkuperainenLaakemaarays.getPotilaanTunnistaminen()); } if ( null == korjaus.getPotilaanTunnistaminenTeksti() ) { korjaus.setPotilaanTunnistaminenTeksti(alkuperainenLaakemaarays.getPotilaanTunnistaminenTeksti()); } if ( null == korjaus.getPKVlaakemaarays() ) { korjaus.setPKVlaakemaarays(alkuperainenLaakemaarays.getPKVlaakemaarays()); } if ( null == korjaus.isPysyvaislaakitys() ) { korjaus.setPysyvaislaakitys(alkuperainenLaakemaarays.isPysyvaislaakitys()); } if ( null == korjaus.isKyseessaLaakkeenkaytonAloitus() ) { korjaus.setKyseessaLaakkeenkaytonAloitus(alkuperainenLaakemaarays.isKyseessaLaakkeenkaytonAloitus()); } if ( null == korjaus.isHuume() ) { korjaus.setHuume(alkuperainenLaakemaarays.isHuume()); } if ( null == korjaus.getReseptinLaji() ) { korjaus.setReseptinLaji(alkuperainenLaakemaarays.getReseptinLaji()); } if ( null == korjaus.isUudistamiskielto() ) { korjaus.setUudistamiskielto(alkuperainenLaakemaarays.isUudistamiskielto()); } if ( null == korjaus.getUusimiskiellonSyy() ) { korjaus.setUusimiskiellonSyy(alkuperainenLaakemaarays.getUusimiskiellonSyy()); } if ( null == korjaus.getUusimiskiellonPerustelu() ) { korjaus.setUusimiskiellonPerustelu(alkuperainenLaakemaarays.getUusimiskiellonPerustelu()); } if ( null == korjaus.getLaaketietokannanVersio() ) { korjaus.setLaaketietokannanVersio(alkuperainenLaakemaarays.getLaaketietokannanVersio()); } if ( null == korjaus.getApteekissaTallennettuLaakemaarays() ) { korjaus.setApteekissaTallennettuLaakemaarays( alkuperainenLaakemaarays.getApteekissaTallennettuLaakemaarays()); } if ( null == korjaus.getTartuntatauti() ) { korjaus.setTartuntatauti(alkuperainenLaakemaarays.getTartuntatauti()); } if ( null == korjaus.getApteekissaTallennettuLaakemaaraysPerustelu() ) { korjaus.setApteekissaTallennettuLaakemaaraysPerustelu( alkuperainenLaakemaarays.getApteekissaTallennettuLaakemaaraysPerustelu()); } if ( null == 
korjaus.getApteekissaTallennettuLaakemaaraysMuuSyy() ) { korjaus.setApteekissaTallennettuLaakemaaraysMuuSyy( alkuperainenLaakemaarays.getApteekissaTallennettuLaakemaaraysMuuSyy()); } } /** * Luo lääkemääräyksen korjauksen LaakemaarayksenKorjausTO tiedoista. * * @param korjaus * LaakemaarayksenKorjausTO josta korjauksen tiedot poimitaan. * @param alkuperainenLaakemaarays * alkuperainen LaakemaaraysTO. * @return lääkemääräyksen korjaus cda POCDMT00040ClinicalDocumenttinä */ private POCDMT000040ClinicalDocument kasaaReseptinKorjaus(LaakemaarayksenKorjausTO korjaus, LaakemaaraysTO alkuperainenLaakemaarays) { validoija.validoiKorjaus(); paivataKorjaus(); // Varmistetaan vielä, että pakollisen potilaan tiedot on korjauksella. validoija.validoiHenkilotiedot(korjaus.getPotilas()); AmmattihenkiloTO tmp = korjaus.getAmmattihenkilo(); POCDMT000040ClinicalDocument cda = null; try { // Asettaa korjaajan käsittelijäksi (koska ko. tieto otetaan tästä). // Alkuperäisen määrääjän tiedot otetaan kuitenkin alkuperäisestä määräyksestä korjaus.setAmmattihenkilo(korjaus.getKorjaaja()); cda = kasaaCdaRelatedDocumentTiedonKanssa(korjaus, alkuperainenLaakemaarays); } finally { // Palauta tiedot kuten oli aluksikin korjaus.setAmmattihenkilo(tmp); } return cda; } /** * Luo viittaukset alkuperaiseen lääkemääräykseen ja lääkemääräyksen korjaukseen itseensä sekä * * @param laakemaarays * LaakemaaraysTO josta tietoja haetaan * @return POCDMT000040Reference lista */ @Override protected Collection<POCDMT000040Reference> luoViittaukset(LaakemaaraysTO laakemaarays) { Collection<POCDMT000040Reference> viittaukset = new ArrayList<POCDMT000040Reference>(); CD code = of.createCD(); String oid = laakemaarays.getOid(); String setId = laakemaarays.getSetId(); fetchAttributes("korjaus.code", code); // viittaus itseensä viittaukset.add(luoViittaus(oid, setId, XActRelationshipExternalReference.SPRT, code)); // Viittaus alkuperaiseen lääkemääräykseen CD alkupCode = of.createCD(); if ( laakemaarays instanceof 
LaakemaarayksenKorjausTO ) { if ( ((LaakemaarayksenKorjausTO) laakemaarays) .getAlkuperainenCdaTyyppi() == KantaCDAConstants.ReseptisanomanTyyppi.LAAKEMAARAYKSEN_KORJAUS .getTyyppi() ) { fetchAttributes("korjaus.code", alkupCode); } else { fetchAttributes(Kasaaja.LM_CONTENTS, alkupCode); } viittaukset.add(luoViittaus(((LaakemaarayksenKorjausTO) laakemaarays).getAlkuperainenOid(), setId, XActRelationshipExternalReference.RPLC, alkupCode)); } return viittaukset; } /** * Luo lääkemääräyksen korjauksen perustelun ja korjaajan tiedot * * @param korjaus * LaakemaarayksenKorjausTO josta tiedot haetaan * @return POCDMT000040Component4 elementin johon annetut korjaus tiedot asetettu */ @Override protected POCDMT000040Component4 luoKorjauksenSyyPerusteluJaKorjaaja(LaakemaaraysTO laakemaarays) { if ( !(laakemaarays instanceof LaakemaarayksenKorjausTO) ) { return null; } LaakemaarayksenKorjausTO korjausTO = (LaakemaarayksenKorjausTO) laakemaarays; if ( onkoNullTaiTyhja(korjausTO.getKorjauksenSyyKoodi()) ) { return null; } POCDMT000040Component4 korjausComp = of.createPOCDMT000040Component4(); korjausComp.setObservation(of.createPOCDMT000040Observation()); CE korjausCodeValue = of.createCE(); fetchAttributes(korjausTO.getKorjauksenSyyKoodi() + ".muutoksensyy", korjausCodeValue); korjausCodeValue.setCode(korjausTO.getKorjauksenSyyKoodi()); asetaObservation("97", korjausCodeValue, korjausComp.getObservation()); if ( !onkoNullTaiTyhja(korjausTO.getKorjauksenPerustelu()) ) { ST korjausPerusteluValue = of.createST(); korjausPerusteluValue.getContent().add(korjausTO.getKorjauksenPerustelu()); korjausComp.getObservation().getValues().add(korjausPerusteluValue); } POCDMT000040Author author = of.createPOCDMT000040Author(); author.setTime(of.createTS()); author.getTime().getNullFlavors().add("NI"); author.setAssignedAuthor(of.createPOCDMT000040AssignedAuthor()); author.getAssignedAuthor().getIds().add(of.createII()); author.getAssignedAuthor().getIds().get(0).getNullFlavors().add("NI"); 
author.getAssignedAuthor().setAssignedPerson(of.createPOCDMT000040Person()); author.getAssignedAuthor().getAssignedPerson().getNames().add(getNames(korjausTO.getKorjaaja().getKokonimi())); korjausComp.getObservation().getAuthors().add(author); return korjausComp; } /** * Palauttaa lääkemääräyksen korjauksen muut tiedot osion entry/orgsnizer/code elementin coden * * @return String muut tiedot osion code elementin code atribuutin arvo */ @Override protected String getMuutTiedotCode() { return "99"; } /** * Palauttaa CDA-asiakirjan perustuen konstruktorissa annetun ReseptikorjausTO:n tietoihin. * * @return String CDA-asiakirja XML-muodossa */ @Override public String kasaaReseptiAsiakirja() throws JAXBException { return JaxbUtil.getInstance().marshalloi(kasaaReseptiCDA(), "urn:hl7-org:v3 CDA_Fi.xsd"); } @Override protected POCDMT000040Entry luoAsiakirjanMuutTiedot(POCDMT000040Entry entry) { if ( null != entry.getOrganizer() ) { entry.getOrganizer().getComponents().add(luoKorjauksenSyyPerusteluJaKorjaaja(korjaus)); } return entry; } @Override protected void luoAsiakirjakohtaisetRakenteet(LaakemaaraysTO laakemaarays) { // ei mitään } /** * Palauttaa CDA-asiakirjan perustuen konstruktorissa annetun ReseptikorjausTO:n tietoihin. * * @return POCDMT000040ClinicalDocument CDA-asiakirja JAXB-elementteinä */ @Override public POCDMT000040ClinicalDocument kasaaReseptiCDA() { return kasaaReseptinKorjaus(korjaus, alkuperainenLaakemaarays); } /** * Luo lääkemääräyksen annetuista tiedoista, lisäten related document osion, jolla viitataan aiempaan * lääkemääräykseen johon tämä (uusittava/korjattava/...) lääkemääräys viittaa. * * @param laakemaarays * Lääkemääräys TO, jonka pohjalta cda luodaan * @param alkuperainenLaakemaarays * Alkuperäinen lääkemääräys, johon viitataan related document osiossa, sekä josta katsotaan alkuperäisen * määrääjän author tiedot * @param rooli * Rooli, jota käytetään määräyksen tekijäksi cda:ssa (kts. 
{@link http * ://91.202.112.142/codeserver/pages/code-list-page.xhtml?versionKey=347}) * @param uusimispyynnonOid * Oid joka on luotu uusimispyyntöön. Anna "tyhjä", jos käytetään alkuperäisen lääkemääräyksen oidia * viittauksessa * @param uusimispyynnonOid * Uusimispyynnössä käytetty oid * @param uusimispyynnonSetId * Uusimispyynnössä käytetty setId * @return Muodostettu cda */ protected POCDMT000040ClinicalDocument kasaaCdaRelatedDocumentTiedonKanssa(LaakemaaraysTO laakemaarays, LaakemaaraysTO alkuperaisetTiedot) { String relatedOid, relatedSetId; validoija.validoiAlkuperainenLaakemaarays(); relatedOid = alkuperaisetTiedot.getOid(); relatedSetId = alkuperaisetTiedot.getSetId(); Calendar now = Calendar.getInstance(TimeZone.getTimeZone(ReseptiKasaaja.TIME_ZONE)); String effectiveTimeValue = getDateFormat().format(now.getTime()); String today = getTodayDateFormat().format(now.getTime()); POCDMT000040ClinicalDocument clinicalDocument = of.createPOCDMT000040ClinicalDocument(); addIdFields(clinicalDocument, laakemaarays, effectiveTimeValue); Object[] args = { MaaraajanRooli.KORJAAJA.getPropertyAvaimenOsa() }; // Asetetaan title ja code fetchAttributes(String.format(ReseptiKasaaja.code, args), clinicalDocument.getCode()); clinicalDocument.getTitle().getContent().clear(); clinicalDocument.getTitle().getContent().add(fetchProperty(String.format(ReseptiKasaaja.title, args))); // Korjauksessa korjaajan rooli on oltava "KOR", uusimisessa "LAL" laakemaarays.getAmmattihenkilo().setRooli(MaaraajanRooli.KORJAAJA.getRooliKoodi()); addRecordTarget(clinicalDocument, laakemaarays.getPotilas()); // Edellisen lääkemääräyksen tehnyt ammattihenkilö (lisätään vain korjauksessa) addAuthor(clinicalDocument, luoAuthor(alkuperaisetTiedot.getAmmattihenkilo())); // Mahdollinen edellisen lääkemääräyksen kirjaaja (esim. 
apteekki) if ( alkuperaisetTiedot.getKirjaaja() != null ) { addAuthor(clinicalDocument, luoAuthor(alkuperaisetTiedot.getKirjaaja())); } // Uusija / korjaaja addAuthor(clinicalDocument, luoAuthor(laakemaarays.getAmmattihenkilo())); addCustodian(clinicalDocument); addAuthorization(clinicalDocument, laakemaarays.getAlaikaisenKieltoKoodi()); addRelatedDocument(clinicalDocument, relatedOid, relatedSetId, getPropertyCode(alkuperaisetTiedot.getCdaTyyppi(), args), XActRelationshipDocument.RPLC); addComponentOf(clinicalDocument, getDateFormat().format(laakemaarays.getMaarayspaiva())/* effectiveTimeValue */, alkuperaisetTiedot.getLaatimispaikka(), alkuperaisetTiedot.getPalvelutapahtumanOid()); addLocalHeader(clinicalDocument); clinicalDocument.setComponent(of.createPOCDMT000040Component2()); clinicalDocument.getComponent().setStructuredBody(of.createPOCDMT000040StructuredBody()); POCDMT000040Component3 component3 = of.createPOCDMT000040Component3(); clinicalDocument.getComponent().getStructuredBody().getComponents().add(component3); component3.getTemplateIds().add(of.createPOCDMT000040InfrastructureRootTemplateId()); // TemplateId fetchAttributes(ReseptiKasaaja.template_id, component3.getTemplateIds().get(0)); component3.setSection(of.createPOCDMT000040Section()); component3.getSection().setAttributeID(getNextOID(laakemaarays)); component3.getSection().setId(of.createII()); component3.getSection().getId().setRoot(getId(laakemaarays)); component3.getSection().setCode(of.createCE()); fetchAttributes(String.format(ReseptiKasaaja.code, args), component3.getSection().getCode()); component3.getSection().setTitle(of.createST()); // Title component3.getSection().getTitle().getContent().add(component3.getSection().getCode().getDisplayName()); POCDMT000040Component5 component5 = luoComponent(laakemaarays); component3.getSection().getComponents().add(component5); // Narrative (paikka, aika, lääkäri) component5.getSection().setText(luoKorjausNarrativePaikkaPvmLaakari(alkuperaisetTiedot, 
laakemaarays, today)); POCDMT000040Component5 component6 = luoComponent(laakemaarays); component5.getSection().getComponents().add(component6); // Narrative (lääkemääräyksen tiedot) component6.getSection().setText(luoNarrativeLaakemaarays(laakemaarays)); // Valmisteen ja Pakkausten tiedot component6.getSection().getEntries().add(luoValmisteenJaPakkauksenTiedot(laakemaarays, effectiveTimeValue, alkuperaisetTiedot.getAmmattihenkilo())); // Vaikuttavat aineet if ( lisataankoVaikuttavatAineet(laakemaarays) ) { component6.getSection().getEntries().add(luoVaikuttavatAinesosat(laakemaarays)); } // Muut ainesosat // Vain apteekissa valmistettaville? if ( laakemaarays.isApteekissaValmistettavaLaake() ) { component6.getSection().getEntries().add(luoMuutAinesosat(laakemaarays)); } // annostus component6.getSection().getEntries().add(luoAnnostus(laakemaarays)); // Lääkemääräyksen muut tiedot component6.getSection().getEntries().add(luoMuutTiedot(laakemaarays)); return clinicalDocument; } }
apache-2.0
amccurry/lucene-document-security
src/main/java/lucene/security/DocumentVisibilityEvaluator.java
1420
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package lucene.security; import java.io.IOException; import lucene.security.accumulo.VisibilityEvaluator; import lucene.security.accumulo.VisibilityParseException; public class DocumentVisibilityEvaluator { private VisibilityEvaluator _evaluator; public DocumentVisibilityEvaluator(DocumentAuthorizations authorizations) { _evaluator = new VisibilityEvaluator(authorizations); } public boolean evaluate(DocumentVisibility visibility) throws IOException { try { return _evaluator.evaluate(visibility); } catch (VisibilityParseException e) { throw new IOException(e); } } }
apache-2.0
hibernate/hibernate-semantic-query
src/main/java/org/hibernate/query/sqm/domain/SqmPluralAttribute.java
1505
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
 */
package org.hibernate.query.sqm.domain;

import javax.persistence.metamodel.PluralAttribute;

import org.hibernate.persister.collection.spi.CollectionElement;
import org.hibernate.persister.collection.spi.CollectionIndex;

/**
 * Models references to plural attributes (persistent collections).
 * <p>
 * Combines the generic attribute contract with navigable-source behavior so
 * a plural attribute can itself be further dereferenced in an SQM query.
 *
 * @author Steve Ebersole
 *
 * @deprecated {@link org.hibernate.persister.common.spi.PluralPersistentAttribute}
 */
@Deprecated
public interface SqmPluralAttribute<J> extends SqmAttribute<J>, SqmDomainTypeExporter<J>, SqmNavigableSource<J> {
	/**
	 * Classifications of the plurality.  Each value maps onto the
	 * corresponding JPA {@link PluralAttribute.CollectionType}, with BAG
	 * (a Hibernate-specific notion) mapping onto the generic JPA COLLECTION.
	 */
	enum CollectionClassification {
		SET( PluralAttribute.CollectionType.SET ),
		LIST( PluralAttribute.CollectionType.LIST ),
		MAP( PluralAttribute.CollectionType.MAP ),
		BAG( PluralAttribute.CollectionType.COLLECTION );

		private final PluralAttribute.CollectionType jpaClassification;

		CollectionClassification(PluralAttribute.CollectionType jpaClassification) {
			this.jpaClassification = jpaClassification;
		}

		/**
		 * The JPA metamodel classification equivalent to this Hibernate one.
		 */
		public PluralAttribute.CollectionType toJpaClassification() {
			return jpaClassification;
		}
	}

	/**
	 * The structural classification (SET / LIST / MAP / BAG) of this collection.
	 */
	CollectionClassification getCollectionClassification();

	/**
	 * Descriptor for the collection's element type.
	 */
	CollectionElement getElementDescriptor();

	/**
	 * Descriptor for the collection's index/key (LIST position or MAP key);
	 * semantics for non-indexed collections are defined by the persister SPI.
	 */
	CollectionIndex getIndexDescriptor();

	/**
	 * The role name of this collection (owner entity + attribute path).
	 */
	String getRole();
}
apache-2.0
bencomp/dataverse
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java
11729
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersionUser;
import edu.harvard.iq.dataverse.DatasetField;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
import edu.harvard.iq.dataverse.RoleAssignment;
import edu.harvard.iq.dataverse.Template;
import edu.harvard.iq.dataverse.api.imports.ImportUtil;
import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.validation.ConstraintViolation;

/**
 * Creates a {@link Dataset} in the passed {@link CommandContext}.
 *
 * @author michael
 */
@RequiredPermissions(Permission.AddDataset)
public class CreateDatasetCommand extends AbstractCommand<Dataset> {

    private static final Logger logger = Logger.getLogger(CreateDatasetCommand.class.getCanonicalName());

    // The dataset being created; mutated in place during execute().
    private final Dataset theDataset;
    // When true, execute() fails if the persistent-identifier registration did not succeed.
    private final boolean registrationRequired;
    // TODO: rather than have a boolean, create a sub-command for creating a dataset during import
    // null importType means "normal" (non-import) creation through the application.
    private final ImportUtil.ImportType importType;
    // Optional template whose usage counter is incremented on success; may be null.
    private final Template template;

    /**
     * Creates a dataset with no registration requirement, no import type and no template.
     */
    public CreateDatasetCommand(Dataset theDataset, User user) {
        super(user, theDataset.getOwner());
        this.theDataset = theDataset;
        this.registrationRequired = false;
        this.importType=null;
        this.template=null;
    }

    /**
     * Creates a dataset, optionally requiring successful identifier registration.
     */
    public CreateDatasetCommand(Dataset theDataset, User user, boolean registrationRequired) {
        super(user, theDataset.getOwner());
        this.theDataset = theDataset;
        this.registrationRequired = registrationRequired;
        this.importType=null;
        this.template=null;
    }

    /**
     * Creates a dataset as part of an import (NEW / MIGRATION / HARVEST).
     */
    public CreateDatasetCommand(Dataset theDataset, User user, boolean registrationRequired, ImportUtil.ImportType importType) {
        super(user, theDataset.getOwner());
        this.theDataset = theDataset;
        this.registrationRequired = registrationRequired;
        this.importType=importType;
        this.template=template=null;
    }

    /**
     * Creates a dataset from a template; the template's usage counter is bumped on success.
     */
    public CreateDatasetCommand(Dataset theDataset, AuthenticatedUser user, boolean registrationRequired, ImportUtil.ImportType importType, Template template) {
        super(user, theDataset.getOwner());
        this.theDataset = theDataset;
        this.registrationRequired = registrationRequired;
        this.importType=importType;
        this.template=template;
    }

    /**
     * Validates, timestamps, persists and (conditionally) registers a DOI for the dataset.
     * Order matters throughout: validation precedes any mutation, the identifier must
     * exist before registration, and the entity is merged twice (once for the dataset,
     * once after setting the permission-modification time).
     *
     * @return the managed (merged) dataset.
     * @throws CommandException on duplicate identifier, validation failure, or a
     *         required-but-failed registration.
     */
    @Override
    public Dataset execute(CommandContext ctxt) throws CommandException {
        // Formatter used only for FINE-level timing log lines below.
        // NOTE(review): pattern uses "hh" (12-hour clock) — presumably fine for timing logs; confirm if ever parsed.
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss");
        // Migrated/harvested datasets bring their own identifiers; everything else must be unique.
        if ( (importType != ImportType.MIGRATION && importType != ImportType.HARVEST) && !ctxt.datasets().isUniqueIdentifier(theDataset.getIdentifier(), theDataset.getProtocol(), theDataset.getAuthority(), theDataset.getDoiSeparator()) ) {
            throw new IllegalCommandException(String.format("Dataset with identifier '%s', protocol '%s' and authority '%s' already exists",
                                                             theDataset.getIdentifier(), theDataset.getProtocol(), theDataset.getAuthority()),
                this);
        }
        // If we are importing with the API, then we don't want to create an editable version, 
        // just save the version is already in theDataset.
        DatasetVersion dsv = importType!=null? theDataset.getLatestVersion() : theDataset.getEditVersion();
        // validate
        // @todo for now we run through an initFields method that creates empty fields for anything without a value
        // that way they can be checked for required
        dsv.setDatasetFields(dsv.initDatasetFields());
        Set<ConstraintViolation> constraintViolations = dsv.validate();
        if (!constraintViolations.isEmpty()) {
            // Collect every violation message into a single exception.
            String validationFailedString = "Validation failed:";
            for (ConstraintViolation constraintViolation : constraintViolations) {
                validationFailedString += " " + constraintViolation.getMessage();
            }
            throw new IllegalCommandException(validationFailedString, this);
        }
                
        logger.log(Level.FINE, "after validation " + formatter.format(new Date().getTime()));
        
        // TODO remove
        theDataset.setCreator((AuthenticatedUser) getUser());
        
        theDataset.setCreateDate(new Timestamp(new Date().getTime()));
        
        // Drop fields whose every value is blank (removeBlankDatasetFieldValues returns
        // true when the field itself became empty and should be removed).
        Iterator<DatasetField> dsfIt = dsv.getDatasetFields().iterator();
        while (dsfIt.hasNext()) {
            if (dsfIt.next().removeBlankDatasetFieldValues()) {
                dsfIt.remove();
            }
        }
        // Normalize the display order of the remaining field values.
        Iterator<DatasetField> dsfItSort = dsv.getDatasetFields().iterator();
        while (dsfItSort.hasNext()) {
            dsfItSort.next().setValueDisplayOrder();
        }
        // One shared timestamp for version create/update and dataset modification times.
        Timestamp createDate = new Timestamp(new Date().getTime());
        dsv.setCreateTime(createDate);
        dsv.setLastUpdateTime(createDate);
        theDataset.setModificationTime(createDate);
        for (DataFile dataFile: theDataset.getFiles() ){
            dataFile.setCreator((AuthenticatedUser) getUser());
            dataFile.setCreateDate(theDataset.getCreateDate());
        }
        logger.log(Level.FINE,"after datascrub " + formatter.format(new Date().getTime()));
        // Fill any missing identifier parts from the system-wide settings.
        String nonNullDefaultIfKeyNotFound = "";
        String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
        String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
        String doiSeparator = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound);
        String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
        if (theDataset.getProtocol()==null) theDataset.setProtocol(protocol);
        if (theDataset.getAuthority()==null) theDataset.setAuthority(authority);
        if (theDataset.getDoiSeparator()==null) theDataset.setDoiSeparator(doiSeparator);
        if (theDataset.getIdentifier()==null) {
            theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(theDataset.getProtocol(), theDataset.getAuthority(), theDataset.getDoiSeparator()));
        }
        // Attempt the registration if importing dataset through the API, or the app (but not harvest or migrate)
        if ((importType==null || importType.equals(ImportType.NEW))
                && protocol.equals("doi") 
                && doiProvider.equals("EZID") 
                && theDataset.getGlobalIdCreateTime() == null) {
            String doiRetString = ctxt.doiEZId().createIdentifier(theDataset);
            // Check return value to make sure registration succeeded
            // (EZID echoes the identifier back on success).
            if (doiRetString.contains(theDataset.getIdentifier())) {
                theDataset.setGlobalIdCreateTime(createDate);
            }
        } else {
            // If harvest or migrate, and this is a released dataset, we don't need to register,
            // so set the globalIdCreateTime to now
            if (theDataset.getLatestVersion().getVersionState().equals(VersionState.RELEASED) ){
                theDataset.setGlobalIdCreateTime(new Date());
            }
        }
        if (registrationRequired && theDataset.getGlobalIdCreateTime() == null) {
            throw new IllegalCommandException("Dataset could not be created.  Registration failed", this);
        }
        logger.log(Level.FINE,"after doi " + formatter.format(new Date().getTime()));
        Dataset savedDataset = ctxt.em().merge(theDataset);
        logger.log(Level.FINE,"after db update " + formatter.format(new Date().getTime()));
        // set the role to be default contributor role for its dataverse
        if (importType==null || importType.equals(ImportType.NEW)) {
            ctxt.roles().save(new RoleAssignment(savedDataset.getOwner().getDefaultContributorRole(), getUser(), savedDataset));
        }
        
        savedDataset.setPermissionModificationTime(new Timestamp(new Date().getTime()));
        // Second merge so the permission-modification time is persisted on the managed entity.
        savedDataset = ctxt.em().merge(savedDataset);
        
        if(template != null){
            ctxt.templates().incrementUsageCount(template.getId());
        }

        try {
            /**
             * @todo Do something with the result. Did it succeed or fail?
             */
            boolean doNormalSolrDocCleanUp = true;
            Future<String> indexDatasetFuture = ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp);
            // logger.log(Level.INFO, "during dataset save, indexing result was: {0}", indexingResult);
        } catch ( RuntimeException e ) {
            // Indexing failures are non-fatal for dataset creation; log and continue.
            logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage(), e);
        }
        logger.log(Level.FINE,"after index " + formatter.format(new Date().getTime()));

        // if we are not migrating, assign the user to this version
        if (importType==null || importType.equals(ImportType.NEW)) {
            DatasetVersionUser datasetVersionDataverseUser = new DatasetVersionUser();
            // Identifiers may carry an "@" prefix; strip it before the lookup.
            String id = getUser().getIdentifier();
            id = id.startsWith("@") ? id.substring(1) : id;
            AuthenticatedUser au = ctxt.authentication().getAuthenticatedUser(id);
            datasetVersionDataverseUser.setAuthenticatedUser(au);
            datasetVersionDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
            datasetVersionDataverseUser.setLastUpdateDate((Timestamp) createDate);
            if (savedDataset.getLatestVersion().getId() == null){
                logger.warning("CreateDatasetCommand: savedDataset version id is null");
            } else {
                datasetVersionDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
            }
            ctxt.em().merge(datasetVersionDataverseUser);
        }
        logger.log(Level.FINE,"after create version user " + formatter.format(new Date().getTime()));
        return savedDataset;
    }

    @Override
    public int hashCode() {
        int hash = 7;
        hash = 97 * hash + Objects.hashCode(this.theDataset);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof CreateDatasetCommand)) {
            return false;
        }
        final CreateDatasetCommand other = (CreateDatasetCommand) obj;
        return Objects.equals(this.theDataset, other.theDataset);
    }

    @Override
    public String toString() {
        return "[DatasetCreate dataset:" + theDataset.getId() + "]";
    }
}
apache-2.0
SocraticGrid/UCS-Implementation
ucs-nifi-samples/ucs-nifi-test-workbench/src/main/java/org/socraticgrid/hl7/ucs/nifi/test/workbench/command/NewAlertMessageCommand.java
4903
/* * Copyright 2015 Cognitive Medical Systems, Inc (http://www.cognitivemedicine.com). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.socraticgrid.hl7.ucs.nifi.test.workbench.command; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import org.socraticgrid.hl7.services.uc.model.AlertMessage; import org.socraticgrid.hl7.services.uc.model.AlertStatus; import org.socraticgrid.hl7.services.uc.model.MessageModel; import org.socraticgrid.hl7.ucs.nifi.common.model.MessageWrapper; import org.socraticgrid.hl7.ucs.nifi.common.serialization.MessageSerializationException; import org.socraticgrid.hl7.ucs.nifi.common.serialization.MessageSerializer; import org.socraticgrid.hl7.ucs.nifi.common.util.AlertMessageBuilder; import org.socraticgrid.hl7.ucs.nifi.common.util.MessageBuilder; /** * * @author esteban */ public class NewAlertMessageCommand implements Command { private AlertMessage message; public static class Recipient { public String to; public String getTo() { return to; } public String getType() { return "ALERT"; } } public static class Property { public String key; public String value; public String getKey() { return key; } public String getValue() { return value; } } @Override public void init(JsonObject config) { try { String conversationId = 
config.get("conversationId").getAsString(); String from = config.get("from").getAsString(); String status = config.get("status") == null ? "New" : config.get("status").getAsString(); String subject = config.get("subject").getAsString(); String body = config.get("body").getAsString(); List<Recipient> finalRecipients = new ArrayList<>(); JsonArray recipients = config.get("recipients").getAsJsonArray(); for (JsonElement recipient : recipients) { Recipient r = new Recipient(); r.to = recipient.getAsJsonObject().get("to").getAsString(); finalRecipients.add(r); } List<Property> finalProperties = new ArrayList<>(); JsonArray properties = config.get("properties").getAsJsonArray(); for (JsonElement property : properties) { Property p = new Property(); p.key = property.getAsJsonObject().get("key").getAsString(); p.value = property.getAsJsonObject().get("value").getAsString(); finalProperties.add(p); } AlertMessageBuilder messageBuilder = (AlertMessageBuilder) new AlertMessageBuilder() .withStatus(AlertStatus.valueOf(status)) .withMessageId(UUID.randomUUID().toString()) .withSender(from) .withConversationId(conversationId) .withSubject(subject) .withBody(body); for (Recipient finalRecipient : finalRecipients) { messageBuilder.addRecipient(new MessageBuilder.Recipient(finalRecipient.getTo(), finalRecipient.getType())); } for (Property finalProperty : finalProperties) { messageBuilder.addProperty(finalProperty.key, finalProperty.value); } this.message = messageBuilder.buildMessage(); } catch (IOException | MessageSerializationException ex) { throw new IllegalArgumentException("Error preparing New Alert Message Command", ex); } } @Override public JsonObject execute() { try { CreateUCSSessionCommand.getLastSession().getNewClient().sendMessage(new MessageModel(message)); return new JsonObject(); } catch (Exception ex) { throw new IllegalArgumentException("Error executing New Alert Message Command: "+ex.getMessage(), ex); } } }
apache-2.0
flipkart-incubator/Masquerade
masquerade-core/src/main/java/com/flipkart/masquerade/processor/InterfaceProcessor.java
2751
/* * Copyright 2017 Flipkart Internet, pvt ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.flipkart.masquerade.processor; import com.flipkart.masquerade.Configuration; import com.flipkart.masquerade.rule.Rule; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeSpec; import com.squareup.javapoet.TypeVariableName; import javax.lang.model.element.Modifier; import static com.flipkart.masquerade.util.Helper.*; import static com.flipkart.masquerade.util.Strings.*; /** * Processor that creates an interface for a Mask * <p /> * Created by shrey.garg on 12/05/17. 
*/ public class InterfaceProcessor { private final Configuration configuration; private final TypeSpec.Builder cloakBuilder; /** * @param configuration Configuration for the current processing cycle * @param cloakBuilder Entry class under construction for the cycle */ public InterfaceProcessor(Configuration configuration, TypeSpec.Builder cloakBuilder) { this.configuration = configuration; this.cloakBuilder = cloakBuilder; } /** * @param rule The rule to generate the interface for * @return A fully constructed TypeSpec object for the interface */ public TypeSpec generateInterface(Rule rule) { TypeSpec.Builder ruleInterface = TypeSpec.interfaceBuilder(getInterfaceName(rule)); ruleInterface.addModifiers(Modifier.PUBLIC); ruleInterface.addTypeVariable(TypeVariableName.get("T")); MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder(INTERFACE_METHOD); methodBuilder.addModifiers(Modifier.ABSTRACT, Modifier.PUBLIC); methodBuilder.addParameter(TypeVariableName.get("T"), OBJECT_PARAMETER); methodBuilder.addParameter(rule.getEvaluatorClass(), EVAL_PARAMETER); methodBuilder.addParameter(getEntryClass(configuration), CLOAK_PARAMETER); methodBuilder.addParameter(getRepositoryClass(configuration), SET_PARAMETER); if (configuration.isNativeSerializationEnabled()) { methodBuilder.addParameter(StringBuilder.class, SERIALIZED_OBJECT); } ruleInterface.addMethod(methodBuilder.build()); return ruleInterface.build(); } }
apache-2.0
trustathsh/visitmeta
dataservice/src/main/java/de/hshannover/f4/trust/visitmeta/persistence/inmemory/InMemoryMetadata.java
4123
/* * #%L * ===================================================== * _____ _ ____ _ _ _ _ * |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | | * | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| | * | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ | * |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_| * \____/ * * ===================================================== * * Hochschule Hannover * (University of Applied Sciences and Arts, Hannover) * Faculty IV, Dept. of Computer Science * Ricklinger Stadtweg 118, 30459 Hannover, Germany * * Email: trust@f4-i.fh-hannover.de * Website: http://trust.f4.hs-hannover.de/ * * This file is part of visitmeta-dataservice, version 0.6.0, * implemented by the Trust@HsH research group at the Hochschule Hannover. * %% * Copyright (C) 2012 - 2016 Trust@HsH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package de.hshannover.f4.trust.visitmeta.persistence.inmemory; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.log4j.Logger; import org.neo4j.graphdb.NotFoundException; import de.hshannover.f4.trust.visitmeta.dataservice.internalDatatypes.InternalMetadata; public class InMemoryMetadata extends InternalMetadata { private static final Logger log = Logger.getLogger(InMemoryMetadata.class); private Map<String, String> mProperties; private String mTypename; private boolean mIsSingleValue; private String mRawData; private long mPublishTimestamp; private long mDeleteTimestamp = InternalMetadata.METADATA_NOT_DELETED_TIMESTAMP; private InMemoryMetadata() { mProperties = new HashMap<String, String>(); } public InMemoryMetadata(String typename, boolean isSingleValue, long publishTimestamp) { this(); mTypename = typename; mIsSingleValue = isSingleValue; mPublishTimestamp = publishTimestamp; } public InMemoryMetadata(InternalMetadata original) { this(original.getTypeName(), original.isSingleValue(), original.getPublishTimestamp()); mRawData = original.getRawData(); for (String property : original.getProperties()) { this.addProperty(property, original.valueFor(property)); } } @Override public InMemoryMetadata clone() { return new InMemoryMetadata(this); } @Override public void addProperty(String name, String value) { if (mProperties.containsKey(name)) { log.warn("property '"+name+"' already exists, overwriting with '"+value+"'"); } mProperties.put(name, value); } @Override public List<String> getProperties() { return new ArrayList<String>(mProperties.keySet()); } @Override public boolean hasProperty(String p) { if (mProperties.containsKey(p)) { return true; } return false; } @Override public String valueFor(String p) { try { return mProperties.get(p); } catch(NotFoundException e) { log.warn("This Metadata does not contain the property " + p + "! 
" + this); } return ""; } @Override public boolean isSingleValue() { return mIsSingleValue; } @Override public String getTypeName() { return mTypename; } @Override public long getPublishTimestamp() { return mPublishTimestamp; } @Override public long getDeleteTimestamp() { if(this.isNotify()) { return getPublishTimestamp(); } return mDeleteTimestamp; } @Override public void setPublishTimestamp(long timestamp) { mPublishTimestamp = timestamp; } @Override public String getRawData() { return mRawData; } public void setRawData(String rawData) { mRawData = rawData; } }
apache-2.0
daima/solo-spring
src/main/java/org/b3log/solo/controller/InitProcessor.java
7183
/*
 * Copyright (c) 2017, cxy7.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.b3log.solo.controller;

import java.net.URLDecoder;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.validator.routines.EmailValidator;
import org.b3log.solo.Keys;
import org.b3log.solo.Latkes;
import org.b3log.solo.SoloConstant;
import org.b3log.solo.model.Common;
import org.b3log.solo.model.Role;
import org.b3log.solo.model.User;
import org.b3log.solo.model.UserExt;
import org.b3log.solo.module.util.QueryResults;
import org.b3log.solo.module.util.Thumbnails;
import org.b3log.solo.renderer.ConsoleRenderer;
import org.b3log.solo.renderer.JSONRenderer;
import org.b3log.solo.renderer.freemarker.AbstractFreeMarkerRenderer;
import org.b3log.solo.service.InitService;
import org.b3log.solo.service.LangPropsService;
import org.b3log.solo.service.html.Filler;
import org.b3log.solo.util.Locales;
import org.b3log.solo.util.Sessions;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

/**
 * Solo initialization service.
 *
 * @author <a href="http://cxy7.com">XyCai</a>
 * @version 1.2.0.10, Aug 9, 2016
 * @since 0.4.0
 */
@Controller
public class InitProcessor {

	/**
	 * Logger.
	 */
	private static Logger logger = LoggerFactory.getLogger(InitProcessor.class);

	/**
	 * Initialization service.
	 */
	@Autowired
	private InitService initService;

	/**
	 * Filler.
	 */
	@Autowired
	private Filler filler;

	/**
	 * Language service.
	 */
	@Autowired
	private LangPropsService langPropsService;

	/**
	 * Max user name length.
	 */
	public static final int MAX_USER_NAME_LENGTH = 20;

	/**
	 * Min user name length.
	 */
	public static final int MIN_USER_NAME_LENGTH = 1;

	/**
	 * Shows initialization page. Redirects to the home page if the blog has
	 * already been initialized.
	 *
	 * @param request
	 *            the specified http servlet request
	 * @param response
	 *            the specified http servlet response
	 * @throws Exception
	 *             exception
	 */
	@RequestMapping(value = "/init", method = RequestMethod.GET)
	public void showInit(final HttpServletRequest request, final HttpServletResponse response) throws Exception {
		// Initialization is a one-shot process; once done, /init is disabled.
		if (initService.isInited()) {
			response.sendRedirect("/");

			return;
		}

		final AbstractFreeMarkerRenderer renderer = new ConsoleRenderer();

		renderer.setTemplateName("init.ftl");
		final Map<String, Object> dataModel = renderer.getDataModel();
		// Localized labels for the request's locale.
		final Map<String, String> langs = langPropsService.getAll(Locales.getLocale(request));

		dataModel.putAll(langs);
		dataModel.put(Common.VERSION, SoloConstant.VERSION);
		dataModel.put(Common.STATIC_RESOURCE_VERSION, Latkes.getStaticResourceVersion());
		dataModel.put(Common.YEAR, String.valueOf(Calendar.getInstance().get(Calendar.YEAR)));

		Keys.fillRuntime(dataModel);
		filler.fillMinified(dataModel);

		renderer.render(request, response);
	}

	/**
	 * Initializes Solo with the admin account described in the request body,
	 * then logs that admin in.
	 *
	 * @param request
	 *            the specified http servlet request
	 * @param response
	 *            the specified http servlet response
	 * @param body
	 *            URL-encoded JSON of the form
	 *
	 *            <pre>
	 * {
	 *     "userName": "",
	 *     "userEmail": "",
	 *     "userPassword": ""
	 * }
	 *            </pre>
	 *
	 * @throws Exception
	 *             exception
	 */
	@RequestMapping(value = "/init", method = RequestMethod.POST)
	public void initSolo(final HttpServletRequest request, final HttpServletResponse response, @RequestBody String body)
			throws Exception {
		// Guard against re-initialization.
		if (initService.isInited()) {
			response.sendRedirect("/");

			return;
		}

		final JSONObject ret = QueryResults.defaultResult();
		final JSONRenderer renderer = new JSONRenderer();
		renderer.setJSONObject(ret);

		try {
			// Body arrives URL-encoded; decode before parsing as JSON.
			body = URLDecoder.decode(body, "UTF-8");
			final JSONObject requestJSONObject = new JSONObject(body);
			final String userName = requestJSONObject.optString(User.USER_NAME);
			final String userEmail = requestJSONObject.optString(User.USER_EMAIL);
			final String userPassword = requestJSONObject.optString(User.USER_PASSWORD);

			// All three fields are required and the email must be well-formed.
			if (StringUtils.isBlank(userName) || StringUtils.isBlank(userEmail) || StringUtils.isBlank(userPassword)
					|| !EmailValidator.getInstance().isValid(userEmail)) {
				ret.put(Keys.MSG, "Init failed, please check your input");
				renderer.render(request, response);
				return;
			}

			if (invalidUserName(userName)) {
				ret.put(Keys.MSG,
						"Init failed, please check your username (length [1, 20], content {a-z, A-Z, 0-9}, do not contain 'admin' for security reason]");
				renderer.render(request, response);
				return;
			}

			final Locale locale = Locales.getLocale(request);

			requestJSONObject.put(Keys.LOCALE, locale.toString());

			initService.init(requestJSONObject);

			// If initialized, login the admin
			final JSONObject admin = new JSONObject();

			admin.put(User.USER_NAME, userName);
			admin.put(User.USER_EMAIL, userEmail);
			admin.put(User.USER_ROLE, Role.ADMIN_ROLE);
			admin.put(User.USER_PASSWORD, userPassword);
			admin.put(UserExt.USER_AVATAR, Thumbnails.getGravatarURL(userEmail, "128"));

			Sessions.login(request, response, admin);

			ret.put(Keys.STATUS_CODE, true);
		} catch (final Exception e) {
			// Report any init failure back to the client as the message field.
			logger.error(e.getMessage(), e);

			ret.put(Keys.MSG, e.getMessage());
		}
		renderer.render(request, response);
	}

	/**
	 * Checks whether the specified name is invalid.
	 *
	 * <p>
	 * A valid user name:
	 * <ul>
	 * <li>length [1, 20]</li>
	 * <li>content {a-z, A-Z, 0-9}</li>
	 * <li>Not contains "admin"/"Admin"</li>
	 * </ul>
	 * </p>
	 *
	 * @param name
	 *            the specified name
	 * @return {@code true} if it is invalid, returns {@code false} otherwise
	 */
	public static boolean invalidUserName(final String name) {
		final int length = name.length();

		if (length < MIN_USER_NAME_LENGTH || length > MAX_USER_NAME_LENGTH) {
			return true;
		}

		char c;

		// Reject any character outside [a-zA-Z0-9].
		for (int i = 0; i < length; i++) {
			c = name.charAt(i);

			if (('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || '0' <= c && c <= '9') {
				continue;
			}

			return true;
		}

		// NOTE(review): only the exact substrings "admin"/"Admin" are blocked;
		// mixed-case variants like "aDmin" pass — confirm this is intended.
		return name.contains("admin") || name.contains("Admin");
	}
}
apache-2.0
cfieber/clouddriver
clouddriver-kubernetes/src/main/groovy/com/netflix/spinnaker/clouddriver/kubernetes/v2/description/manifest/KubernetesManifest.java
5075
/*
 * Copyright 2017 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.netflix.spinnaker.clouddriver.kubernetes.v2.description.manifest;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * A loosely-typed view over a parsed Kubernetes manifest. The manifest is kept as the raw
 * {@code Map<String, Object>} produced by deserialization, and the accessors below read and
 * write well-known fields ({@code kind}, {@code apiVersion}, {@code metadata}, ...) in place.
 */
public class KubernetesManifest extends HashMap<String, Object> {
  // ObjectMapper is thread-safe once configured; final so the shared instance
  // cannot be reassigned (previously it was a mutable static).
  private static final ObjectMapper mapper = new ObjectMapper();

  /**
   * Reads {@code field} from the manifest, throwing a {@code MalformedManifestException}
   * when the field is absent.
   */
  @SuppressWarnings("unchecked")
  private static <T> T getRequiredField(KubernetesManifest manifest, String field) {
    T res = (T) manifest.get(field);
    if (res == null) {
      throw MalformedManifestException.missingField(manifest, field);
    }
    return res;
  }

  @JsonIgnore
  public KubernetesKind getKind() {
    return KubernetesKind.fromString(getRequiredField(this, "kind"));
  }

  @JsonIgnore
  public void setKind(KubernetesKind kind) {
    put("kind", kind.toString());
  }

  @JsonIgnore
  public KubernetesApiVersion getApiVersion() {
    return KubernetesApiVersion.fromString(getRequiredField(this, "apiVersion"));
  }

  @JsonIgnore
  public void setApiVersion(KubernetesApiVersion apiVersion) {
    put("apiVersion", apiVersion.toString());
  }

  @JsonIgnore
  private Map<String, Object> getMetadata() {
    return getRequiredField(this, "metadata");
  }

  @JsonIgnore
  public String getName() {
    return (String) getMetadata().get("name");
  }

  @JsonIgnore
  public void setName(String name) {
    getMetadata().put("name", name);
  }

  @JsonIgnore
  public String getNamespace() {
    String namespace = (String) getMetadata().get("namespace");
    // A missing/blank namespace is treated as the "default" namespace.
    return StringUtils.isEmpty(namespace) ? "default" : namespace;
  }

  @JsonIgnore
  public void setNamespace(String namespace) {
    getMetadata().put("namespace", namespace);
  }

  @JsonIgnore
  public String getCreationTimestamp() {
    // Fix: previously this NPE'd for manifests that have no server-assigned
    // creationTimestamp (e.g. manifests never submitted to the cluster).
    Object timestamp = getMetadata().get("creationTimestamp");
    return timestamp == null ? "" : timestamp.toString();
  }

  @JsonIgnore
  public List<OwnerReference> getOwnerReferences() {
    Map<String, Object> metadata = getMetadata();
    Object ownerReferences = metadata.get("ownerReferences");
    if (ownerReferences == null) {
      return new ArrayList<>();
    }

    return mapper.convertValue(ownerReferences, new TypeReference<List<OwnerReference>>() {});
  }

  @JsonIgnore
  @SuppressWarnings("unchecked")
  public Map<String, String> getAnnotations() {
    Map<String, String> result = (Map<String, String>) getMetadata().get("annotations");
    if (result == null) {
      // Install an empty map so callers can mutate the returned annotations in place.
      result = new HashMap<>();
      getMetadata().put("annotations", result);
    }

    return result;
  }

  /**
   * Returns the annotations of {@code .spec.template.metadata} (the pod template of a
   * workload manifest), or {@link Optional#empty()} when any segment of that path is absent.
   * When the template exists but has no annotations, an empty map is installed and returned
   * so callers can mutate it in place.
   */
  @JsonIgnore
  @SuppressWarnings("unchecked")
  public Optional<Map<String, String>> getSpecTemplateAnnotations() {
    if (!containsKey("spec")) {
      return Optional.empty();
    }

    Map<String, Object> spec = (Map<String, Object>) get("spec");
    if (!spec.containsKey("template")) {
      return Optional.empty();
    }

    Map<String, Object> template = (Map<String, Object>) spec.get("template");
    if (!template.containsKey("metadata")) {
      return Optional.empty();
    }

    Map<String, Object> metadata = (Map<String, Object>) template.get("metadata");
    Map<String, String> result = (Map<String, String>) metadata.get("annotations");
    if (result == null) {
      result = new HashMap<>();
      metadata.put("annotations", result);
    }

    return Optional.of(result);
  }

  @JsonIgnore
  public Object getStatus() {
    return get("status");
  }

  @JsonIgnore
  public String getFullResourceName() {
    return getFullResourceName(getKind(), getName());
  }

  /** Builds the canonical "&lt;kind&gt; &lt;name&gt;" identifier for a resource. */
  public static String getFullResourceName(KubernetesKind kind, String name) {
    return String.join(" ", kind.toString(), name);
  }

  /**
   * Parses a full resource name of the form "&lt;kind&gt; &lt;name&gt;" back into its parts.
   *
   * @throws IllegalArgumentException when the input does not have exactly two segments
   */
  public static Pair<KubernetesKind, String> fromFullResourceName(String fullResourceName) {
    String[] split = fullResourceName.split(" ");
    if (split.length != 2) {
      throw new IllegalArgumentException("Expected a full resource name of the form <kind> <name>");
    }

    KubernetesKind kind = KubernetesKind.fromString(split[0]);
    String name = split[1];

    return new ImmutablePair<>(kind, name);
  }

  /** Minimal view of a Kubernetes {@code ownerReferences} entry. */
  @Data
  public static class OwnerReference {
    KubernetesApiVersion apiVersion;
    KubernetesKind kind;
    String name;
    String uid;
    boolean blockOwnerDeletion;
    boolean controller;
  }
}
apache-2.0
ernestp/consulo
platform/dvcs-impl/src/com/intellij/dvcs/repo/AbstractRepositoryManager.java
3252
package com.intellij.dvcs.repo;

import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.changes.ChangesUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * Base class for VCS-specific {@link RepositoryManager} implementations. Delegates lookups to
 * the shared {@link VcsRepositoryManager} and keeps only repositories that belong to this
 * manager's VCS and still have a valid repository directory (e.g. {@code .git}) under the root.
 */
public abstract class AbstractRepositoryManager<T extends Repository> implements RepositoryManager<T> {

  @NotNull private final AbstractVcs myVcs;
  @NotNull private final String myRepoDirName;
  @NotNull private final VcsRepositoryManager myGlobalRepositoryManager;

  protected AbstractRepositoryManager(@NotNull VcsRepositoryManager globalRepositoryManager,
                                      @NotNull AbstractVcs vcs,
                                      @NotNull String repoDirName) {
    myGlobalRepositoryManager = globalRepositoryManager;
    myVcs = vcs;
    myRepoDirName = repoDirName;
  }

  @Override
  @Nullable
  public T getRepositoryForRoot(@Nullable VirtualFile root) {
    Repository found = myGlobalRepositoryManager.getRepositoryForRoot(root);
    if (!isRootValid(found)) {
      return null;
    }
    //noinspection unchecked
    return (T)found;
  }

  @Override
  public void addExternalRepository(@NotNull VirtualFile root, @NotNull T repository) {
    myGlobalRepositoryManager.addExternalRepository(root, repository);
  }

  @Override
  public void removeExternalRepository(@NotNull VirtualFile root) {
    myGlobalRepositoryManager.removeExternalRepository(root);
  }

  @Override
  public boolean isExternal(@NotNull T repository) {
    return myGlobalRepositoryManager.isExternal(repository);
  }

  @Override
  @Nullable
  public T getRepositoryForFile(@NotNull VirtualFile file) {
    Repository candidate = myGlobalRepositoryManager.getRepositoryForFile(file);
    if (candidate == null || !myVcs.equals(candidate.getVcs())) {
      return null;
    }
    //noinspection unchecked
    return (T)candidate;
  }

  @Override
  @Nullable
  public T getRepositoryForFile(@NotNull FilePath file) {
    VirtualFile parent = ChangesUtil.findValidParentAccurately(file);
    if (parent == null) {
      return null;
    }
    return getRepositoryForFile(parent);
  }

  /** Filters the globally known repositories down to instances of the given concrete type. */
  @NotNull
  protected List<T> getRepositories(Class<T> type) {
    return ContainerUtil.findAll(myGlobalRepositoryManager.getRepositories(), type);
  }

  @NotNull
  @Override
  public abstract List<T> getRepositories();

  @Override
  public boolean moreThanOneRoot() {
    return getRepositories().size() > 1;
  }

  @Override
  public void updateRepository(@Nullable VirtualFile root) {
    T repo = getRepositoryForRoot(root);
    if (repo != null) {
      repo.update();
    }
  }

  @Override
  public void updateAllRepositories() {
    for (T repo : getRepositories()) {
      repo.update();
    }
  }

  /**
   * A repository is valid for this manager when it belongs to this manager's VCS and its root
   * still contains the VCS metadata directory (catches roots whose repo dir was deleted).
   */
  private boolean isRootValid(@Nullable Repository repository) {
    if (repository == null) {
      return false;
    }
    if (!myVcs.equals(repository.getVcs())) {
      return false;
    }
    VirtualFile repoDir = repository.getRoot().findChild(myRepoDirName);
    return repoDir != null && repoDir.exists();
  }
}
apache-2.0
mahadevshindhe/thymeleaf-spring4
src/main/java/org/thymeleaf/spring4/expression/BeansPropertyAccessor.java
3270
/* * ============================================================================= * * Copyright (c) 2011-2012, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package org.thymeleaf.spring4.expression; import org.springframework.expression.AccessException; import org.springframework.expression.EvaluationContext; import org.springframework.expression.TypedValue; import org.springframework.expression.spel.support.ReflectivePropertyAccessor; import org.thymeleaf.spring4.context.Beans; /** * * @author Daniel Fern&aacute;ndez * * @since 1.1 * */ class BeansPropertyAccessor extends ReflectivePropertyAccessor { private static final Class<?>[] TARGET_CLASSES = new Class<?>[] { Beans.class }; public static final BeansPropertyAccessor INSTANCE = new BeansPropertyAccessor(); public BeansPropertyAccessor() { super(); } @Override public Class<?>[] getSpecificTargetClasses() { return TARGET_CLASSES; } @Override public boolean canRead(final EvaluationContext context, final Object target, final String name) throws AccessException { if (target == null) { return false; } if (!(target instanceof Beans)) { // This can happen simply because we're applying the same // AST tree on a different class (Spring internally caches property accessors). // So this exception might be considered "normal" by Spring AST evaluator and // just use it to refresh the property accessor cache. 
throw new AccessException("Cannot read target of class " + target.getClass().getName()); } return ((Beans)target).containsKey(name); } @Override public TypedValue read(final EvaluationContext context, final Object target, final String name) throws AccessException { if (target == null) { throw new AccessException("Cannot read property of null target"); } if (!(target instanceof Beans)) { // This can happen simply because we're applying the same // AST tree on a different class (Spring internally caches property accessors). // So this exception might be considered "normal" by Spring AST evaluator and // just use it to refresh the property accessor cache. throw new AccessException("Cannot read target of class " + target.getClass().getName()); } return new TypedValue(((Beans)target).get(name)); } }
apache-2.0
egacl/gatewayio
GatewayExample/src/main/java/cl/io/gateway/example/filter/TestMessageFilter.java
2094
/* * Copyright 2017 GetSoftware (http://www.getsoftware.cl) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cl.io.gateway.example.filter; import cl.io.gateway.IGatewayClientSession; import cl.io.gateway.example.networkservice.TestMessage; import cl.io.gateway.messaging.GatewayMessageFilter; import cl.io.gateway.messaging.IGatewayMessageFilter; import cl.io.gateway.network.NetworkMessage; /** * This class represents an example message filter which is defined from * the @GatewayMessageFilter annotation and the IGatewayMessageFilter interface. * In this case, the filter applies to the event with the identifier 'testvent' * and priority '1'. * * @author egacl * */ @GatewayMessageFilter(value = 1, event = "testevent", messageType = TestMessage.class) public class TestMessageFilter implements IGatewayMessageFilter<TestMessage> { @Override public boolean doFilterRequest(NetworkMessage<TestMessage> message, IGatewayClientSession client) throws Exception { System.out.println("Hello! I'am a filter request priority 1: " + message); return true; } @Override public boolean doFilterResponse(NetworkMessage<TestMessage> message, IGatewayClientSession client) throws Exception { System.out.println("Hello! I'am a filter response priority 1: " + message); return true; } @Override public void onError(NetworkMessage<TestMessage> message, IGatewayClientSession client, Throwable err) throws Exception { System.err.println("Error processing " + message); } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-pinpoint/src/main/java/com/amazonaws/services/pinpoint/model/transform/ExportJobsResponseMarshaller.java
2295
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pinpoint.model.transform;

import java.util.List;
import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.pinpoint.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ExportJobsResponseMarshaller
 *
 * <p>
 * Code-generated marshaller that writes an {@code ExportJobsResponse} into the JSON payload
 * via the protocol marshaller framework. Do not hand-edit: regenerated by the AWS SDK code
 * generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ExportJobsResponseMarshaller {

    // Binds the "Item" list to its payload location in the marshalled JSON.
    private static final MarshallingInfo<List> ITEM_BINDING = MarshallingInfo.builder(MarshallingType.LIST).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Item").build();
    // Binds the pagination token to its payload location in the marshalled JSON.
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("NextToken").build();

    // Stateless, so a single shared instance suffices.
    private static final ExportJobsResponseMarshaller instance = new ExportJobsResponseMarshaller();

    public static ExportJobsResponseMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param exportJobsResponse
     *        the model object to marshall; must not be {@code null}
     * @param protocolMarshaller
     *        sink that receives each field/binding pair
     * @throws SdkClientException
     *         if the model object is {@code null} or any field fails to marshall
     */
    public void marshall(ExportJobsResponse exportJobsResponse, ProtocolMarshaller protocolMarshaller) {

        if (exportJobsResponse == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // NOTE(review): call order mirrors the generated binding order — presumably
            // significant for payload field ordering; preserve as-is.
            protocolMarshaller.marshall(exportJobsResponse.getItem(), ITEM_BINDING);
            protocolMarshaller.marshall(exportJobsResponse.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
SnappyDataInc/snappy-store
gemfirexd/tools/src/test/java/com/pivotal/gemfirexd/internal/engine/store/RowFormatterTest.java
81029
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ /* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ // // RowFormatterTest.java // gemfire // // Created by Eric Zoerner on 2009-05-05. 
package com.pivotal.gemfirexd.internal.engine.store; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.Arrays; import java.util.Calendar; import java.util.List; import java.util.Properties; import java.util.Random; import java.util.Set; import junit.framework.TestSuite; import junit.textui.TestRunner; import com.gemstone.gemfire.DataSerializer; import com.gemstone.gemfire.internal.AvailablePort; import com.gemstone.gemfire.internal.InternalDataSerializer; import com.gemstone.gemfire.internal.cache.LocalRegion; import com.gemstone.gemfire.internal.cache.PartitionedRegion; import com.gemstone.gemfire.internal.offheap.annotations.Released; import com.gemstone.gemfire.internal.offheap.annotations.Retained; import com.gemstone.gnu.trove.TIntArrayList; import com.pivotal.gemfirexd.TestUtil; import com.pivotal.gemfirexd.internal.catalog.UUID; import com.pivotal.gemfirexd.internal.catalog.types.DefaultInfoImpl; import com.pivotal.gemfirexd.internal.engine.Misc; import com.pivotal.gemfirexd.internal.engine.distributed.ByteArrayDataOutput; import com.pivotal.gemfirexd.internal.engine.sql.catalog.ExtraTableInfo; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.services.io.FormatableBitSet; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ColumnDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ColumnDescriptorList; import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor; import com.pivotal.gemfirexd.internal.iapi.types.DataTypeUtilities; import com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor; import 
com.pivotal.gemfirexd.internal.iapi.types.SQLChar; import com.pivotal.gemfirexd.internal.iapi.types.SQLDate; import com.pivotal.gemfirexd.internal.iapi.types.SQLDecimal; import com.pivotal.gemfirexd.internal.iapi.types.SQLDouble; import com.pivotal.gemfirexd.internal.iapi.types.SQLInteger; import com.pivotal.gemfirexd.internal.iapi.types.SQLLongint; import com.pivotal.gemfirexd.internal.iapi.types.SQLReal; import com.pivotal.gemfirexd.internal.iapi.types.SQLSmallint; import com.pivotal.gemfirexd.internal.iapi.types.SQLTime; import com.pivotal.gemfirexd.internal.iapi.types.SQLTimestamp; import com.pivotal.gemfirexd.internal.iapi.types.SQLTinyint; import com.pivotal.gemfirexd.internal.iapi.types.SQLVarchar; import com.pivotal.gemfirexd.internal.iapi.types.TypeId; import com.pivotal.gemfirexd.jdbc.JdbcTestBase; @SuppressWarnings("unchecked") public class RowFormatterTest extends JdbcTestBase { public static void main(String[] args) { TestRunner.run(new TestSuite(RowFormatterTest.class)); } public static final class NullHolder implements ResultWasNull { boolean wasNull; public void setWasNull() { this.wasNull = true; } public boolean wasNull() { return this.wasNull; } } public RowFormatterTest(String name) { super(name); } public void testReadAndWriteInts() { byte[] bytes = new byte[256]; int numToTest = 1024; Random rand = new Random(); // make sure we test at least one negative int boolean testedNeg = false; for (int i = 0; i < numToTest || !testedNeg; i++) { int nextInt = rand.nextInt(); if (nextInt < 0) testedNeg = true; assertEquals(4, RowFormatter.writeInt(bytes, nextInt, 42)); assertEquals(nextInt, RowFormatter.readInt(bytes, 42)); } } public void testReadAndWriteCompactInts() throws IOException { byte[] bytes = new byte[256]; final ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutputStream dout = new DataOutputStream(bos); final int numToTest = 1024; TIntArrayList allInts = new TIntArrayList(numToTest); Random rand = new Random(); // make sure we 
test at least one negative int boolean testedNeg = false; for (int i = 0; i < numToTest || !testedNeg; i++) { int nextInt = rand.nextInt(); if (nextInt < 0) { testedNeg = true; } assertTrue(RowFormatter.writeCompactInt(bytes, nextInt, 42) < 47); InternalDataSerializer.writeSignedVL(nextInt, dout); assertTrue(RowFormatter.getCompactIntNumBytes(nextInt) <= 5); allInts.add(nextInt); assertEquals(nextInt, RowFormatter.readCompactInt(bytes, 42)); } // check for some border cases everytime int[] intsToTest = new int[] { 0x40, 0x3f, 0x7f, 0x80, 0x78, 0xffffff40, 0xffffff80, 0xffffffc0, 0xffffff13 }; for (int intToTest : intsToTest) { assertTrue(RowFormatter.writeCompactInt(bytes, intToTest, 42) < 44); InternalDataSerializer.writeSignedVL(intToTest, dout); assertTrue(RowFormatter.getCompactIntNumBytes(intToTest) <= 2); allInts.add(intToTest); assertEquals(intToTest, RowFormatter.readCompactInt(bytes, 42)); } intsToTest = new int[] { 0x6fff, 0x7fff, 0x8fff, 0x8000, 0x4000, 0xa000, 0xb000, 0xc000, 0x1fffff, 0x3fffff, 0x5fffff, 0x7fffff, 0x8fffff, 0x800000, 0x400000, 0xa00000, 0xb00000, 0xc00000, 0xfffffff, 0x1fffffff, 0x5fffffff, 0x7fffffff, 0x8fffffff, 0xffffffff, 0x4000000, 0x8000000, 0x40000000, 0x80000000, 0xa0000000, 0xb0000000, 0xc0000000, 0xf0000000, 0xffff8000, 0xffff4000, 0xffff2000, 0xfffff000, 0xfffffe00, 0xffefffff, 0xffe00000, 0xff100000, 0xfef10000, 0xff800000, 0xff600000, 0xf1000000, 0xfe000000, 0xf3000000, 0x81000000, 0xab000000 }; for (int intToTest : intsToTest) { assertTrue(RowFormatter.writeCompactInt(bytes, intToTest, 42) < 47); InternalDataSerializer.writeSignedVL(intToTest, dout); assertTrue(RowFormatter.getCompactIntNumBytes(intToTest) <= 5); allInts.add(intToTest); assertEquals(intToTest, RowFormatter.readCompactInt(bytes, 42)); } // now check for deserialization using InternalDataSerializer.readCompactInt DataInputStream in = new DataInputStream(new ByteArrayInputStream( bos.toByteArray())); for (int i = 0; i < allInts.size(); i++) { 
assertEquals(allInts.get(i), (int)InternalDataSerializer.readSignedVL(in)); } } public void testReadAndWriteLongs() throws IOException { byte[] bytes = new byte[256]; int numToTest = 1024; Random rand = new Random(); // make sure we test at least one negative long boolean testedNeg = false; for (int i = 0; i < numToTest || !testedNeg; i++) { long nextLong = rand.nextLong(); if (nextLong < 0) testedNeg = true; assertEquals(8, RowFormatter.writeLong(bytes, nextLong, 42)); assertEquals(nextLong, RowFormatter.readLong(bytes, 42)); } } public void testReadAndWriteSmallInts() { byte[] bytes = new byte[256]; int numToTest = 1024; Random rand = new Random(); // make sure we test at least one negative short boolean testedNeg = false; for (int i = 0; i < numToTest || !testedNeg; i++) { short nextShort = (short)rand.nextInt(); if (nextShort < 0) testedNeg = true; assertEquals(2, new SQLSmallint(nextShort).writeBytes(bytes, 42, null)); SQLSmallint newSmallInt = new SQLSmallint(); assertEquals(2, newSmallInt.readBytes(bytes, 42, 2)); assertEquals(nextShort, ((Integer)newSmallInt.getObject()).intValue()); } } public void testBug49310() throws Exception { Connection conn = getConnection(); Statement stmt = conn.createStatement(); stmt.execute("create table trade.customers (cid int not null, " + "cust_name varchar(100), since date, addr varchar(100), " + "tid int, primary key (cid)) partition by list (tid) " + "(VALUES (14, 1, 2, 10, 4, 5), VALUES (6, 7, 16, 9, 3, 11), " + "VALUES (12, 13, 0, 15, 8, 17))"); stmt.execute("create index idx_1 on trade.customers(cust_name)"); final byte[] bytes1 = new byte[] { 2, 0, 0, 65, 8, 111, 32, 99, 7, -42, 6, 25, 97, 100, 100, 114, 101, 115, 115, 32, 105, 115, 32, 111, 32, 99, 0, 0, 0, 17, 5, 0, 8, 0, 12, 0, 26, 0 }; final byte[] bytes2 = new byte[] { 2, 0, 0, 59, -68, 111, 7, -48, 4, 3, 121, 97, 116, 101, 109, 121, 113, 97, 106, 106, 102, 122, 32, 109, 107, 114, 122, 105, 107, 121, 98, 97, 32, 108, 112, 98, 121, 101, 32, 32, 105, 110, 115, 32, 
120, 108, 102, 108, 116, 101, 120, 118, 115, 101, 118, 98, 115, 106, 114, 121, 97, 119, 106, 0, 0, 0, 12, 5, 0, 6, 0, 10, 0, 63, 0 }; RowFormatter rf = ((GemFireContainer)Misc.getRegion("/TRADE/CUSTOMERS", true, false).getUserAttribute()).getCurrentRowFormatter(); DataValueDescriptor[] dvds1 = new DataValueDescriptor[rf.getNumColumns()]; rf.getColumns(bytes1, dvds1, null); assertEquals("o c", dvds1[1].toString()); DataValueDescriptor[] dvds2 = new DataValueDescriptor[rf.getNumColumns()]; rf.getColumns(bytes2, dvds2, null); assertEquals("o", dvds2[1].toString()); // compare DVDs assertTrue(dvds1[1].compare(dvds2[1]) > 0); assertTrue(dvds2[1].compare(dvds1[1]) < 0); // then the serialized forms long off1 = rf.getOffsetAndWidth(2, bytes1); int columnWidth1 = (int)(off1 & 0xFFFFFFFF); int offset1 = (int)(off1 >>> Integer.SIZE); long off2 = rf.getOffsetAndWidth(2, bytes2); int columnWidth2 = (int)(off2 & 0xFFFFFFFF); int offset2 = (int)(off2 >>> Integer.SIZE); assertTrue(SQLChar.compareString(bytes1, offset1, columnWidth1, bytes2, offset2, columnWidth2) > 0); assertTrue(SQLChar.compareString(bytes2, offset2, columnWidth2, bytes1, offset1, columnWidth1) < 0); } public void testGenerateRowWithNotNullableIntegers() throws StandardException, IOException { int hiValue = 10; int loValue = -10; int numFields = (hiValue - loValue) / 2 + 1; int numBytes = numFields * 4 + 1 /* for version */; final ByteArrayOutputStream bos = new ByteArrayOutputStream(); final DataOutputStream dos = new DataOutputStream(bos); InternalDataSerializer.writeSignedVL(1, dos); // for version DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int index = 0; for (int v = -10; v <= 10; v +=2, index++) { dvds[index] = new SQLInteger(v); // fill in expected bytes dos.writeInt(v); /* int shiftedV = v; for (int bi = 0; bi < 4; bi++) { expectedBytes[(index * 4) + bi] = (byte)shiftedV; shiftedV >>>= 8; } */ } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for 
(int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER_NOT_NULL, null, // default null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // verify bytes is correct final byte[] expectedBytes = bos.toByteArray(); assertNotNull(bytes); assertEquals(numBytes, bytes.length); assertTrue(Arrays.equals(expectedBytes, bytes)); } public void testGetNotNullableIntegers() throws StandardException { int hiValue = 10; int loValue = -10; int increment = 2; int numFields = (hiValue - loValue) / increment + 1; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int index = 0; for (int v = -10; v <= 10; v +=2, index++) { dvds[index] = new SQLInteger(v); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER_NOT_NULL, new SQLInteger(0), // default null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify int values index = 0; for (int v = -10; v <= 10; v += 2, index++) { assertEquals("for index=" + index, v, outDvds[index].getObject()); } } public void testGetNullableIntegersNoNulls() throws StandardException { int hiValue = 10; int loValue = -10; int increment = 2; int numFields = (hiValue - loValue) / increment + 1; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int index = 0; for (int v = -10; v <= 10; v +=2, 
index++) { dvds[index] = new SQLInteger(v); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify int values index = 0; for (int v = -10; v <= 10; v += 2, index++) { assertEquals("for index=" + index, v, outDvds[index].getObject()); } } public void testFixedWidthCharsWithPadding() throws StandardException { int numFields = 10; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; String[] strings = new String[numFields]; for (int i = 0; i < numFields; i++) { char[] cha = new char[i]; Arrays.fill(cha, (char)('A' + i)); strings[i] = new String(cha); dvds[i] = new SQLChar(new String(strings[i])); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); int expectedBytesLength = 1; // for version for (int p = 1; p <= dvds.length; p++) { DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor( Types.CHAR, false, // not nullable // includes 5 extra characters of padding for test purposes p + 4); expectedBytesLength += p - 1; // now one byte per char + offset bytes below ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLChar(), // default ("") null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); int numOffsetBytes = rf.getNumOffsetBytes(); expectedBytesLength += 
numOffsetBytes * dvds.length; byte[] bytes = rf.generateBytes(dvds); // test to make sure bytes is expected length for fixed-width chars assertEquals(expectedBytesLength, bytes.length); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify String values char[] paddingChars = new char[5]; Arrays.fill(paddingChars, ' '); //String padding = new String(paddingChars); for (int i = 0; i < numFields; i++) { String expectedString = strings[i];// + padding; assertEquals("for index=" + i, expectedString, outDvds[i].getObject()); } } public void testNullableCharsWithPadding() throws StandardException { int numFields = 10; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; String[] strings = new String[numFields]; for (int i = 0; i < numFields; i++) { char[] cha = new char[i]; Arrays.fill(cha, (char)('A' + i)); strings[i] = new String(cha); dvds[i] = new SQLChar(new String(strings[i])); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); int expectedBytesLength = 1; // for version for (int p = 1; p <= dvds.length; p++) { DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor( Types.CHAR, true, // is nullable // includes 5 extra characters of padding for test purposes p + 4); expectedBytesLength += p - 1; // now one byte per char + offset bytes below ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLChar(), // default ("") null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); int numOffsetBytes = rf.getNumOffsetBytes(); expectedBytesLength += numOffsetBytes * dvds.length; byte[] bytes = rf.generateBytes(dvds); // test to make sure bytes is expected length for fixed-width chars assertEquals(expectedBytesLength, bytes.length); // test getting the columns back out 
from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify String values char[] paddingChars = new char[5]; Arrays.fill(paddingChars, ' '); //String padding = new String(paddingChars); for (int i = 0; i < numFields; i++) { String expectedString = strings[i];// + padding; assertEquals("for index=" + i, expectedString, outDvds[i].getObject()); } } public void testGetVarchars() throws StandardException { int numFields = 10; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; String[] strings = new String[numFields]; for (int i = 0; i < numFields; i++) { char[] cha = new char[i]; Arrays.fill(cha, (char)('A' + i)); strings[i] = new String(cha); dvds[i] = new SQLVarchar(new String(strings[i])); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR), new SQLVarchar(), // default ("") null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify String values for (int i = 0; i < numFields; i++) { assertEquals("for index=" + i, strings[i], outDvds[i].getObject()); } } public void testGetNullableIntegersWithNulls() throws StandardException { // add three nulls in the mix at logical positions 1, 7, and 14 List<Integer> nullPositions = Arrays.asList(1, 7, 13); int hiValue = 10; int loValue = -10; int increment = 2; int numFields = (hiValue - loValue) / increment + nullPositions.size() + 1; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int v = -10; for (int index = 0; index < numFields; index++) { if 
(nullPositions.contains(index + 1)) { dvds[index] = new SQLInteger(); // null } else { dvds[index] = new SQLInteger(v); v += 2; } } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify values v = -10; for (int index = 0; index < numFields; index++) { if (nullPositions.contains(index + 1)) { assertEquals("for index=" + index, null, outDvds[index].getObject()); } else { assertEquals("for index=" + index, v, outDvds[index].getObject()); v += 2; } } } /** * Tests the getAsLong API of RowFormatter, if the column type is having type * SQLLong & the value is null; * @throws StandardException */ public void testBug41168() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLLongint(); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLLongint(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertEquals(0, rf.getAsLong(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void 
testGetAsDoubleForNullableDoubleWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLDouble(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.DOUBLE,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLDouble(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertEquals(0.0, rf.getAsDouble(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void testGetAsStringForNullableStringWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLVarchar(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.CHAR,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLChar(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertNull(rf.getAsString(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void testGetAsByteForNullableByteWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new 
SQLTinyint(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.TINYINT,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLTinyint(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertEquals(0, rf.getAsByte(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void testGetAsShortForNullableShortWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLSmallint(); } DataTypeDescriptor dtd = DataTypeDescriptor.SMALLINT; // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLSmallint(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertEquals(0, rf.getAsShort(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void testGetAsIntForNullableIntWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLInteger(); } DataTypeDescriptor dtd = DataTypeDescriptor.INTEGER; // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new 
ColumnDescriptor("c" + p, p, dtd, new SQLInteger(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertEquals(0, rf.getAsInt(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void testGetAsFloatForNullableFloatWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLReal(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.FLOAT,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLReal(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); NullHolder wasNull = new NullHolder(); assertEquals(0.0f, rf.getAsFloat(1, bytes, wasNull)); assertTrue(wasNull.wasNull); } public void testGetAsDateForNullableDateWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLDate(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.DATE,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLDate(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); 
// autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); assertNull(rf.getAsDate(1, bytes, Calendar.getInstance(), null)); } public void testGetAsTimeStampForNullableTimeStampWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLTimestamp(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.TIMESTAMP,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLTimestamp(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); assertNull(rf.getAsTimestamp(1, bytes, Calendar.getInstance(), null)); } public void testGetAsTimeForNullableTimeWithNull() throws StandardException { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; for (int index = 0; index < 1; index++) { dvds[index] = new SQLTime(); } DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.TIME,true); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, dtd, new SQLTime(), // default (null) null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); assertNull(rf.getAsTime(1, bytes, Calendar.getInstance(), null)); } public void testSetColumnsNotNullableIntegers() throws 
StandardException { int hiValue = 10; int loValue = -10; int increment = 2; int numFields = (hiValue - loValue) / increment + 1; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int index = 0; for (int v = -10; v <= 10; v +=2, index++) { dvds[index] = new SQLInteger(v); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER_NOT_NULL, new SQLInteger(0), // default null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // set some columns // set columns at index 0 and 5 to 42 and -42 respectively FormatableBitSet bitSet = new FormatableBitSet(cdl.size()); bitSet.set(0); bitSet.set(5); // sparse array of new values to set DataValueDescriptor[] newDvds = new DataValueDescriptor[cdl.size()]; newDvds[0] = new SQLInteger(42); newDvds[5] = new SQLInteger(-42); bytes = rf.setColumns(bitSet, newDvds, bytes, rf); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify int values assertEquals("for index=0", 42, outDvds[0].getObject()); index = 1; for (int v = -8; v <= 10; v += 2, index++) { if (index == 5) { assertEquals("for index=5", -42, outDvds[5].getObject()); } else { assertEquals("for index=" + index, v, outDvds[index].getObject()); } } } public void testGetMixedTypes() throws StandardException { int numFields = 2; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; dvds[0] = new SQLInteger(-42); dvds[1] = new SQLVarchar("Carpe Diem"); // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); ColumnDescriptor cd = new ColumnDescriptor("c1", 1, DataTypeDescriptor.INTEGER_NOT_NULL, new SQLInteger(0), 
// default null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR, true, 255); cd = new ColumnDescriptor("c2", 2, dtd, new SQLVarchar(""), // default null, // defaultInfo (UUID)null, // table uuid (UUID)null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); assertEquals(2, outDvds.length); // verify values assertEquals(-42, outDvds[0].getObject()); assertEquals("Carpe Diem", outDvds[1].getObject()); } /* * Test use of One bytes for offset. Using both null and not null null-able * Dvds. */ public void test1BytesOffset() throws Exception { List<Integer> nullPositions = Arrays.asList(1, 7, 13); int hiValue = 10; int loValue = -10; int increment = 2; int numFields = (hiValue - loValue) / increment + nullPositions.size() + 1; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int v = -10; for (int index = 0; index < numFields; index++) { if (nullPositions.contains(index + 1)) { dvds[index] = new SQLInteger(); // null } else { dvds[index] = new SQLInteger(v); v += 2; } } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); // check bytes used for offset. 
assertEquals("Should use a 1 byte offset ", 1, rf.getNumOffsetBytes()); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify values v = -10; for (int index = 0; index < numFields; index++) { if (nullPositions.contains(index + 1)) { assertEquals("for index=" + index, null, outDvds[index].getObject()); } else { assertEquals("for index=" + index, v, outDvds[index].getObject()); v += 2; } } } /* * Test use of two bytes for offset. Using both null and not null null-able * Dvds. */ public void test2BytesOffset() throws Exception { List<Integer> nullPositions = Arrays.asList(1, 7, 13); int hiValue = 100; int loValue = -100; int increment = 2; int numFields = (hiValue - loValue) / increment + nullPositions.size() + 1; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; int v = -100; for (int index = 0; index < numFields; index++) { if (nullPositions.contains(index + 1)) { dvds[index] = new SQLInteger(); // null } else { dvds[index] = new SQLInteger(v); v += 2; } } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); // check bytes used for offset. 
assertEquals("Should use a 2 byte offset ", 2, rf.getNumOffsetBytes()); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); // verify values v = -100; for (int index = 0; index < numFields; index++) { if (nullPositions.contains(index + 1)) { assertEquals("for index=" + index, null, outDvds[index].getObject()); } else { assertEquals("for index=" + index, v, outDvds[index].getObject()); v += 2; } } } /* * Test use of three bytes for offset. Using both null and not null null-able * Dvds. This test check the starting value required for a three byte offset. */ public void test3BytesOffsetStartingValue() throws Exception { int numFields = 20; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; String value = "XXXX"; for (int index = 0; index < numFields; index++) { if (index == 0) { dvds[index] = new SQLVarchar(value); continue; } if (index == (numFields -1)) { dvds[index] = new SQLInteger(10); continue; } dvds[index] = new SQLInteger(); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = null; if (p ==1) { cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR), new SQLVarchar(), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); continue; } if (p == dvds.length) { cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER_NOT_NULL, new SQLInteger(10), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); continue; } cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(10), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // 
default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); // check bytes used for offset. assertEquals("Should use a 3 byte offset ", 3, rf.getNumOffsetBytes()); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); assert (dvds.length == outDvds.length); } /* * Test use of four bytes for offset. Using both null and not null null-able * Dvds. This test check the starting value required for a four byte offset. */ public void test4BytesOffsetEndValue() throws Exception { int numFields = 5018; DataValueDescriptor[] dvds = new DataValueDescriptor[numFields]; for (int index = 0; index < 256; index++) { dvds[index] = new SQLVarchar("XXXX"); } for (int index = 256 ; index < 5017 ; index++) { dvds[index] = new SQLInteger(); } for(int index = 5017 ; index < 5018 ; index++) { dvds[index] = new SQLChar("XXXX"); } // create column descriptors ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= 256; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR), new SQLVarchar(), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } for (int p = 257; p <= 5017; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(), // default (null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } for (int p = 5018; p <= 5018; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.CHAR, false, 1), new SQLChar(), // default (null) null, // defaultInfo (UUID) null, // table uuid 
(UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); // check bytes used for offset. assertEquals("Should use a 4 byte offset ", 4, rf.getNumOffsetBytes()); byte[] bytes = rf.generateBytes(dvds); // test getting the columns back out from the bytes DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); assert (dvds.length == outDvds.length); } /* * Test single default value and one byte offset. */ public void testSingleDVDDefaultValue1ByteOffset () throws Exception { DataValueDescriptor[] dvds = new DataValueDescriptor[1]; ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(1), // default (not null) null, // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); assertEquals("Should use a 1 byte offset ", dvds.length, rf.getNumOffsetBytes()); byte[] bytes = rf.generateBytes(dvds); // also the byte array should be of length one ( + 1 for version). assertEquals(2, bytes.length); } /** * Test multiple DVDs with Default value and one byte offset. 
*/ public void testMultiDVDDefaultValue1ByteOffset() throws Exception { DataValueDescriptor[] dvds = new DataValueDescriptor[25]; final int defaultValue = -11; ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(-11), // default (not null) new DefaultInfoImpl(false, "defalueValue-11", new SQLInteger(-11)), // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); assertEquals("Should use a 1 byte offset ", 1, rf.getNumOffsetBytes()); for (int i = 0; i < dvds.length; i++) { dvds[i] = new SQLInteger(-11); // same as default value. } byte[] bytes = rf.generateBytes(dvds); // byte array generated should be the same length as dvds array ( + 1 for // version) assertEquals(dvds.length + 1, bytes.length); DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); for (int index = 0; index < dvds.length; index++) { assertEquals("for index=" + index, defaultValue, outDvds[index].getInt()); } } /** * Test multiple DVDs with Default value and two byte offset. 
*/ public void testMultiDVDDefaultValue2ByteOffset() throws Exception { DataValueDescriptor[] dvds = new DataValueDescriptor[100]; final int defaultValue = -11; ColumnDescriptorList cdl = new ColumnDescriptorList(); for (int p = 1; p <= dvds.length; p++) { ColumnDescriptor cd = new ColumnDescriptor("c" + p, p, DataTypeDescriptor.INTEGER, new SQLInteger(-11), // default (not null) new DefaultInfoImpl(false, "defalueValue-11", new SQLInteger(-11)), // defaultInfo (UUID) null, // table uuid (UUID) null, // default uuid 0L, // autoincStart 0L, // autoincInc 0L,false); // autoincValue cdl.add(cd); } final RowFormatter rf = new RowFormatter(cdl, null, null, 1, null); assertEquals("Should use a 1 byte offset ", 2, rf.getNumOffsetBytes()); for (int i = 0; i < dvds.length; i++) { dvds[i] = new SQLInteger(-11); // same as default value. } byte[] bytes = rf.generateBytes(dvds); // byte array generated should be twice the length of dvds array ( + 1 for // version) assertEquals((dvds.length * 2) + 1, bytes.length); DataValueDescriptor[] outDvds = rf.getAllColumns(bytes); for (int index = 0; index < dvds.length; index++) { assertEquals("for index=" + index, defaultValue, outDvds[index].getInt()); } } /** * Test CompactCompositeRegionKey hashCode, equals etc. with and without key * bytes. Also checks serialization and correct read of CHAR, VARCHAR, * DECIMAL interspersed with fixed/variable width INTs. 
*/ public void testRegionKeyHashEquals() throws Exception { final Connection conn = getConnection(); final Statement stmt = conn.createStatement(); // create a table and insert some data stmt.execute("create table test.table1 (c1 int, c2 varchar(100), " + "c3 char(20) not null, c4 int not null, c5 decimal(30,20), " + "c6 varchar(20) default 'testing', c7 real, primary key (c2, c4))"); // insert some data PreparedStatement pstmt = conn.prepareStatement("insert into test.table1 " + "(c2, c3, c1, c5, c4) values (?, ?, ?, ?, ?)"); for (int id = 1; id <= 20; ++id) { pstmt.setString(1, "id" + id); pstmt.setString(2, "fixed" + id); pstmt.setInt(3, id + 1); pstmt.setBigDecimal(4, new BigDecimal("0.1" + id)); pstmt.setInt(5, id); pstmt.execute(); } final CompactCompositeRegionKey[] keys1 = new CompactCompositeRegionKey[20]; final CompactCompositeRegionKey[] keys2 = new CompactCompositeRegionKey[20]; final CompactCompositeRegionKey[] keys3 = new CompactCompositeRegionKey[20]; final CompactCompositeRegionKey[] keys4 = new CompactCompositeRegionKey[20]; final LocalRegion reg = (LocalRegion)Misc.getRegionForTable("TEST.TABLE1", true); final ExtraTableInfo tabInfo = ((GemFireContainer)reg.getUserAttribute()) .getExtraTableInfo(); final RowFormatter rf = tabInfo.getRowFormatter(); DataValueDescriptor dvd2, dvd4; for (int id = 1; id <= 20; ++id) { dvd2 = new SQLVarchar("id" + id); dvd4 = new SQLInteger(id); keys1[id - 1] = new CompactCompositeRegionKey(new DataValueDescriptor[] { dvd2, dvd4 }, tabInfo); } // lookup keys from region directly int id; for (Object key : reg.keySet()) { CompactCompositeRegionKey ccrk = (CompactCompositeRegionKey)key; // compare against the keys array above id = ccrk.getKeyColumn(1).getInt(); assertTrue("unexpected ID=" + id, id >= 1 && id <= 20); assertNull("unexpected existing value for ID=" + id + ": " + keys2[id - 1], keys2[id - 1]); keys2[id - 1] = ccrk; } // now compare the hashCode and equals for (id = 1; id <= 20; ++id) { assertEquals(id, 
keys1[id - 1].getKeyColumn(1).getInt()); assertEquals(id, keys2[id - 1].getKeyColumn(1).getInt()); assertEquals("id" + id, keys1[id - 1].getKeyColumn(0).getString()); assertEquals("id" + id, keys2[id - 1].getKeyColumn(0).getString()); assertEquals(keys1[id - 1].hashCode(), keys2[id - 1].hashCode()); assertEquals(keys1[id - 1], keys2[id - 1]); } // clone the key with value bytes and check for hashCode and equals for (id = 1; id <= 20; ++id) { @Retained @Released final Object valBytes = keys2[id - 1].getValueByteSource(); try { keys3[id - 1] = new CompactCompositeRegionKey((byte[]) valBytes, keys2[id - 1].getTableInfo()); } finally { keys2[id - 1].releaseValueByteSource(valBytes); } } for (id = 1; id <= 20; ++id) { assertEquals(id, keys2[id - 1].getKeyColumn(1).getInt()); assertEquals(id, keys3[id - 1].getKeyColumn(1).getInt()); assertEquals("id" + id, keys2[id - 1].getKeyColumn(0).getString()); assertEquals("id" + id, keys3[id - 1].getKeyColumn(0).getString()); assertEquals(keys2[id - 1].hashCode(), keys3[id - 1].hashCode()); assertEquals(keys2[id - 1], keys3[id - 1]); } // now force snapshot from key and check again for (id = 1; id <= 20; ++id) { keys2[id - 1].snapshotKeyFromValue(); } for (id = 1; id <= 20; ++id) { assertEquals(id, keys1[id - 1].getKeyColumn(1).getInt()); assertEquals(id, keys2[id - 1].getKeyColumn(1).getInt()); assertEquals(id, keys3[id - 1].getKeyColumn(1).getInt()); assertEquals("id" + id, keys1[id - 1].getKeyColumn(0).getString()); assertEquals("id" + id, keys2[id - 1].getKeyColumn(0).getString()); assertEquals("id" + id, keys3[id - 1].getKeyColumn(0).getString()); assertEquals(keys1[id - 1].hashCode(), keys2[id - 1].hashCode()); assertEquals(keys1[id - 1].hashCode(), keys3[id - 1].hashCode()); assertEquals(keys1[id - 1], keys2[id - 1]); assertEquals(keys1[id - 1], keys3[id - 1]); } // create new keys with different values but same keys for (id = 1; id <= 20; ++id) { keys4[id - 1] = new CompactCompositeRegionKey( rf.generateBytes(new 
DataValueDescriptor[] { new SQLInteger(id + 5), new SQLVarchar("id" + id), new SQLChar("testvalue" + id), new SQLInteger(id), new SQLDecimal("75." + id), new SQLVarchar("test" + id), new SQLReal(10.5F) }), tabInfo); } // check for equality and hashCode for (id = 1; id <= 20; ++id) { assertEquals(id, keys1[id - 1].getKeyColumn(1).getInt()); assertEquals(id, keys4[id - 1].getKeyColumn(1).getInt()); assertEquals("id" + id, keys1[id - 1].getKeyColumn(0).getString()); assertEquals("id" + id, keys4[id - 1].getKeyColumn(0).getString()); assertEquals(keys1[id - 1].hashCode(), keys4[id - 1].hashCode()); assertEquals(keys2[id - 1].hashCode(), keys4[id - 1].hashCode()); assertEquals(keys3[id - 1].hashCode(), keys4[id - 1].hashCode()); assertEquals(keys1[id - 1], keys4[id - 1]); assertEquals(keys2[id - 1], keys4[id - 1]); assertEquals(keys3[id - 1], keys4[id - 1]); } // now check for serialization/deserialization final ByteArrayOutputStream bos = new ByteArrayOutputStream(); final DataOutputStream out = new DataOutputStream(bos); for (id = 1; id <= 20; ++id) { DataSerializer.writeObject(keys1[id - 1], out); DataSerializer.writeObject(keys2[id - 1], out); DataSerializer.writeObject(keys3[id - 1], out); DataSerializer.writeObject(keys4[id - 1], out); } // read back and check results final DataInputStream in = new DataInputStream(new ByteArrayInputStream( bos.toByteArray())); for (id = 1; id <= 20; ++id) { for (int i = 1; i <= 4; ++i) { keys4[id - 1] = DataSerializer.readObject(in); // without ExtraTableInfo this should throw exception try { assertEquals("id" + id, keys4[id - 1].getKeyColumn(0).getString()); fail("expected an internal error"); } catch (RuntimeException err) { if (!err.getMessage().contains("tableInfo")) { throw err; } } keys4[id - 1].setRegionContext(reg); assertEquals("id" + id, keys4[id - 1].getKeyColumn(0).getString()); assertEquals(id, keys4[id - 1].getKeyColumn(1).getInt()); assertEquals(keys1[id - 1].hashCode(), keys4[id - 1].hashCode()); 
assertEquals(keys2[id - 1].hashCode(), keys4[id - 1].hashCode()); assertEquals(keys3[id - 1].hashCode(), keys4[id - 1].hashCode()); assertEquals(keys1[id - 1], keys4[id - 1]); assertEquals(keys2[id - 1], keys4[id - 1]); assertEquals(keys3[id - 1], keys4[id - 1]); } } assertEquals(0, in.available()); // now check using Region.get() for (id = 1; id <= 20; ++id) { assertNotNull("failed1 for ID=" + id, reg.get(keys1[id - 1])); assertNotNull("failed2 for ID=" + id, reg.get(keys2[id - 1])); assertNotNull("failed3 for ID=" + id, reg.get(keys3[id - 1])); assertNotNull("failed4 for ID=" + id, reg.get(keys4[id - 1])); } // check for default values // length of id=1 col should be: 1+6+5+22+4+12+2+2 = 54 byte[] col1Bytes = (byte[])reg.get(keys1[0]); assertNotNull(col1Bytes); // TODO: below should be 54 but derby always creates a DefaultInfo tree // rather than constant for default // The changes added in ColumnDefinitionNode#validateDefault have been // commented out for now due to more changes required elsewhere that do // generation from defaultTree everytime assertEquals(61, col1Bytes.length); assertEquals(new SQLVarchar("testing"), rf.getColumn(6, col1Bytes)); // lastly using an SQL query pstmt = conn.prepareStatement("select c2, c5, c6, c3 from test.table1 " + "where c4=? 
and c2=?"); ResultSet rs; String fixedStr; for (id = 1; id <= 20; ++id) { pstmt.setInt(1, id); pstmt.setString(2, "id" + id); rs = pstmt.executeQuery(); assertTrue(rs.next()); assertEquals("id" + id, rs.getString(1)); assertEquals(new BigDecimal("0.1" + id).setScale(20), rs.getObject(2)); assertEquals(new BigDecimal("0.1" + id).setScale(20), rs.getBigDecimal(2)); assertEquals("testing", rs.getObject(3)); fixedStr = "fixed" + id; for (int index = fixedStr.length(); index < 20; ++index) { fixedStr += ' '; } assertEquals(fixedStr, rs.getString(4)); assertEquals(fixedStr, rs.getObject(4)); assertFalse(rs.next()); } // drop the table stmt.execute("drop table test.table1"); } /** test the hash distribution when using serialized bytes */ public void testHashDistribution_43271() throws Exception { final Connection conn = getConnection(); final Statement stmt = conn.createStatement(); // create customers table stmt.execute("drop table if exists tpcc.customer"); stmt.execute("create table tpcc.customer (" + "c_w_id integer not null," + "c_d_id integer not null," + "c_id integer not null," + "c_discount decimal(4,4)," + "c_credit char(2)," + "c_last varchar(16)," + "c_first varchar(16)," + "c_credit_lim decimal(12,2)," + "c_balance decimal(12,2)," + "c_ytd_payment float," + "c_payment_cnt integer," + "c_delivery_cnt integer," + "c_street_1 varchar(20)," + "c_street_2 varchar(20)," + "c_city varchar(20)," + "c_state char(2)," + "c_zip char(9)," + "c_phone char(16)," + "c_since timestamp," + "c_middle char(2)," + "c_data varchar(500)" + ") partition by column(c_w_id) BUCKETS 270"); stmt.execute("create table tpcc.stock (" + "s_w_id integer not null," + "s_i_id integer not null," + "s_quantity decimal(4,0)," + "s_ytd decimal(8,2)," + "s_order_cnt integer," + "s_remote_cnt integer," + "s_data varchar(50)," + "s_dist_01 char(24)," + "s_dist_02 char(24)," + "s_dist_03 char(24)," + "s_dist_04 char(24)," + "s_dist_05 char(24)," + "s_dist_06 char(24)," + "s_dist_07 char(24)," + 
"s_dist_08 char(24)," + "s_dist_09 char(24)," + "s_dist_10 char(24)" + ") partition by column (s_w_id) colocate with (tpcc.customer) BUCKETS 270"); // some inserts into both tables final int numInserts = 270; final PreparedStatement pstmt = conn.prepareStatement("insert into " + "tpcc.customer(c_w_id, c_d_id, c_id) values (?, ?, ?)"); final PreparedStatement pstmt2 = conn.prepareStatement("insert into " + "tpcc.stock(s_w_id, s_i_id) values (?, ?)"); for (int id = 1; id <= numInserts; id++) { pstmt.setInt(1, id); pstmt.setInt(2, id << 1); pstmt.setInt(3, id + 1); pstmt.execute(); pstmt2.setInt(1, id); pstmt2.setInt(2, id + 2); pstmt2.execute(); } // check the created bucket IDs in the two tables final PartitionedRegion prCust = (PartitionedRegion)Misc .getRegionForTable("TPCC.CUSTOMER", true); final PartitionedRegion stCust = (PartitionedRegion)Misc .getRegionForTable("TPCC.STOCK", true); // check for minimum expected buckets final Set<Integer> prBucketIds = prCust.getDataStore() .getAllLocalBucketIds(); final Set<Integer> stBucketIds = stCust.getDataStore() .getAllLocalBucketIds(); assertTrue("less than expected buckets created " + prBucketIds.size() + ": " + prBucketIds, prBucketIds.size() == (numInserts)); assertTrue("less than expected buckets created " + stBucketIds.size() + ": " + stBucketIds, stBucketIds.size() == (numInserts)); } /** test the hash distribution when using expression resolver */ public void testHashDistributionUDF_43271() throws Exception { final Connection conn = getConnection(); final Statement stmt = conn.createStatement(); // create customers table stmt.execute("create table tpcc.customer (" + "c_w_id integer not null," + "c_d_id integer not null," + "c_id integer not null," + "c_discount decimal(4,4)," + "c_credit char(2)," + "c_last varchar(16)," + "c_first varchar(16)," + "c_credit_lim decimal(12,2)," + "c_balance decimal(12,2)," + "c_ytd_payment float," + "c_payment_cnt integer," + "c_delivery_cnt integer," + "c_street_1 varchar(20)," 
+ "c_street_2 varchar(20)," + "c_city varchar(20)," + "c_state char(2)," + "c_zip char(9)," + "c_phone char(16)," + "c_since timestamp," + "c_middle char(2)," + "c_data varchar(500)" + ") partition by(c_w_id)"); stmt.execute("create table tpcc.stock (" + "s_w_id integer not null," + "s_i_id integer not null," + "s_quantity decimal(4,0)," + "s_ytd decimal(8,2)," + "s_order_cnt integer," + "s_remote_cnt integer," + "s_data varchar(50)," + "s_dist_01 char(24)," + "s_dist_02 char(24)," + "s_dist_03 char(24)," + "s_dist_04 char(24)," + "s_dist_05 char(24)," + "s_dist_06 char(24)," + "s_dist_07 char(24)," + "s_dist_08 char(24)," + "s_dist_09 char(24)," + "s_dist_10 char(24)" + ") partition by (s_w_id) colocate with (tpcc.customer)"); // some inserts into both tables final int numInserts = 72; final PreparedStatement pstmt = conn.prepareStatement("insert into " + "tpcc.customer(c_w_id, c_d_id, c_id) values (?, ?, ?)"); final PreparedStatement pstmt2 = conn.prepareStatement("insert into " + "tpcc.stock(s_w_id, s_i_id) values (?, ?)"); for (int id = 1; id <= numInserts; id++) { pstmt.setInt(1, id); pstmt.setInt(2, id << 1); pstmt.setInt(3, id + 1); pstmt.execute(); pstmt2.setInt(1, id); pstmt2.setInt(2, id + 2); pstmt2.execute(); } // check the created bucket IDs in the two tables final PartitionedRegion prCust = (PartitionedRegion)Misc .getRegionForTable("TPCC.CUSTOMER", true); final PartitionedRegion stCust = (PartitionedRegion)Misc .getRegionForTable("TPCC.STOCK", true); // check for minimum expected buckets final Set<Integer> prBucketIds = prCust.getDataStore() .getAllLocalBucketIds(); final Set<Integer> stBucketIds = stCust.getDataStore() .getAllLocalBucketIds(); assertEquals("less than expected buckets created " + prBucketIds.size() + ": " + prBucketIds, numInserts, prBucketIds.size()); assertEquals("less than expected buckets created " + stBucketIds.size() + ": " + stBucketIds, numInserts, stBucketIds.size()); } public void testProjectionWithClobs() throws 
SQLException { Properties cp = new Properties(); // default is too server... cp.setProperty("host-data", "true"); //cp.setProperty("log-level", "fine"); cp.put(PartitionedRegion.rand.nextBoolean() ? com.pivotal.gemfirexd.Attribute.USERNAME_ATTR : com.pivotal.gemfirexd.Attribute.USERNAME_ALT_ATTR, "Soubhik"); cp.put(com.pivotal.gemfirexd.Attribute.PASSWORD_ATTR, "Soubhik"); cp.setProperty("mcast-port", String.valueOf(AvailablePort .getRandomAvailablePort(AvailablePort.JGROUPS))); Connection conn = TestUtil.getConnection(cp); Statement st = conn.createStatement(); st .execute("create table Layout ( uuid_ varchar(75), plid bigint not null primary key, " + "groupId bigint, companyId bigint, privateLayout smallint, layoutId bigint, " + "parentLayoutId bigint, name varchar(4000), title varchar(4000), description varchar(4000), " + "type_ varchar(75), typeSettings clob, hidden_ smallint, friendlyURL varchar(255), " + "iconImage smallint, iconImageId bigint, themeId varchar(75), colorSchemeId varchar(75), " + "wapThemeId varchar(75), wapColorSchemeId varchar(75), css varchar(4000), priority integer, " + "layoutPrototypeId bigint, dlFolderId bigint)"); st.execute("insert into Layout (uuid_, groupId, companyId, privateLayout, layoutId, parentLayoutId, name, title, description, type_, " + "typeSettings, hidden_, friendlyURL, iconImage, iconImageId, themeId, colorSchemeId, wapThemeId, wapColorSchemeId, css, " + "priority, layoutPrototypeId, dlFolderId, plid) values ('3333-2332-3323-3332', 112, 3323, 33, 3323, 33232, 'NAME : CHAKRABORTY, KUMAR CHAKRABORTY, PRANAB KUMAR CHAKRABORTY, KUMAR PRANAB KUMAR CHAKRABORTY, SOUBHIK KUMAR PRANAB KUMAR CHAKRABORTY'," + " 'title: MISTER, MONSIEUR, だんな (Dan''na), don, जी, ', ' description is self descriptive in name / title. 
what else ?', 'TYPE-1', " + " cast ( 'dafaafsasfasdfasdfasdfasdfasddfasfasdfasd' as clob), 1, 'http://sb.blogspot.com', 1, 1122, 'THEME-1', 'dfadfa', " + " 'wapTheme-2' , 'wapColorScheme-11', '<html> <!css type fo scripts> ', 1, 1, 1, 1000) "); /* ResultSet rs = st .executeQuery("select layoutimpl0_.plid as plid13_0_, layoutimpl0_.uuid_ as uuid2_13_0_, layoutimpl0_.groupId as groupId13_0_," + " layoutimpl0_.companyId as companyId13_0_, layoutimpl0_.privateLayout as privateL5_13_0_," + " layoutimpl0_.layoutId as layoutId13_0_, layoutimpl0_.parentLayoutId as parentLa7_13_0_," + " layoutimpl0_.name as name13_0_, layoutimpl0_.title as title13_0_, layoutimpl0_.description as descrip10_13_0_," + " layoutimpl0_.type_ as type11_13_0_, layoutimpl0_.hidden_ as hidden13_13_0_, layoutimpl0_.friendlyURL as friendl14_13_0_," + " layoutimpl0_.iconImage as iconImage13_0_, layoutimpl0_.iconImageId as iconIma16_13_0_," + " layoutimpl0_.themeId as themeId13_0_, layoutimpl0_.colorSchemeId as colorSc18_13_0_," + " layoutimpl0_.wapThemeId as wapThemeId13_0_, layoutimpl0_.wapColorSchemeId as wapColo20_13_0_," + " layoutimpl0_.css as css13_0_, layoutimpl0_.priority as priority13_0_," + " layoutimpl0_.layoutPrototypeId as layoutP23_13_0_, layoutimpl0_.dlFolderId as dlFolderId13_0_" + " from Layout layoutimpl0_ where " + "layoutimpl0_.plid=1000"); ResultSet rs = st .executeQuery("select layoutimpl0_.plid as plid13_0_, " + "layoutimpl0_.uuid_ as uuid2_13_0_, " + "layoutimpl0_.groupId as groupId13_0_, " + "layoutimpl0_.companyId as companyId13_0_, " + "layoutimpl0_.privateLayout as privateL5_13_0_, " + "layoutimpl0_.layoutId as layoutId13_0_, " + "layoutimpl0_.parentLayoutId as parentLa7_13_0_, " + "layoutimpl0_.name as name13_0_, " + "layoutimpl0_.title as title13_0_, " + "layoutimpl0_.description as descrip10_13_0_, " + "layoutimpl0_.type_ as type11_13_0_, " + "layoutimpl0_.typeSettings as typeSet12_13_0_, " + "layoutimpl0_.hidden_ as hidden13_13_0_, " + "layoutimpl0_.friendlyURL as 
friendl14_13_0_, " + "layoutimpl0_.iconImage as iconImage13_0_, " + "layoutimpl0_.iconImageId as iconIma16_13_0_, " + "layoutimpl0_.themeId as themeId13_0_, " + "layoutimpl0_.colorSchemeId as colorSc18_13_0_, " + "layoutimpl0_.wapThemeId as wapThemeId13_0_, " + "layoutimpl0_.wapColorSchemeId as wapColo20_13_0_, " + "layoutimpl0_.css as css13_0_, " + "layoutimpl0_.priority as priority13_0_, " + "layoutimpl0_.layoutPrototypeId as layoutP23_13_0_, " + "layoutimpl0_.dlFolderId as dlFolderId13_0_ " + "from Layout layoutimpl0_ where layoutimpl0_.plid=1000"); */ ResultSet rs = st .executeQuery("select " + "layoutimpl0_.uuid_ as uuid2_13_0_, " + "layoutimpl0_.plid as plid13_0_, " + "layoutimpl0_.typeSettings as typeSet12_13_0_, " + "layoutimpl0_.hidden_ as hidden13_13_0_, " + "title " + "from Layout layoutimpl0_ where layoutimpl0_.plid=1000"); while(rs.next()) { assertEquals(rs.getString(1), "3333-2332-3323-3332"); assertEquals(rs.getLong(2), 1000); assertEquals(rs.getString(3), "dafaafsasfasdfasdfasdfasdfasddfasfasdfasd"); assertEquals(rs.getInt(4), 1); assertEquals("title: MISTER, MONSIEUR, だんな (Dan'na), don, जी, ", rs.getString(5)); } } public void testDecimalString() throws Exception { Random rnd = new Random(); final int numTimes = 1000000; final byte[][] rows = new byte[numTimes][]; for (int i = 0; i < numTimes; i++) { // create a bias towards 1-38 since that is more common boolean bias = (rnd.nextInt(5) != 0); final int precision; if (bias) { precision = 1 + rnd.nextInt(38); } else { precision = 1 + rnd.nextInt(TypeId.DECIMAL_MAXWIDTH); } int scale = rnd.nextInt(precision + 1); boolean negate = rnd.nextBoolean(); int offset; char[] chars; if (negate) { offset = 1; chars = new char[precision + 2]; chars[0] = '-'; } else { offset = 0; chars = new char[precision + 1]; } for (int j = scale; j < precision; j++) { chars[offset++] = (char)(rnd.nextInt(10) + '0'); } chars[offset++] = '.'; for (int j = 1; j <= scale; j++) { chars[offset++] = (char)(rnd.nextInt(10) + '0'); } 
if (offset != chars.length) { fail("offset=" + offset + " chars.length=" + chars.length); } SQLDecimal dec = new SQLDecimal(chars); rows[i] = new byte[dec.getLengthInBytes(null)]; dec.writeBytes(rows[i], 0, null); } // first check for correctness for (byte[] row : rows) { String s1 = SQLDecimal.getAsBigDecimal(row, 0, row.length) .toPlainString(); String s2 = SQLDecimal.getAsString(row, 0, row.length); assertEquals(s1, s2); } // now perf comparison long start, end; for (int i = 1; i <= 4; i++) { start = System.nanoTime(); for (byte[] row : rows) { SQLDecimal.getAsBigDecimal(row, 0, row.length).toPlainString(); } end = System.nanoTime(); System.out.println("Time taken with BigDecimal " + ((end - start) / 1000000.0) + "ms"); start = System.nanoTime(); for (byte[] row : rows) { SQLDecimal.getAsString(row, 0, row.length); } end = System.nanoTime(); System.out.println("Time taken with optimized impl " + ((end - start) / 1000000.0) + "ms"); } } public void DEBUG_testPXFPerf() throws Exception { // create a few arrays and loop over them writing UTF8 bytes to out Random rnd = new Random(); final String availableChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"; final byte[][] byteArrays = new byte[10][]; int totalBytes = 0; for (int i = 0; i < byteArrays.length; i++) { final byte[] bytes = byteArrays[i] = new byte[512 + rnd.nextInt(512)]; for (int j = 0; j < bytes.length; j++) { bytes[j] = (byte)availableChars.charAt(rnd.nextInt(availableChars.length())); } totalBytes += bytes.length; } final ByteArrayDataOutput out = new ByteArrayDataOutput(); final int numTimes = 500000; final DataTypeDescriptor dtd = DataTypeDescriptor .getBuiltInDataTypeDescriptor(Types.VARCHAR); // warmups first for (int i = 0; i < 100000; i++) { for (byte[] bytes : byteArrays) { DataTypeUtilities.writeAsUTF8BytesForPXF(bytes, 0, bytes.length, dtd, out); } out.clearForReuse(); } // timed runs long start, end; for (int cnt = 1; cnt <= 5; cnt++) { start = System.nanoTime(); for 
(int i = 0; i < numTimes; i++) { for (byte[] bytes : byteArrays) { DataTypeUtilities.writeAsUTF8BytesForPXF(bytes, 0, bytes.length, dtd, out); } out.clearForReuse(); } end = System.nanoTime(); double rateMB = (1000.0 * totalBytes * numTimes) / (double)(end - start); System.out.println("Total time taken " + ((double)(end - start) / 1000000.0) + "ms at the rate of " + rateMB + " MB/s"); } } public void DEBUG_testBytes() throws Exception { Connection conn = TestUtil.getConnection(); conn.createStatement().execute( "create table trade.customers (cid int not null, " + "cust_name varchar(100), since date, addr varchar(100), " + "tid int, primary key (cid))"); RowFormatter rf = ((GemFireContainer)Misc.getRegion("/TRADE/CUSTOMERS", true, false).getUserAttribute()).getCurrentRowFormatter(); byte[] bytes1 = new byte[] { 0, 0, 3, -107, 110, 97, 109, 101, 57, 54, 7, -45, 4, 4, 97, 100, 100, 114, 101, 115, 115, 32, 105, 115, 32, 110, 97, 109, 101, 57, 54, 0, 0, 0, 16, 4, 0, 10, 0, 14, 0, 31, 0 }; byte[] bytes2 = new byte[] { 0, 0, 3, -107, 110, 97, 109, 101, 57, 49, 55, 7, -45, 4, 4, 97, 100, 100, 114, 101, 115, 115, 32, 105, 115, 32, 110, 97, 109, 101, 57, 49, 55, 0, 0, 0, 16, 4, 0, 11, 0, 15, 0, 33, 0 }; DataValueDescriptor[] dvds1 = new DataValueDescriptor[5]; DataValueDescriptor[] dvds2 = new DataValueDescriptor[5]; rf.getColumns(bytes1, dvds1, null); rf.getColumns(bytes2, dvds2, null); System.out.println("Row1 is: " + Arrays.toString(dvds1)); System.out.println("Row2 is: " + Arrays.toString(dvds2)); } }
apache-2.0
JNDX25219/XiaoShangXing
app/src/main/java/com/xiaoshangxing/yujian/im/CustomMessage/TransmitMessage_WithImage.java
1067
package com.xiaoshangxing.yujian.im.CustomMessage;

import com.xiaoshangxing.network.netUtil.NormalKey;

import org.json.JSONException;
import org.json.JSONObject;

/**
 * Created by FengChaoQun on 2016/9/16.
 *
 * Custom IM attachment used to forward a feed/moment post that carries an
 * image. Only the numeric id of the forwarded post is carried over the wire;
 * the receiving side looks the full post up again by that id.
 */
public class TransmitMessage_WithImage extends CustomAttachment {

    // Id of the forwarded feed post.
    private int state_id;

    public TransmitMessage_WithImage(int type) {
        super(type);
    }

    public int getState_id() {
        return state_id;
    }

    public void setState_id(int state_id) {
        this.state_id = state_id;
    }

    /** Reads the forwarded post id out of a received JSON payload. */
    @Override
    protected void parseData(JSONObject data) {
        try {
            this.state_id = data.getInt(NormalKey.id);
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    /** Packs the forwarded post id into a JSON payload for sending. */
    @Override
    protected JSONObject packData() {
        JSONObject payload = new JSONObject();
        try {
            payload.put(NormalKey.id, state_id);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return payload;
    }
}
apache-2.0
rishabhnayak/ud839_Miwok-Starter-code
app/src/main/java/com/example/android/miwok/ThirdFragment.java
9197
package com.example.android.miwok; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.support.design.widget.FloatingActionButton; import android.support.v4.app.Fragment; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.Button; import android.widget.DatePicker; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.ProgressBar; import android.widget.RelativeLayout; import android.widget.TextView; import java.util.ArrayList; public class ThirdFragment extends Fragment { TBTS_Live_ItemList_Adaptor Adapter3=null; stnName_to_stnCode codeToName; Thread thread4; String value; String key; String origin = null; SharedPreferences sd = null; ListView listview2; DatePicker simpleDatePicker; Button submit; ArrayList<stn_status_Items_Class> words3; LinearLayout disp_content, loading; Handler OnCreateHandler; String dnlddata = null; ProgressBar progressbar; TextView disp_msg; Button retryButton, LiveRetryButton; FloatingActionButton fab; View rootView; Handler TBTSLiveHandler; Boolean oncreateCreated2=false; public ThirdFragment() { // Required empty public constructor } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { System.out.println("OnCreateView Page 3 : Coming Tab..."); sd = getActivity().getSharedPreferences("com.example.android.miwok", Context.MODE_PRIVATE); rootView = inflater.inflate(R.layout.fragment_third, container, false); loading = (LinearLayout) rootView.findViewById(R.id.loading); disp_content = (LinearLayout) rootView.findViewById(R.id.disp_content); progressbar = (ProgressBar) 
rootView.findViewById(R.id.progressBar); disp_msg = (TextView) rootView.findViewById(R.id.disp_msg); listview2 = (ListView) rootView.findViewById(R.id.listview); codeToName = new stnName_to_stnCode(getActivity()); sd.edit().putBoolean("live_options_recreate",false).apply(); TBTSLiveHandler = new Handler() { @Override public void handleMessage(Message msg) { super.handleMessage(msg); System.out.println(""); System.out.println("fragment,coming,TBTSLiveHandler"); customObject myobj = (customObject) msg.obj; if (myobj.getResult().equals("success") && getActivity() !=null) { words3 = (ArrayList<stn_status_Items_Class>) myobj.getStnsts(); Adapter3 = new TBTS_Live_ItemList_Adaptor(getActivity(), words3); System.out.println("fragment,coming,TBTSLiveHandler,success"); loading.setVisibility(View.GONE); disp_content.setVisibility(View.VISIBLE); listview2.setAdapter(Adapter3); } else if (myobj.getResult().equals("error")) { System.out.println("fragment,coming,TBTSLiveHandler,error"); progressbar.setVisibility(View.GONE); disp_msg.setVisibility(View.VISIBLE); LiveRetryButton.setVisibility(View.VISIBLE); disp_msg.setText(myobj.getErrorMsg()); Log.e("error", myobj.getErrorMsg()); } } }; LiveRetryButton = (Button) rootView.findViewById(R.id.LiveRetryButton); LiveRetryButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { System.out.println("fragment,coming,LiveRetryButton on click"); progressbar.setVisibility(View.VISIBLE); disp_msg.setVisibility(View.GONE); LiveRetryButton.setVisibility(View.GONE); Worker worker1 = new Worker(getActivity(),"tbts_upcoming"); worker1.Input_Details(sd, TBTSLiveHandler, sd.getString("src_code", ""), sd.getString("dstn_code", ""),codeToName); loading.setVisibility(View.VISIBLE); disp_content.setVisibility(View.INVISIBLE); Thread threadu = new Thread(worker1); if (!threadu.getState().equals("RUNNABLE") || !threadu.getState().equals("WAITING")) { System.out.println("fragment,coming,LiveRetryButton ,if part(worker 
thread restart)"); threadu.start(); } else { System.out.println("fragment,coming,LiveRetryButton ,else part(worker thread not restarted error)"); } } }); listview2.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) { Object item = arg0.getItemAtPosition(arg2); System.out.println(words3.get(arg2).getTrainNo() + " : "+words3.get(arg2).getStartDate()); try { Intent i = new Intent(getActivity(), live_train_status_selected_item.class); i.putExtra("trainNo",words3.get(arg2).getTrainNo()); i.putExtra("trainName",words3.get(arg2).getTrainName()); i.putExtra("startDate",words3.get(arg2).getStartDate()); i.putExtra("origin","train_bw_2_stn_upcoming"); startActivity(i); } catch (Exception e) { e.fillInStackTrace(); } } }); oncreateCreated2=true; return rootView; } @Override public void setUserVisibleHint(boolean isVisibleToUser) { super.setUserVisibleHint(isVisibleToUser); System.out.println("SetUserVisible,isVisibleToUser :"+isVisibleToUser+",current tab :"+ trn_bw_2_stn.tabindex); if (isVisibleToUser && trn_bw_2_stn.tabindex == 2) { System.out.println("first if .........."); Thread cheaker= new Thread("threadT1"){ @Override public void run() { if(getviewcheck()){ System.out.println("if part(getviewcheck=true)"); getActivity().runOnUiThread(new Runnable() { @Override public void run() { System.out.println("main thread :"+Thread.currentThread().getName()); getActivity().runOnUiThread(new Runnable() { @Override public void run() { if(Adapter3 !=null) { }else { Worker worker1 = new Worker(getActivity(),"tbts_upcoming"); worker1.Input_Details(sd, TBTSLiveHandler, sd.getString("src_code", ""), sd.getString("dstn_code", ""),codeToName); loading.setVisibility(View.VISIBLE); disp_content.setVisibility(View.INVISIBLE); Thread threadu = new Thread(worker1); System.out.println("fragment,coming,worker defined,if part(worker thread start)"); threadu.start(); } } }); } }); }else{ 
System.out.println(" unable to understand......"); } } }; // cheaker.start(); }else{ System.out.println("else part of isVisibleToUser && tbts_test.tabindex :"+ trn_bw_2_stn.tabindex); } } private Boolean getviewcheck() { Boolean giveback=false; System.out.println("under getviewcheck fn"); while(oncreateCreated2 !=true){ try { Thread.currentThread().sleep(20); System.out.println(Thread.currentThread().getName()+",whlie,sleep 100 ms"); } catch (InterruptedException e) { e.printStackTrace(); } } if(oncreateCreated2){ System.out.println(Thread.currentThread().getName()+","+"getview() != null"); giveback=true; }else if (!oncreateCreated2){ System.out.println(Thread.currentThread().getName()+","+"getview() = null"); giveback=false; } return giveback; } }
apache-2.0
tiarebalbi/kupo
src/main/java/com/tiarebalbi/kupo/configuration/WebSocketApplicationContext.java
1605
package com.tiarebalbi.kupo.configuration; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; import org.springframework.messaging.simp.config.MessageBrokerRegistry; import org.springframework.web.socket.config.annotation.AbstractWebSocketMessageBrokerConfigurer; import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker; import org.springframework.web.socket.config.annotation.StompEndpointRegistry; import org.springframework.web.socket.server.standard.TomcatRequestUpgradeStrategy; import org.springframework.web.socket.server.support.DefaultHandshakeHandler; /** * Classe de configuração do contexto do websocket * * @author Tiarê Balbi Bonamini * @package com.tiarebalbi.kupo.configuration * @since 1.0.0 * */ @Configuration @EnableWebSocketMessageBroker public class WebSocketApplicationContext extends AbstractWebSocketMessageBrokerConfigurer { @Autowired private Environment env; @Override public void registerStompEndpoints(StompEndpointRegistry registry) { if (env.acceptsProfiles("test.tomcat")) { registry.addEndpoint("/application") .setHandshakeHandler( new DefaultHandshakeHandler(new TomcatRequestUpgradeStrategy())) .withSockJS(); } else { registry.addEndpoint("/application").withSockJS(); } } @Override public void configureMessageBroker(MessageBrokerRegistry registry) { registry.enableSimpleBroker("/queue/", "/topic/"); registry.setApplicationDestinationPrefixes("/app"); } }
apache-2.0
aatma-me/SemanticAPI
src/main/java/me/aatma/library/sapi/jenasclient/iterator/PrimitiveObjectIteratorImpl.java
4765
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package me.aatma.library.sapi.jenasclient.iterator; import me.aatma.library.sapi.jenasclient.ContextImpl; import me.aatma.library.sapi.jenasclient.SCollectionImpl; import me.aatma.library.sapi.jenasclient.SObjectImpl; import me.aatma.library.sapi.Context; import me.aatma.library.sapi.SCollection; import me.aatma.library.sapi.SObject; import me.aatma.library.sapi.iterator.SObjectIterator; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.time.Duration; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import java.util.logging.Level; import org.apache.jena.datatypes.RDFDatatype; import org.apache.jena.datatypes.xsd.XSDDatatype; import org.apache.jena.datatypes.xsd.XSDDateTime; import org.apache.jena.datatypes.xsd.XSDDuration; import org.apache.jena.datatypes.xsd.impl.XSDDateType; import org.apache.jena.rdf.model.Literal; import org.apache.jena.rdf.model.NodeIterator; import org.apache.jena.rdf.model.RDFNode; import org.apache.jena.rdf.model.ResIterator; import org.apache.jena.rdf.model.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Vijay * @param <S> */ public class PrimitiveObjectIteratorImpl<S> implements Iterator<S>{ private static final Logger log = LoggerFactory.getLogger(PrimitiveObjectIteratorImpl.class.getCanonicalName()); private final NodeIterator nodeIter; private final Class retType; @Deprecated // Use SObjectIteratorImpl, it will handle both NodeIterator and ResourceIterator // that is, both PrimitiveObjects and SObjects public PrimitiveObjectIteratorImpl(NodeIterator nodeIter, Class<S> retType) { this.nodeIter = nodeIter; this.retType = retType; } @Override public boolean hasNext() { return nodeIter.hasNext(); } /* TODO: We should never return null from the 
iterator next() method. If for some reason we could not build the required object, we should just iterate next and build the next object. (Be careful to handle hasNext() in that case. We would have to cache the next() in order for the hasNext to be aware of next() method failing in certain conditions) For now we will not implement this, because within the API implementation it should not happen. */ @Override public S next() { RDFNode n; if (nodeIter instanceof NodeIterator) { n = ((NodeIterator)this.nodeIter).next(); } else { // This should never happen because constructor doesn't support taking // arbitrary Iterator implementation throw new RuntimeException("Unsupported iterator."); } if (n.isLiteral()) { Literal l = n.asLiteral(); RDFDatatype dt = l.getDatatype(); // Class c = dt.getJavaClass(); // if (!c.equals(retType)) { // log.warn("The type of literal: " + c + " is not equal or compatible to requested type: " + retType); // return null; // } System.out.println("Is it a string?" + XSDDatatype.XSDstring.equals(dt)); System.out.println("value" + l.getValue()); if ( dt.equals(XSDDatatype.XSDstring) && retType.equals(String.class) //c.equals(String.class) ) { // System.out.println("As java obj: " + l.getString()); // System.out.println("Datatype: " + l.getDatatype().getURI() + " " + l.getDatatype().getJavaClass()); return (S) l.getString(); } else if ((dt.equals(XSDDatatype.XSDdateTime) || dt.equals(XSDDateType.XSDdateTimeStamp)) && retType.equals(Date.class) ) { XSDDateTime xsdDt = (XSDDateTime) l.getValue(); return (S) xsdDt.asCalendar().getTime(); } else if (dt.equals(XSDDatatype.XSDduration) && retType.equals(Duration.class)) { // TODO: Figure out how Jena is building XSDduration. 
There seem to be no // public constructor, only internal constructor though marked public for // using outside the package, but within the API XSDDuration xsdDur = (XSDDuration) l.getValue(); return (S) Duration.parse(xsdDur.toString()); } else { // others, add when you get to them // Date // Duration return null; } } else { log.warn("The iterator: " + this.getClass().getSimpleName() + " got a node that is not a type " + retType.getSimpleName()); return null; } } }
apache-2.0
gawkermedia/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201511/BaseDynamicAllocationCreative.java
3412
/**
 * BaseDynamicAllocationCreative.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 *
 * NOTE(review): generated code -- prefer regenerating from the WSDL over
 * hand-editing; manual changes will be lost on the next WSDL2Java run.
 */

package com.google.api.ads.dfp.axis.v201511;


/**
 * A base class for dynamic allocation creatives.
 */
public abstract class BaseDynamicAllocationCreative extends com.google.api.ads.dfp.axis.v201511.Creative implements java.io.Serializable {

    /** No-arg constructor required by the Axis bean (de)serializers. */
    public BaseDynamicAllocationCreative() {
    }

    /** Full constructor; forwards every field to the Creative superclass. */
    public BaseDynamicAllocationCreative(
           java.lang.Long advertiserId,
           java.lang.Long id,
           java.lang.String name,
           com.google.api.ads.dfp.axis.v201511.Size size,
           java.lang.String previewUrl,
           com.google.api.ads.dfp.axis.v201511.CreativePolicyViolation[] policyViolations,
           com.google.api.ads.dfp.axis.v201511.AppliedLabel[] appliedLabels,
           com.google.api.ads.dfp.axis.v201511.DateTime lastModifiedDateTime,
           com.google.api.ads.dfp.axis.v201511.BaseCustomFieldValue[] customFieldValues) {
        super(
            advertiserId,
            id,
            name,
            size,
            previewUrl,
            policyViolations,
            appliedLabels,
            lastModifiedDateTime,
            customFieldValues);
    }

    // Re-entrancy guard for equals(): remembers the object currently being
    // compared so cyclic object graphs cannot cause infinite recursion.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof BaseDynamicAllocationCreative)) return false;
        BaseDynamicAllocationCreative other = (BaseDynamicAllocationCreative) obj;
        // NOTE(review): this null check is dead code -- `null instanceof X`
        // is false, so the method already returned above for null arguments.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Recursive re-entry: equal only if it is the very same object.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj);
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard for hashCode() on cyclic object graphs.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            // Cycle detected: contribute nothing to the enclosing computation.
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata used by the Axis runtime to map this bean to its XML type.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(BaseDynamicAllocationCreative.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "BaseDynamicAllocationCreative"));
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
IdearHui/BlogCriber
BlogCriber/src/main/java/Action/BlogAction.java
831
package Action;

import Service.BlogServiceImpl;
import framework.ActionContext;
import framework.ActionUtil;
import framework.InstanceFactory;

/**
 * Action that serves the list of crawled blog entries as a JSON response.
 */
public class BlogAction {

    BlogServiceImpl bImpl = InstanceFactory.getInstance(BlogServiceImpl.class
            .getName());

    /**
     * Writes a paged JSON listing of the crawled blog entries to the current
     * response. Reads the page number from the "page" request parameter and
     * the page size from the "limit" request parameter.
     *
     * @author wuhui
     */
    public void blogList() {
        ActionContext.getResponse().setContentType("text/json;charset=UTF-8");

        String rawPage = ActionContext.getRequest().getParameter("page");
        int pageNumber = Integer.parseInt(rawPage);
        String rawLimit = ActionContext.getRequest().getParameter("limit");
        int pageSize = Integer.parseInt(rawLimit);
        System.out.println("第" + pageNumber + "页" + "每页显示:" + pageSize);

        String json = bImpl.ListBlog(pageNumber, pageSize);
        System.out.println(json);
        ActionUtil.setRespContent(json);
    }
}
apache-2.0
chinaboard/cat
cat-core/src/main/java/com/dianping/cat/message/codec/WaterfallMessageCodec.java
22215
package com.dianping.cat.message.codec;

import io.netty.buffer.ByteBuf;

import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

import org.unidal.helper.Splitters;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.extension.Initializable;
import org.unidal.lookup.extension.InitializationException;

import com.dianping.cat.message.Event;
import com.dianping.cat.message.Message;
import com.dianping.cat.message.Transaction;
import com.dianping.cat.message.spi.MessageCodec;
import com.dianping.cat.message.spi.MessageTree;
import com.dianping.cat.message.spi.codec.BufferWriter;

/**
 * Local use only, do not use it over network since it only supports one-way encoding.
 *
 * Renders a message tree as an HTML table wrapping an inline SVG "waterfall"
 * chart: one row per transaction (and per RemoteCall event), with a horizontal
 * bar whose offset and width are scaled by {@link Ruler} from the transaction's
 * start time and duration. Decoding is deliberately unsupported.
 */
public class WaterfallMessageCodec implements MessageCodec, Initializable {
    public static final String ID = "waterfall";

    // Format marker written at the start of the header row.
    private static final String VERSION = "WF2"; // Waterfall version 2

    @Inject
    private BufferWriter m_writer;

    // Byte-level helper built from m_writer in initialize()/setBufferWriter().
    private BufferHelper m_bufferHelper;

    // Palette cycled through when a transaction is split into duration segments.
    private String[] m_colors = { "#0066ff", "#006699", "#006633", "#0033ff", "#003399", "#003333" };

    /**
     * Counts the SVG rows needed for transaction {@code t}: one for itself,
     * one per nested transaction (recursively), and one per "RemoteCall" event.
     * Used to size the SVG canvas height before rendering.
     */
    protected int calculateLines(Transaction t) {
        int count = 1;

        for (Message child : t.getChildren()) {
            if (child instanceof Transaction) {
                count += calculateLines((Transaction) child);
            } else if (child instanceof Event) {
                if (child.getType().equals("RemoteCall")) {
                    count++;
                }
            }
        }

        return count;
    }

    // Decoding is not supported: this codec is one-way (encode only).
    // NOTE(review): the message text says "HtmlMessageCodec" -- presumably a
    // copy/paste from a sibling codec; verify before relying on the wording.
    @Override
    public MessageTree decode(ByteBuf buf) {
        throw new UnsupportedOperationException("HtmlMessageCodec only supports one-way encoding!");
    }

    @Override
    public void decode(ByteBuf buf, MessageTree tree) {
        throw new UnsupportedOperationException("HtmlMessageCodec only supports one-way encoding!");
    }

    /**
     * Encodes the tree into {@code buf} as a length-prefixed HTML/SVG chunk.
     * Only trees whose root message is a Transaction are rendered; anything
     * else is silently skipped. The int written at {@code index} up front is a
     * placeholder later patched with the total number of bytes that follow.
     */
    @Override
    public void encode(MessageTree tree, ByteBuf buf) {
        Message message = tree.getMessage();

        if (message instanceof Transaction) {
            int count = 0;
            int index = buf.writerIndex();
            BufferHelper helper = m_bufferHelper;
            Transaction t = (Transaction) message;
            Locator locator = new Locator();
            Ruler ruler = new Ruler((int) t.getDurationInMicros());

            // Fixed canvas geometry: 1400px wide, 18px per row plus padding,
            // with a 200px left gutter for the transaction-type labels.
            ruler.setWidth(1400);
            ruler.setHeight(18 * calculateLines(t) + 10);
            ruler.setOffsetX(200);
            ruler.setOffsetY(10);

            buf.writeInt(0); // place-holder

            count += helper.table1(buf);
            count += helper.crlf(buf);
            count += encodeHeader(tree, buf, ruler);
            count += encodeRuler(buf, locator, ruler);
            count += encodeTransaction(tree, t, buf, locator, ruler);
            count += encodeFooter(tree, buf);
            count += helper.table2(buf);

            // Patch the placeholder with the actual payload length in bytes.
            buf.setInt(index, count);
        }
    }

    /**
     * Closes the SVG elements opened in encodeHeader() and the surrounding
     * table cell/row. Returns the number of bytes written.
     */
    protected int encodeFooter(MessageTree tree, ByteBuf buf) {
        BufferHelper helper = m_bufferHelper;
        XmlBuilder b = new XmlBuilder();
        StringBuilder sb = b.getResult();

        b.tag2("g");
        b.tag2("svg");
        sb.append("</td></tr>");

        return helper.write(buf, sb.toString());
    }

    /**
     * Writes the header row (version, domain, host, thread and message-id
     * metadata) followed by the opening of the SVG canvas sized from the
     * ruler. Returns the number of bytes written.
     */
    protected int encodeHeader(MessageTree tree, ByteBuf buf, Ruler ruler) {
        BufferHelper helper = m_bufferHelper;
        XmlBuilder b = new XmlBuilder();
        StringBuilder sb = b.getResult();

        sb.append("<tr class=\"header\"><td>");
        sb.append(VERSION).append(" ").append(tree.getDomain()).append(" ");
        sb.append(tree.getHostName()).append(" ").append(tree.getIpAddress()).append(" ");
        sb.append(tree.getThreadGroupName()).append(" ").append(tree.getThreadId()).append(" ");
        sb.append(tree.getThreadName()).append(" ").append(tree.getMessageId()).append(" ");
        sb.append(tree.getParentMessageId()).append(" ").append(tree.getRootMessageId()).append(" ");
        sb.append(tree.getSessionToken()).append(" ");
        sb.append("</td></tr>");
        sb.append("<tr><td>");

        int height = ruler.getHeight();
        int width = ruler.getWidth();

        // Open the SVG document and a default-styled group; both are closed
        // later by encodeFooter().
        b.add("<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\r\n");
        b.tag1("svg", "x", 0, "y", 0, "width", width, "height", height, "viewBox", "0,0," + width + "," + height,
              "xmlns", "http://www.w3.org/2000/svg", "version", "1.1");
        b.tag1("g", "font-size", "12", "stroke", "gray");
        return helper.write(buf, sb.toString());
    }

    /**
     * Renders one "RemoteCall" event as its own row, nested one level below
     * the current transaction. Returns the number of bytes written.
     */
    protected int encodeRemoteCall(MessageTree tree, Event event, ByteBuf buf, Locator locator, Ruler ruler) {
        int count = 0;

        locator.downLevel(true);
        locator.nextLine();

        count += encodeRemoteCallLine(tree, event, buf, locator, ruler);

        locator.upLevel();

        return count;
    }

    /**
     * Writes a single RemoteCall row: the tree-branch glyphs plus a clickable
     * "[:: show ::]" link that pops up the remote logview. The event's data
     * payload is treated as the remote logview id.
     */
    protected int encodeRemoteCallLine(MessageTree tree, Event event, ByteBuf buf, Locator locator, Ruler ruler) {
        BufferHelper helper = m_bufferHelper;
        XmlBuilder b = new XmlBuilder();
        StringBuilder sb = b.getResult();
        int width = 6;
        int height = 18;
        int x = 0;
        int y = locator.getLine() * height + ruler.getOffsetY();
        String logviewId = String.valueOf(event.getData());

        b.branch(locator, x, y, width, height);
        x += locator.getLevel() * width;

        b.tagWithText("text", "<a href='#'>[:: show ::]</a>", "x", x + 2, "y", y - 5, "font-size", "16",
              "stroke-width", "0", "fill", "blue", "onclick", "popup('" + logviewId + "');");

        return helper.write(buf, sb.toString());
    }

    /**
     * Draws the time axis: unit labels (in ms or us depending on the step
     * size) along the top, and dashed vertical grid lines down the canvas.
     * Returns the number of bytes written.
     */
    protected int encodeRuler(ByteBuf buf, Locator locator, Ruler ruler) {
        BufferHelper helper = m_bufferHelper;
        XmlBuilder b = new XmlBuilder();
        StringBuilder sb = b.getResult();
        PathBuilder p = new PathBuilder();
        int height = ruler.getHeight();

        b.tag1("g", "id", "ruler", "font-size", "12", "text-anchor", "middle", "stroke", "black", "stroke-width", "1");

        int unitNum = ruler.getUnitNum();
        int unitStep = ruler.getUnitStep();
        int unit = (int) ruler.getUnit();
        int x = ruler.getOffsetX();
        int y = 10;

        for (int i = 0; i <= unitNum; i++) {
            String text;

            // Steps of 1000us or more are labelled in milliseconds.
            if (unitStep >= 1000) {
                text = (i * unitStep / 1000) + "ms";
            } else {
                text = (i * unitStep) + "us";
            }

            b.tagWithText("text", text, "x", x + i * unit, "y", y, "stroke-width", "0");
        }

        for (int i = 0; i <= unitNum; i++) {
            b.tag("path", "d", p.moveTo(x + i * unit, y + 6).v(height).build(), "stroke-dasharray", "3,4");
        }

        b.tag2("g");

        return helper.write(buf, sb.toString());
    }

    /**
     * Recursively renders {@code transaction} and its visible children
     * (nested transactions and RemoteCall events) as waterfall rows,
     * maintaining the tree-branch state in {@code locator}. Returns the
     * number of bytes written.
     */
    protected int encodeTransaction(MessageTree tree, Transaction transaction, ByteBuf buf, Locator locator, Ruler ruler) {
        List<Message> children = getVisibleChildren(transaction);
        int count = 0;

        locator.downLevel(children.isEmpty());
        locator.nextLine();

        count += encodeTransactionLine(tree, transaction, buf, locator, ruler);

        int len = children.size();

        for (int i = 0; i < len; i++) {
            Message child = children.get(i);

            locator.setLast(i == len - 1);

            if (child instanceof Transaction) {
                count += encodeTransaction(tree, (Transaction) child, buf, locator, ruler);
            } else if (child instanceof Event && "RemoteCall".equals(child.getType())) {
                count += encodeRemoteCall(tree, (Event) child, buf, locator, ruler);
            }
        }

        locator.upLevel();

        return count;
    }

    /**
     * Writes a single transaction row: branch glyphs, the type label (red if
     * the status is not "0"), and the duration bar. If the transaction's data
     * encodes per-segment durations (see getTransactionDurationSegments), the
     * bar is split into colored segments; otherwise one bar covers the whole
     * duration. A transparent full-width rect with id {@code tid} captures
     * mouse events for the hover highlight.
     */
    protected int encodeTransactionLine(MessageTree tree, Transaction t, ByteBuf buf, Locator locator, Ruler ruler) {
        BufferHelper helper = m_bufferHelper;
        XmlBuilder b = new XmlBuilder();
        int width = 6;
        int height = 18;
        int x = 0;
        int y = locator.getLine() * height + ruler.getOffsetY();
        String tid = "t" + locator.getLine();
        long t0 = tree.getMessage().getTimestamp();
        long t1 = t.getTimestamp();
        // Bar offset from the root's start (ms -> ns-ish units for calcX) and
        // bar width from the duration (us -> same scale for calcWidth).
        int rx = ruler.calcX((t1 - t0) * 1000);
        int rw = ruler.calcWidth(t.getDurationInMicros() * 1000);
        int[] segments = getTransactionDurationSegments(t);

        b.branch(locator, x, y, width, height);

        x += locator.getLevel() * width;

        if (t.getStatus().equals("0")) {
            b.tag1("text", "x", x, "y", y - 5, "font-weight", "bold", "stroke-width", "0");
        } else {
            // Non-zero status means failure: label drawn in red.
            b.tag1("text", "x", x, "y", y - 5, "font-weight", "bold", "stroke-width", "0", "fill", "red");
        }

        b.add(t.getType()).newLine();
        // Turn the label red while the mouse hovers the row's capture rect.
        b.tag("set", "attributeName", "fill", "to", "red", "begin", tid + ".mouseover", "end", tid + ".mouseout");
        b.tag2("text");

        if (segments == null) {
            // Single bar for the whole duration, labelled "<ms> <name>".
            String durationInMillis = String.format("%.2f %s", t.getDurationInMicros() / 1000.0, t.getName());

            b.tag("rect", "x", rx + 1, "y", y - 15, "width", rw, "height", height - 2, "fill", "#0066ff", "opacity",
                  "0.5");
            b.tagWithText("text", durationInMillis, "x", rx + 5, "y", y - 3, "font-size", "11", "stroke-width", "0");
        } else {
            // One colored sub-bar per segment, laid end to end; only the
            // first segment carries the transaction name in its label.
            int index = 0;

            for (int segment : segments) {
                int w = ruler.calcWidth(segment);
                String durationInMillis = String.format("%.2f %s", segment / 1000.0 / 1000.0,
                      index == 0 ? t.getName() : "");
                String color = m_colors[index % m_colors.length];

                b.tag("rect", "x", rx + 1, "y", y - 15, "width", w, "height", height - 2, "fill", color, "opacity",
                      "0.5");
                b.tagWithText("text", durationInMillis, "x", rx + 5, "y", y - 3, "font-size", "11", "stroke-width",
                      "0");

                index++;
                rx += w;
            }
        }

        // Nearly invisible full-width rect that receives the mouse events
        // referenced by the <set> element above.
        b.tag("rect", "id", tid, "x", ruler.getOffsetX() + 1, "y", y - 15, "width", ruler.getWidth(), "height",
              height, "fill", "#ffffff", "stroke-width", "0", "opacity", "0.01");

        return helper.write(buf, b.getResult().toString());
    }

    /**
     * Extracts per-segment durations from the transaction's data payload.
     * A payload of the form "_m=a,b,c[&...]" gives millisecond segments
     * (scaled by 1000 here), "_u=a,b,c[&...]" gives microsecond segments
     * as-is; anything else returns null (render as a single bar).
     * Unparseable entries are silently left as 0.
     */
    private int[] getTransactionDurationSegments(Transaction t) {
        String data = t.getData().toString();

        if (data.startsWith("_m=")) {
            int pos = data.indexOf('&');
            String str;

            if (pos < 0) {
                str = data.substring(3);
            } else {
                str = data.substring(3, pos);
            }

            List<String> parts = Splitters.by(',').split(str);
            int len = parts.size();
            int[] segments = new int[len];

            for (int i = 0; i < len; i++) {
                String part = parts.get(i);

                try {
                    segments[i] = Integer.parseInt(part) * 1000;
                } catch (Exception e) {
                    // ignore it
                }
            }

            return segments;
        } else if (data.startsWith("_u=")) {
            int pos = data.indexOf('&');
            String str;

            if (pos < 0) {
                str = data.substring(3);
            } else {
                str = data.substring(3, pos);
            }

            List<String> parts = Splitters.by(',').split(str);
            int len = parts.size();
            int[] segments = new int[len];

            for (int i = 0; i < len; i++) {
                String part = parts.get(i);

                try {
                    segments[i] = Integer.parseInt(part);
                } catch (Exception e) {
                    // ignore it
                }
            }

            return segments;
        } else {
            return null;
        }
    }

    /**
     * Children that get their own waterfall row: nested transactions and
     * "RemoteCall" events. All other events are omitted from the chart.
     */
    protected List<Message> getVisibleChildren(Transaction parent) {
        List<Message> children = new ArrayList<Message>();

        for (Message child : parent.getChildren()) {
            if (child instanceof Transaction) {
                children.add(child);
            } else if (child instanceof Event && "RemoteCall".equals(child.getType())) {
                children.add(child);
            }
        }

        return children;
    }

    @Override
    public void initialize() throws InitializationException {
        m_bufferHelper = new BufferHelper(m_writer);
    }

    // Test/DI hook: swaps the writer and rebuilds the dependent helper.
    public void setBufferWriter(BufferWriter writer) {
        m_writer = writer;
        m_bufferHelper = new BufferHelper(m_writer);
    }

    /**
     * Writes HTML fragments into a ByteBuf; every method returns the number
     * of bytes written so the caller can accumulate the length prefix.
     * NOTE(review): most methods use the platform default charset via
     * String.getBytes(); only writeRaw() forces UTF-8.
     */
    protected static class BufferHelper {
        private static byte[] TABLE1 = "<table class=\"logview\">".getBytes();

        private static byte[] TABLE2 = "</table>".getBytes();

        private static byte[] TR1 = "<tr>".getBytes();

        private static byte[] TR2 = "</tr>".getBytes();

        private static byte[] TD1 = "<td>".getBytes();

        private static byte[] TD2 = "</td>".getBytes();

        private static byte[] NBSP = "&nbsp;".getBytes();

        private static byte[] CRLF = "\r\n".getBytes();

        private BufferWriter m_writer;

        public BufferHelper(BufferWriter writer) {
            m_writer = writer;
        }

        public int crlf(ByteBuf buf) {
            buf.writeBytes(CRLF);
            return CRLF.length;
        }

        public int nbsp(ByteBuf buf, int count) {
            for (int i = 0; i < count; i++) {
                buf.writeBytes(NBSP);
            }

            return count * NBSP.length;
        }

        public int table1(ByteBuf buf) {
            buf.writeBytes(TABLE1);
            return TABLE1.length;
        }

        public int table2(ByteBuf buf) {
            buf.writeBytes(TABLE2);
            return TABLE2.length;
        }

        public int td(ByteBuf buf, String str) {
            return td(buf, str, null);
        }

        // Writes <td [attributes]>str</td>; a null str is written as "null".
        public int td(ByteBuf buf, String str, String attributes) {
            if (str == null) {
                str = "null";
            }

            byte[] data = str.getBytes();
            int count = 0;

            if (attributes == null) {
                buf.writeBytes(TD1);
                count += TD1.length;
            } else {
                String tag = "<td " + attributes + ">";
                byte[] bytes = tag.getBytes();

                buf.writeBytes(bytes);
                count += bytes.length;
            }

            buf.writeBytes(data);
            count += data.length;
            buf.writeBytes(TD2);
            count += TD2.length;
            return count;
        }

        public int td1(ByteBuf buf) {
            buf.writeBytes(TD1);
            return TD1.length;
        }

        public int td1(ByteBuf buf, String attributes) {
            if (attributes == null) {
                buf.writeBytes(TD1);
                return TD1.length;
            } else {
                String tag = "<td " + attributes + ">";
                byte[] bytes = tag.getBytes();

                buf.writeBytes(bytes);
                return bytes.length;
            }
        }

        public int td2(ByteBuf buf) {
            buf.writeBytes(TD2);
            return TD2.length;
        }

        public int tr1(ByteBuf buf, String styleClass) {
            if (styleClass == null) {
                buf.writeBytes(TR1);
                return TR1.length;
            } else {
                String tag = "<tr class=\"" + styleClass + "\">";
                byte[] bytes = tag.getBytes();

                buf.writeBytes(bytes);
                return bytes.length;
            }
        }

        public int tr2(ByteBuf buf) {
            buf.writeBytes(TR2);
            return TR2.length;
        }

        public int write(ByteBuf buf, byte b) {
            buf.writeByte(b);
            return 1;
        }

        public int write(ByteBuf buf, String str) {
            if (str == null) {
                str = "null";
            }

            byte[] data = str.getBytes();

            buf.writeBytes(data);
            return data.length;
        }

        // UTF-8 variant routed through the injected BufferWriter (which may
        // escape or transform bytes); falls back to the default charset only
        // if UTF-8 is somehow unsupported.
        public int writeRaw(ByteBuf buf, String str) {
            if (str == null) {
                str = "null";
            }

            byte[] data;

            try {
                data = str.getBytes("utf-8");
            } catch (UnsupportedEncodingException e) {
                data = str.getBytes();
            }

            return m_writer.writeTo(buf, data);
        }
    }

    /**
     * Tracks the renderer's position in the tree: current depth, current
     * output line, and per-level bit flags describing which branch glyphs
     * (horizontal/vertical connector lines and the node circle) to draw at
     * each depth. Flag bits (see XmlBuilder.branch): 1 = node circle,
     * 2 = line right, 4 = line up, 8 = line left, 16 = line down.
     */
    protected static class Locator {
        private int m_level;

        private int m_line;

        // One entry per level: whether the node at that level is the last
        // child of its parent (controls whether the vertical line continues).
        private Stack<Boolean> m_last = new Stack<Boolean>();

        // One glyph bitmask per level, rewritten as the walk descends.
        private Stack<Integer> m_flags = new Stack<Integer>();

        /**
         * Enters a child level. First rewrites the existing per-level flags
         * so ancestor columns keep (or drop) their vertical continuation
         * lines, then pushes the flags for the new level. {@code atomic}
         * means the new node has no children of its own.
         */
        public void downLevel(boolean atomic) {
            if (m_level > 0) {
                boolean last = m_last.peek();

                m_flags.pop();

                if (last) {
                    m_flags.push(6); // 00110
                } else {
                    m_flags.push(22); // 10110
                }

                // Ancestor columns only keep the pass-through vertical line.
                for (int i = 0; i < m_level - 1; i++) {
                    Integer flag = m_flags.get(i);
                    int f = flag;

                    if (flag == 6) { // 00110
                        f = 0; // 00000
                    } else if (flag == 22) { // 10110
                        f = 20; // 10100
                    }

                    m_flags.set(i, f);
                }
            }

            boolean root = m_level == 0;

            if (atomic) {
                if (root) {
                    m_flags.push(1); // 00001
                } else {
                    m_flags.push(9); // 01001
                }
            } else {
                if (root) {
                    m_flags.push(17); // 10001
                } else {
                    m_flags.push(25); // 11001
                }
            }

            m_last.push(root ? true : false);
            m_level++;
        }

        public Stack<Integer> getFlags() {
            return m_flags;
        }

        public boolean getLast(int level) {
            return m_last.get(level);
        }

        public int getLevel() {
            return m_level;
        }

        public int getLine() {
            return m_line;
        }

        public boolean isFirst() {
            return m_level == 1;
        }

        public boolean isLast() {
            return m_last.peek();
        }

        public void nextLine() {
            m_line++;
        }

        // Marks whether the child about to be rendered is its parent's last.
        public void setLast(boolean last) {
            m_last.pop();
            m_last.push(last);
        }

        @Override
        public String toString() {
            return String.format("Locator[level=%s, line=%s, first=%s, last=%s]", m_level, m_line, isFirst(), isLast());
        }

        public void upLevel() {
            m_level--;
            m_last.pop();
            m_flags.pop();
        }
    }

    /**
     * Tiny builder for SVG path "d" attribute strings (M/m/h/v commands),
     * with mark()/repeat() for duplicating a command subsequence.
     */
    protected static class PathBuilder {
        private int m_marker;

        private StringBuilder m_sb = new StringBuilder(64);

        // Returns the accumulated path and resets for reuse.
        public String build() {
            String result = m_sb.toString();

            m_sb.setLength(0);
            return result;
        }

        public PathBuilder h(int deltaX) {
            m_sb.append(" h").append(deltaX);
            return this;
        }

        public PathBuilder m(int deltaX, int deltaY) {
            m_sb.append(" m").append(deltaX).append(',').append(deltaY);
            return this;
        }

        public PathBuilder mark() {
            m_marker = m_sb.length();
            return this;
        }

        public PathBuilder moveTo(int x, int y) {
            m_sb.append('M').append(x).append(',').append(y);
            return this;
        }

        // Appends `count` copies of everything added since the last mark().
        public PathBuilder repeat(int count) {
            int pos = m_sb.length();

            for (int i = 0; i < count; i++) {
                m_sb.append(m_sb.subSequence(m_marker, pos));
            }

            return this;
        }

        public PathBuilder v(int deltaY) {
            m_sb.append(" v").append(deltaY);
            return this;
        }
    }

    /**
     * Maps time values onto horizontal pixel positions. The constructor picks
     * a "nice" axis division for maxValue (in microseconds): it scales the
     * value down by powers of ten until it is small enough, then chooses a
     * unit count between 6 and 10 from the candidate steps {1, 2, 3, 5}.
     */
    protected static class Ruler {
        private static final int[] UNITS = { 1, 2, 3, 5 };

        private int m_maxValue;

        private int m_unitNum;

        private int m_unitStep;

        public int m_width;

        private int m_height;

        private int m_offsetX;

        private int m_offsetY;

        public Ruler(int maxValue) {
            m_maxValue = maxValue;

            int e = 1;
            int value = maxValue;

            while (true) {
                if (value > 50) {
                    // Round up to the next power-of-ten bucket.
                    value = (value + 9) / 10;
                    e *= 10;
                } else {
                    if (value < 6) {
                        m_unitNum = value;
                        m_unitStep = e;
                    } else {
                        for (int unit : UNITS) {
                            int num = (value + unit - 1) / unit;

                            if (num >= 6 && num <= 10) {
                                m_unitNum = num;
                                m_unitStep = unit * e;
                                break;
                            }
                        }
                    }
                    break;
                }
            }
        }

        // Pixel width for a duration; rounds any positive duration up to at
        // least 1px so short transactions stay visible.
        public int calcWidth(long timeInMicros) {
            int w = (int) (timeInMicros * getUnit() / m_unitStep / 1000);

            if (w == 0 && timeInMicros > 0) {
                w = 1;
            }

            return w;
        }

        // Pixel x-position for an offset from the chart origin, including the
        // left gutter; same minimum-1px rule as calcWidth.
        public int calcX(long timeInMillis) {
            int w = (int) (timeInMillis * getUnit() / m_unitStep);

            if (w == 0 && timeInMillis > 0) {
                w = 1;
            }

            return w + m_offsetX;
        }

        public int getHeight() {
            return m_height;
        }

        public int getMaxValue() {
            return m_maxValue;
        }

        public int getOffsetX() {
            return m_offsetX;
        }

        public int getOffsetY() {
            return m_offsetY;
        }

        // Pixels per axis unit: drawable width (minus gutter and 20px right
        // margin) divided by the number of units.
        public double getUnit() {
            return (m_width - m_offsetX - 20) * 1.0 / m_unitNum;
        }

        public int getUnitNum() {
            return m_unitNum;
        }

        public int getUnitStep() {
            return m_unitStep;
        }

        public int getWidth() {
            return m_width;
        }

        public void setHeight(int height) {
            m_height = height;
        }

        public void setOffsetX(int offsetX) {
            m_offsetX = offsetX;
        }

        public void setOffsetY(int offsetY) {
            m_offsetY = offsetY;
        }

        public void setWidth(int width) {
            m_width = width;
        }

        @Override
        public String toString() {
            return String.format("[%s, %s, %s]", m_maxValue, m_unitNum, m_unitStep);
        }
    }

    /**
     * Minimal XML/SVG markup builder over a shared StringBuilder. Attribute
     * varargs come in (name, value) pairs; null values drop the attribute.
     * Note: text and attribute values are NOT escaped.
     */
    protected static class XmlBuilder {
        private boolean m_compact;

        private int m_level;

        private StringBuilder m_sb = new StringBuilder(8192);

        public XmlBuilder add(String text) {
            m_sb.append(text);
            return this;
        }

        /**
         * Draws the tree-branch glyphs for one row: for each level's bitmask
         * in the locator, emits connector line segments (bits 2/4/8/16 =
         * right/up/left/down from the cell center) and, for bit 1, the red
         * node circle. Each level occupies one `width`-wide column.
         */
        public void branch(Locator locator, int x, int y, int width, int height) {
            PathBuilder p = new PathBuilder();
            int w = width / 2;
            int h = height / 2;
            int r = 2;

            for (Integer flag : locator.getFlags()) {
                int cx = x + w;
                int cy = y - h;

                if ((flag & 2) != 0) { // 00010
                    tag("path", "d", p.moveTo(cx, cy).h(w).build());
                }
                if ((flag & 4) != 0) { // 00100
                    tag("path", "d", p.moveTo(cx, cy).v(-h).build());
                }
                if ((flag & 8) != 0) { // 01000
                    tag("path", "d", p.moveTo(cx, cy).h(-w).build());
                }
                if ((flag & 16) != 0) { // 10000
                    tag("path", "d", p.moveTo(cx, cy).v(h).build());
                }
                if ((flag & 1) != 0) { // 00001
                    m_sb.append("<circle cx=\"").append(cx).append("\" cy=\"").append(cy).append("\" r=\"").append(r)
                          .append("\" stroke=\"red\" fill=\"white\"/>");
                }

                x += width;
            }
        }

        public XmlBuilder element(String name, String value) {
            indent();
            m_sb.append('<').append(name).append('>');
            m_sb.append(value);
            m_sb.append("</").append(name).append(">");
            newLine();
            return this;
        }

        public StringBuilder getResult() {
            return m_sb;
        }

        public XmlBuilder indent() {
            if (!m_compact) {
                for (int i = m_level - 1; i >= 0; i--) {
                    m_sb.append(" ");
                }
            }

            return this;
        }

        public XmlBuilder newLine() {
            m_sb.append("\r\n");
            return this;
        }

        // Self-closing element: <name attr="v".../>.
        public XmlBuilder tag(String name, Object... attributes) {
            return tagWithText(name, null, attributes);
        }

        // Opening tag only; increases the indent level until tag2() closes it.
        public XmlBuilder tag1(String name, Object... attributes) {
            indent();
            m_sb.append('<').append(name);

            int len = attributes.length;

            for (int i = 0; i < len; i += 2) {
                Object key = attributes[i];
                Object val = attributes[i + 1];

                if (val != null) {
                    m_sb.append(' ').append(key).append("=\"").append(val).append('"');
                }
            }

            m_sb.append(">");
            newLine();
            m_level++;
            return this;
        }

        // Closing tag matching a previous tag1().
        public XmlBuilder tag2(String name) {
            m_level--;
            indent();
            m_sb.append("</").append(name).append(">");
            newLine();
            return this;
        }

        // Complete element with optional text body; null text self-closes.
        public XmlBuilder tagWithText(String name, Object text, Object... attributes) {
            indent();
            m_sb.append('<').append(name);

            int len = attributes.length;

            for (int i = 0; i < len; i += 2) {
                Object key = attributes[i];
                Object val = attributes[i + 1];

                if (val != null) {
                    m_sb.append(' ').append(key).append("=\"").append(val).append('"');
                }
            }

            if (text == null) {
                m_sb.append("/>");
            } else {
                m_sb.append('>').append(text).append("</").append(name).append('>');
            }

            newLine();
            return this;
        }
    }
}
apache-2.0
ruspl-afed/dbeaver
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/data/managers/BitStringValueManager.java
1831
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.data.managers; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.ui.data.IValueController; import org.jkiss.dbeaver.ui.data.IValueEditor; import org.jkiss.dbeaver.ui.data.editors.BitStringInlineEditor; import org.jkiss.dbeaver.ui.dialogs.data.DefaultValueViewDialog; /** * Bit string value handler */ public class BitStringValueManager extends BaseValueManager { @NotNull @Override public IValueController.EditType[] getSupportedEditTypes() { return new IValueController.EditType[] {IValueController.EditType.INLINE, IValueController.EditType.PANEL, IValueController.EditType.EDITOR}; } @Nullable @Override public IValueEditor createEditor(@NotNull IValueController controller) throws DBException { switch (controller.getEditType()) { case INLINE: case PANEL: return new BitStringInlineEditor(controller); case EDITOR: return new DefaultValueViewDialog(controller); default: return null; } } }
apache-2.0
ppavlidis/Gemma
gemma-core/src/test/java/ubic/gemma/core/loader/expression/geo/AbstractGeoServiceTest.java
1377
/*
 * The Gemma project
 *
 * Copyright (c) 2006 University of British Columbia
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package ubic.gemma.core.loader.expression.geo;

import ubic.basecode.util.FileTools;
import ubic.gemma.core.util.test.BaseSpringContextTest;

import java.net.URISyntaxException;

/**
 * Base class for GEO loader tests; resolves paths under the shared GEO test
 * data directory on the classpath.
 *
 * @author pavlidis
 */
public abstract class AbstractGeoServiceTest extends BaseSpringContextTest {

    // Classpath root of the GEO test fixtures.
    private static final String GEO_TEST_DATA_ROOT = "/data/loader/expression/geo/";

    /** @return filesystem path of the GEO test data root */
    protected String getTestFileBasePath() throws URISyntaxException {
        return resolveGeoResource( "" );
    }

    /** @return filesystem path of {@code subPath} under the GEO test data root */
    protected String getTestFileBasePath( String subPath ) throws URISyntaxException {
        return resolveGeoResource( subPath );
    }

    // Shared resolution of a classpath resource below the GEO test data root.
    private String resolveGeoResource( String subPath ) throws URISyntaxException {
        return FileTools.resourceToPath( AbstractGeoServiceTest.GEO_TEST_DATA_ROOT + subPath );
    }

}
apache-2.0
alu0100887686/DAA_L1_5_LCS
source/lcs/memoizing/BottomUp.java
826
package lcs.memoizing;

/**
 * Bottom-up (iterative) dynamic-programming computation of the length of the
 * Longest Common Subsequence of the two strings held by the {@link Lcs}
 * superclass. Fills the DP table row by row; cell (i, j) holds the LCS length
 * of the prefixes a[0..i) and b[0..j).
 */
public final class BottomUp extends Lcs {

    public BottomUp(String a, String b) {
        super(a, b);
    }

    /** Delegates shared test/table setup to the superclass. */
    public static void setUp(String a, String b) {
        Lcs.setUp(a, b);
    }

    /**
     * Computes the LCS length and records one operation per filled DP cell
     * (via setnOperations), matching the accounting of the original code.
     *
     * @return the length of the longest common subsequence of a and b
     */
    public int lcs() {
        setnOperations(0);

        // Base cases: an empty prefix has an LCS of length 0. Only the first
        // row and the first column need initialising, so do it in O(n + m)
        // instead of the previous O(n * m) scan over the whole table that
        // only wrote to border cells.
        final int n = super.getTable().getN();
        final int m = super.getTable().getM();
        for (int i = 0; i < n; i++) {
            table.set(i, 0, 0);
        }
        for (int j = 0; j < m; j++) {
            table.set(0, j, 0);
        }

        // Hoist the operands out of the loop bounds; getA()/getB() are
        // plain accessors set in the constructor (assumed stable here).
        final String a = super.getA();
        final String b = super.getB();

        for (int i = 1; i <= a.length(); i++) {
            for (int j = 1; j <= b.length(); j++) {
                if (a.charAt(i - 1) == b.charAt(j - 1)) {
                    // Characters match: extend the LCS of both prefixes.
                    table.set(i, j, table.get(i - 1, j - 1) + 1);
                } else {
                    // No match: best of dropping a character from either side.
                    table.set(i, j, Math.max(table.get(i - 1, j), table.get(i, j - 1)));
                }
                setnOperations(getnOperations() + 1);
            }
        }

        return table.get(a.length(), b.length());
    }
}
apache-2.0
VT-Visionarium/osnap
src/main/java/x3d/fields/SFImage.java
4615
package x3d.fields;

import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import java.util.StringTokenizer;
import java.util.ArrayList;

/**
 * X3D {@code SFImage} field: a width, a height, a number of colour components
 * (0..4) and the pixel values, serialised as a single whitespace-separated
 * string with the pixels rendered in hexadecimal (e.g. "2 1 1 0xff 0x00").
 *
 * @author peter
 */
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = "SFImage")
public class SFImage extends X3DField {

    private Integer width;
    private Integer height;
    private Integer components;
    private ArrayList<Integer> pixels;

    /** Creates an empty image: 0 x 0, zero components, no pixels. */
    public SFImage() {
        this.width = 0;
        this.height = 0;
        this.components = 0;
        this.pixels = new ArrayList<>();
    }

    public Integer getComponents() {
        return components;
    }

    /**
     * @param components number of colour components per pixel
     * @throws IllegalArgumentException if outside the range 0..4
     */
    public void setComponents(Integer components) throws IllegalArgumentException {
        if (components < 0 || components > 4) {
            throw new IllegalArgumentException("Number of components of an SFImage must be between 0 and 4");
        }
        this.components = components;
    }

    public Integer getHeight() {
        return height;
    }

    /**
     * @param height image height in pixels, non-negative
     * @throws IllegalArgumentException if negative
     */
    public void setHeight(Integer height) throws IllegalArgumentException {
        if (height < 0) {
            throw new IllegalArgumentException("Height of SFImage cannot be smaller zero!");
        }
        this.height = height;
    }

    public ArrayList<Integer> getPixels() {
        return pixels;
    }

    /**
     * Replaces the pixel list. Expects exactly width * height entries, one
     * integer per pixel regardless of the component count.
     *
     * @throws IllegalArgumentException if the size does not match width * height
     */
    public void setPixels(ArrayList<Integer> pixels) throws IllegalArgumentException {
        if (pixels.size() != (this.width * this.height)) {
            throw new IllegalArgumentException("Number of pixels (" + pixels.size()
                    + ") deviates from number of pixels calculated (" + (width * height) + ")!");
        }
        this.pixels = pixels;
    }

    /**
     * Appends one pixel value.
     *
     * @throws IllegalArgumentException if width * height pixels are already present
     */
    public void addPixel(Integer pixel) throws IllegalArgumentException {
        if (pixels.size() == (this.width * this.height)) {
            throw new IllegalArgumentException("Number of pixels already at capacity!");
        } else {
            this.pixels.add(pixel);
        }
    }

    /** Removes all pixel values (width/height/components are untouched). */
    public void clearPixels() {
        this.pixels.clear();
    }

    public Integer getWidth() {
        return width;
    }

    /**
     * @param width image width in pixels, non-negative
     * @throws IllegalArgumentException if negative
     */
    public void setWidth(Integer width) throws IllegalArgumentException {
        if (width < 0) {
            throw new IllegalArgumentException("Width of SFImage cannot be smaller zero!");
        }
        this.width = width;
    }

    /** XML value accessor; serialises the field via {@link #toString()}. */
    @XmlValue
    public String getStringValue() {
        return this.toString();
    }

    /**
     * Parses "width height components pixel pixel ..." where each pixel may
     * be decimal or hex (per {@link Integer#decode}).
     *
     * Note: width/height/components are assigned directly, bypassing the
     * setters' range checks, matching the original behaviour.
     *
     * @throws IllegalArgumentException on malformed pixel tokens or a pixel
     *         count that does not match width * height
     */
    public void setStringValue(String value) throws IllegalArgumentException {
        StringTokenizer tokenizer = new StringTokenizer(value, " ", false);

        this.width = Integer.parseInt(tokenizer.nextToken());
        this.height = Integer.parseInt(tokenizer.nextToken());
        this.components = Integer.parseInt(tokenizer.nextToken());

        ArrayList<Integer> somePixels = new ArrayList<>();
        while (tokenizer.hasMoreTokens()) {
            try {
                somePixels.add(Integer.decode(tokenizer.nextToken()));
            } catch (Exception ex) {
                // Preserve the cause instead of only its message.
                throw new IllegalArgumentException(ex.getMessage(), ex);
            }
        }
        this.setPixels(somePixels);
    }

    /**
     * Serialises as "width height components" followed by each pixel in hex,
     * space-separated.
     *
     * FIX: the previous implementation formatted each pixel with the pattern
     * "%scale" -- %s consumed the " " argument and the literal text "cale"
     * was emitted before every pixel value. It also closed the Formatter
     * twice. Pixels are now preceded by a single space and rendered with
     * "%#x" as before.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(width).append(' ').append(height).append(' ').append(components);
        for (Integer value : this.pixels) {
            sb.append(String.format(" %#x", value));
        }
        return sb.toString();
    }
}
apache-2.0
NiceSystems/hrider
src/main/java/hrider/ui/forms/AddFamilyDialog.java
13482
package hrider.ui.forms;

import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import hrider.data.ColumnFamily;
import hrider.ui.design.JCellEditor;
import hrider.ui.design.JTableModel;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellEditor;
import java.awt.*;
import java.awt.event.*;
import java.util.Map;

/**
 * Copyright (C) 2012 NICE Systems ltd.
 * <p/>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Modal dialog used to create a new column family or update an existing one,
 * including editing the family's key/value metadata attributes in a table.
 *
 * @author Igor Cher
 * @version %I%, %G%
 */
public class AddFamilyDialog extends JDialog {

    //region Variables
    private static final long serialVersionUID = 5050481736896721779L;

    private JPanel            contentPane;
    private JButton           buttonOK;
    private JButton           buttonCancel;
    private JTextField        columnFamilyTextField;
    private JButton           addButton;
    private JButton           removeButton;
    private DefaultTableModel metadataModel;
    private JTable            metadataTable;
    // set to true only when the user confirms via OK with a valid family name
    private boolean           okPressed;
    private ColumnFamily      columnFamily;
    //endregion

    //region Constructor

    /**
     * Builds the dialog.
     *
     * @param family the column family to edit, or {@code null} to create a new
     *               one (a default family named "CF" is used as the template).
     */
    public AddFamilyDialog(ColumnFamily family) {
        setContentPane(contentPane);
        setModal(true);
        setTitle(String.format("%s column family", family == null ? "Create new" : "Update"));

        getRootPane().setDefaultButton(buttonOK);

        this.columnFamily = family;
        if (this.columnFamily == null) {
            this.columnFamily = new ColumnFamily("CF");
        }

        columnFamilyTextField.setText(this.columnFamily.getName());

        this.metadataModel = new DefaultTableModel();
        this.metadataTable.setModel(this.metadataModel);

        this.metadataModel.addColumn("Key");
        this.metadataModel.addColumn("Value");

        this.metadataTable.setRowHeight(this.metadataTable.getFont().getSize() + 8);
        this.metadataTable.setAutoResizeMode(JTable.AUTO_RESIZE_SUBSEQUENT_COLUMNS);
        this.metadataTable.getColumn("Key").setCellEditor(new JCellEditor(null, true));
        this.metadataTable.getColumn("Value").setCellEditor(new JCellEditor(null, true));

        // The remove button starts disabled; enable it as soon as any row is selected.
        this.metadataTable.getSelectionModel().addListSelectionListener(
            new ListSelectionListener() {
                @Override
                public void valueChanged(ListSelectionEvent e) {
                    removeButton.setEnabled(true);
                }
            });

        // Pre-populate the table with the family's existing metadata.
        for (Map.Entry<String, String> entry : this.columnFamily.getMetadata().entrySet()) {
            metadataModel.addRow(new Object[]{entry.getKey(), entry.getValue()});
        }

        buttonOK.addActionListener(
            new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    onOK();
                }
            });

        buttonCancel.addActionListener(
            new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    onCancel();
                }
            });

        // call onCancel() when cross is clicked
        setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
        addWindowListener(
            new WindowAdapter() {
                @Override
                public void windowClosing(WindowEvent e) {
                    onCancel();
                }
            });

        // call onCancel() on ESCAPE
        contentPane.registerKeyboardAction(
            new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    onCancel();
                }
            }, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);

        addButton.addActionListener(
            new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    // Append an empty row, select it and scroll it into view so
                    // the user can start typing immediately.
                    metadataModel.addRow(new Object[]{"", ""});

                    int row = metadataModel.getRowCount() - 1;

                    metadataTable.setRowSelectionInterval(row, row);
                    metadataTable.scrollRectToVisible(metadataTable.getCellRect(row, 0, false));
                }
            });

        removeButton.addActionListener(
            new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    if (metadataTable.getRowCount() > 0) {
                        JTableModel.stopCellEditing(metadataTable);

                        int[] selectedRows = metadataTable.getSelectedRows();
                        // Fixed: remove from the highest index down. Removing a row
                        // shifts every subsequent index, so the previous ascending
                        // pass deleted the wrong rows when several were selected.
                        for (int i = selectedRows.length - 1; i >= 0; i--) {
                            metadataModel.removeRow(selectedRows[i]);
                        }
                    }
                }
            });
    }
    //endregion

    //region Public Methods

    /**
     * Shows the dialog modally, centered relative to the owner.
     *
     * @param owner the component to position the dialog against
     * @return {@code true} if the user pressed OK, {@code false} otherwise
     */
    public boolean showDialog(Component owner) {
        this.setComponentOrientation(owner.getComponentOrientation());
        this.pack();
        this.setResizable(false);
        this.setLocationRelativeTo(owner);
        this.setVisible(true);

        return this.okPressed;
    }

    /**
     * Returns the edited column family, or {@code null} if the dialog was cancelled.
     */
    public ColumnFamily getColumnFamily() {
        if (this.okPressed) {
            return columnFamily;
        }
        return null;
    }
    //endregion

    //region Private Methods
    private void onOK() {
        JTableModel.stopCellEditing(metadataTable);

        if (this.columnFamilyTextField.getText().trim().isEmpty()) {
            JOptionPane.showMessageDialog(this, "The column family is required.", "Error", JOptionPane.ERROR_MESSAGE);
        }
        else {
            this.okPressed = true;

            // Copy every fully-filled key/value row into the family's metadata;
            // rows with an empty key or value are silently skipped.
            for (int i = 0 ; i < metadataTable.getRowCount() ; i++) {
                String key = (String)metadataTable.getValueAt(i, 0);
                String val = (String)metadataTable.getValueAt(i, 1);

                if (key != null && !key.isEmpty() && val != null && !val.isEmpty()) {
                    columnFamily.setValue(key, val);
                }
            }

            columnFamily.setName(columnFamilyTextField.getText().trim());
            dispose();
        }
    }

    private void onCancel() {
        // add your code here if necessary
        dispose();
    }
    //endregion

    {
        // GUI initializer generated by IntelliJ IDEA GUI Designer
        // >>> IMPORTANT!! <<<
        // DO NOT EDIT OR ADD ANY CODE HERE!
        $$$setupUI$$$();
    }

    /**
     * Method generated by IntelliJ IDEA GUI Designer
     * >>> IMPORTANT!! <<<
     * DO NOT edit this method OR call it in your code!
     *
     * @noinspection ALL
     */
    private void $$$setupUI$$$() {
        contentPane = new JPanel();
        contentPane.setLayout(new GridLayoutManager(5, 2, new Insets(10, 10, 10, 10), -1, -1));
        contentPane.setMaximumSize(new Dimension(-1, -1));
        contentPane.setMinimumSize(new Dimension(-1, -1));
        contentPane.setPreferredSize(new Dimension(350, 300));
        final JPanel panel1 = new JPanel();
        panel1.setLayout(new GridLayoutManager(2, 1, new Insets(0, 0, 0, 0), -1, -1));
        contentPane.add(
            panel1, new GridConstraints(
            4, 0, 1, 2, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, 1, null, null, null, 0, false));
        final JPanel panel2 = new JPanel();
        panel2.setLayout(new GridLayoutManager(1, 2, new Insets(0, 0, 0, 0), -1, -1, true, false));
        panel1.add(
            panel2, new GridConstraints(
            1, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_VERTICAL,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
        buttonOK = new JButton();
        buttonOK.setText("OK");
        panel2.add(
            buttonOK, new GridConstraints(
            0, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        buttonCancel = new JButton();
        buttonCancel.setText("Cancel");
        panel2.add(
            buttonCancel, new GridConstraints(
            0, 1, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        final JSeparator separator1 = new JSeparator();
        panel1.add(
            separator1, new GridConstraints(
            0, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
        final JLabel label1 = new JLabel();
        label1.setText("Column metadata");
        contentPane.add(
            label1, new GridConstraints(
            2, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED,
            GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(76, 14), null, 0, false));
        final JToolBar toolBar1 = new JToolBar();
        toolBar1.setFloatable(false);
        contentPane.add(
            toolBar1, new GridConstraints(
            2, 1, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(-1, 20), null, 0, false));
        addButton = new JButton();
        addButton.setIcon(new ImageIcon(getClass().getResource("/images/add.png")));
        addButton.setMaximumSize(new Dimension(27, 27));
        addButton.setMinimumSize(new Dimension(27, 27));
        addButton.setPreferredSize(new Dimension(27, 27));
        addButton.setText("");
        addButton.setToolTipText("Add metadata attribute");
        toolBar1.add(addButton);
        removeButton = new JButton();
        removeButton.setEnabled(false);
        removeButton.setHorizontalAlignment(0);
        removeButton.setIcon(new ImageIcon(getClass().getResource("/images/delete.png")));
        removeButton.setMaximumSize(new Dimension(27, 27));
        removeButton.setMinimumSize(new Dimension(27, 27));
        removeButton.setPreferredSize(new Dimension(27, 27));
        removeButton.setText("");
        removeButton.setToolTipText("Remove metadata attribute");
        toolBar1.add(removeButton);
        final JScrollPane scrollPane1 = new JScrollPane();
        contentPane.add(
            scrollPane1, new GridConstraints(
            3, 0, 1, 2, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
        metadataTable = new JTable();
        scrollPane1.setViewportView(metadataTable);
        final JSeparator separator2 = new JSeparator();
        contentPane.add(
            separator2, new GridConstraints(
            1, 0, 1, 2, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
        final JPanel panel3 = new JPanel();
        panel3.setLayout(new GridLayoutManager(1, 2, new Insets(0, 0, 0, 0), -1, -1));
        contentPane.add(
            panel3, new GridConstraints(
            0, 0, 1, 2, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
        final JLabel label2 = new JLabel();
        label2.setText("Column Family:");
        panel3.add(
            label2, new GridConstraints(
            0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        columnFamilyTextField = new JTextField();
        panel3.add(
            columnFamilyTextField, new GridConstraints(
            0, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(150, 24), null, 0, false));
        label2.setLabelFor(columnFamilyTextField);
    }

    /**
     * @noinspection ALL
     */
    public JComponent $$$getRootComponent$$$() {
        return contentPane;
    }
}
apache-2.0
eljefe6a/incubator-beam
runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactory.java
3740
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.dataflow;

import java.util.List;
import org.apache.beam.runners.core.construction.ForwardingPTransform;
import org.apache.beam.runners.core.construction.PTransformReplacements;
import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.common.runner.v1.RunnerApi.DisplayData;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.runners.PTransformOverrideFactory;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.ParDo.SingleOutput;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;

/**
 * A {@link PTransformOverrideFactory} that swaps each {@link ParDo.SingleOutput} for an equivalent
 * {@link ParDoSingle}. Because {@link ParDoSingle} is a primitive {@link PTransform}, this keeps
 * {@link DisplayData} attached to every {@link ParDo} when the {@link DataflowRunner} translates
 * the pipeline.
 */
public class PrimitiveParDoSingleFactory<InputT, OutputT>
    extends SingleInputOutputOverrideFactory<
        PCollection<? extends InputT>, PCollection<OutputT>, ParDo.SingleOutput<InputT, OutputT>> {
  @Override
  public PTransformReplacement<PCollection<? extends InputT>, PCollection<OutputT>>
      getReplacementTransform(
          AppliedPTransform<
                  PCollection<? extends InputT>, PCollection<OutputT>,
                  SingleOutput<InputT, OutputT>>
              transform) {
    // Extract the single main input and the coder of the single main output, then wrap the
    // user's ParDo in its primitive counterpart.
    PCollection<? extends InputT> mainInput =
        PTransformReplacements.getSingletonMainInput(transform);
    Coder<OutputT> outputCoder =
        PTransformReplacements.getSingletonMainOutput(transform).getCoder();
    return PTransformReplacement.of(
        mainInput, new ParDoSingle<>(transform.getTransform(), outputCoder));
  }

  /** A primitive, single-output {@link ParDo} that forwards to the original transform. */
  public static class ParDoSingle<InputT, OutputT>
      extends ForwardingPTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
    private final ParDo.SingleOutput<InputT, OutputT> original;
    private final Coder<OutputT> outputCoder;

    private ParDoSingle(SingleOutput<InputT, OutputT> original, Coder<OutputT> outputCoder) {
      this.original = original;
      this.outputCoder = outputCoder;
    }

    /** Returns the user's {@link DoFn} from the wrapped transform. */
    public DoFn<InputT, OutputT> getFn() {
      return original.getFn();
    }

    /** Returns the side inputs of the wrapped transform. */
    public List<PCollectionView<?>> getSideInputs() {
      return original.getSideInputs();
    }

    @Override
    public PCollection<OutputT> expand(PCollection<? extends InputT> input) {
      // Primitive transform: produce the output PCollection directly instead of
      // expanding into sub-transforms.
      return PCollection.createPrimitiveOutputInternal(
          input.getPipeline(), input.getWindowingStrategy(), input.isBounded(), outputCoder);
    }

    @Override
    protected PTransform<PCollection<? extends InputT>, PCollection<OutputT>> delegate() {
      return original;
    }
  }
}
apache-2.0
nadam/tg-bot-api
src/main/java/se/anyro/tgbotapi/types/inline/InlineQueryResultVoice.java
786
package se.anyro.tgbotapi.types.inline;

import se.anyro.tgbotapi.types.reply_markup.InlineKeyboardMarkup;

/**
 * Inline query result referring to a voice recording by URL. Field names use
 * snake_case to mirror the Bot API JSON payload described in the linked docs.
 *
 * @see <a href="https://core.telegram.org/bots/api#inlinequeryresultvoice">Official documentation of
 *      InlineQueryResultVoice</a>
 */
public class InlineQueryResultVoice extends InlineQueryResult {
    // Result type discriminator; always "voice" for this class.
    public String type = "voice";
    // Unique identifier for this result.
    public String id;
    // URL of the voice recording.
    public String voice_url;
    // Title shown for the result.
    public String title;
    // Optional caption and its parse mode.
    public String caption;
    public String parse_mode;
    // Optional recording duration in seconds (0 = unspecified).
    public int voice_duration;
    // Optional inline keyboard attached to the sent message.
    public InlineKeyboardMarkup reply_markup;
    // Optional content to send instead of the voice message.
    public InputMessageContent input_message_content;

    /**
     * Creates a result with the three mandatory fields; the remaining public
     * fields are optional and may be set directly after construction.
     */
    public InlineQueryResultVoice(String id, String voiceUrl, String title) {
        this.id = id;
        this.voice_url = voiceUrl;
        this.title = title;
    }
}
apache-2.0
SES-fortiss/SmartGridCoSimulation
projects/memapCore/src/main/java/memap/examples/BrowseNodes.java
4011
// This class implements the browsing of nodes in the OPC UA server.
// A similar file exists in the memap.helperOPCua package (ReadClient.java).
package memap.examples;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;

import org.eclipse.milo.opcua.sdk.client.OpcUaClient;
import org.eclipse.milo.opcua.stack.core.AttributeId;
import org.eclipse.milo.opcua.stack.core.Identifiers;
import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue;
import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId;
import org.eclipse.milo.opcua.stack.core.types.builtin.QualifiedName;
import org.eclipse.milo.opcua.stack.core.types.builtin.StatusCode;
import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger;
import org.eclipse.milo.opcua.sdk.client.api.nodes.Node;
import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaMonitoredItem;
import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaSubscription;
import org.eclipse.milo.opcua.stack.core.types.enumerated.BrowseDirection;
import org.eclipse.milo.opcua.stack.core.types.enumerated.BrowseResultMask;
import org.eclipse.milo.opcua.stack.core.types.enumerated.MonitoringMode;
import org.eclipse.milo.opcua.stack.core.types.enumerated.NodeClass;
import org.eclipse.milo.opcua.stack.core.types.enumerated.TimestampsToReturn;
import org.eclipse.milo.opcua.stack.core.types.structured.BrowseDescription;
import org.eclipse.milo.opcua.stack.core.types.structured.BrowsePath;
import org.eclipse.milo.opcua.stack.core.types.structured.BrowsePathResult;
import org.eclipse.milo.opcua.stack.core.types.structured.BrowseResult;
import org.eclipse.milo.opcua.stack.core.types.structured.MonitoredItemCreateRequest;
import org.eclipse.milo.opcua.stack.core.types.structured.MonitoringParameters;
import org.eclipse.milo.opcua.stack.core.types.structured.ReadValueId;
import org.eclipse.milo.opcua.stack.core.types.structured.ReferenceDescription;

import static org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.Unsigned.uint;
import static org.eclipse.milo.opcua.stack.core.util.ConversionUtil.toList;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.collect.Lists.newArrayList;

import memap.helperOPCua.*;

@SuppressWarnings("unused")
public class BrowseNodes implements MemapClient {

    public static void main(String[] args) throws Exception {
        BrowseNodes example = new BrowseNodes();
        new OpcuaClient(example).run();
    }

    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Connects to the server, logs every Object/Variable node reachable from
     * the root folder and then completes the supplied future with the client.
     */
    @Override
    public void run(OpcUaClient client, CompletableFuture<OpcUaClient> future) throws Exception {
        // block until the session is established
        client.connect().get();

        // walk the full address space starting at the root folder
        browseNode("", client, Identifiers.RootFolder);

        future.complete(client);
    }

    /**
     * Recursively logs the browse name of every Object and Variable node below
     * {@code browseRoot}, indenting one extra level per depth of recursion.
     */
    private void browseNode(String indent, OpcUaClient client, NodeId browseRoot) {
        // restrict the browse to Object and Variable nodes, returning all fields
        UInteger nodeClassMask = uint(NodeClass.Object.getValue() | NodeClass.Variable.getValue());
        UInteger resultMask = uint(BrowseResultMask.All.getValue());

        BrowseDescription description = new BrowseDescription(
            browseRoot,
            BrowseDirection.Forward,
            Identifiers.References,
            true,
            nodeClassMask,
            resultMask
        );

        try {
            BrowseResult result = client.browse(description).get();

            List<ReferenceDescription> children = toList(result.getReferences());
            for (ReferenceDescription child : children) {
                logger.info("{} Node={}", indent, child.getBrowseName().getName());

                // descend into the child node, if its id is resolvable locally
                child.getNodeId().local().ifPresent(childId -> browseNode(indent + " ", client, childId));
            }
        } catch (InterruptedException | ExecutionException e) {
            logger.error("Browsing nodeId={} failed: {}", browseRoot, e.getMessage(), e);
        }
    }
}
apache-2.0
pentaho/pentaho-mongodb-plugin
pentaho-mongodb-plugin/src/test/java/org/pentaho/di/trans/steps/mongodboutput/MongoDbOutputMetaInjectionTest.java
7631
/*!
 * Copyright 2010 - 2021 Hitachi Vantara. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.pentaho.di.trans.steps.mongodboutput;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.injection.BaseMetadataInjectionTest;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogChannelInterfaceFactory;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * MDI (metadata injection) test for MongoDbOutput. Each check() call verifies that
 * injecting the named MDI entry actually writes through to the corresponding
 * field or getter of {@link MongoDbOutputMeta}.
 */
public class MongoDbOutputMetaInjectionTest extends BaseMetadataInjectionTest<MongoDbOutputMeta> {

  // saved so tearDown() can restore the factory replaced by the mock
  private LogChannelInterfaceFactory oldLogChannelInterfaceFactory;

  @Before
  public void setup() throws IllegalAccessException {
    oldLogChannelInterfaceFactory = KettleLogStore.getLogChannelInterfaceFactory();
    setKettleLogFactoryWithMock();
    setup( new MongoDbOutputMeta() );
  }

  // Replaces the global Kettle log factory with a mock so the meta under test
  // can log without a fully initialized Kettle environment.
  public static void setKettleLogFactoryWithMock() {
    LogChannelInterfaceFactory logChannelInterfaceFactory = mock( LogChannelInterfaceFactory.class );
    LogChannelInterface logChannelInterface = mock( LogChannelInterface.class );
    when( logChannelInterfaceFactory.create( any() ) ).thenReturn( logChannelInterface );
    KettleLogStore.setLogChannelInterfaceFactory( logChannelInterfaceFactory );
  }

  @After
  public void tearDown() {
    // restore the original factory so other tests see unmodified global state
    KettleLogStore.setLogChannelInterfaceFactory( oldLogChannelInterfaceFactory );
  }

  @Test
  public void test() throws Exception {
    // --- write-mode flags ---
    check( "TRUNCATE", new BooleanGetter() {
      public boolean get() {
        return meta.m_truncate;
      }
    } );
    check( "UPDATE", new BooleanGetter() {
      public boolean get() {
        return meta.m_update;
      }
    } );
    check( "UPSERT", new BooleanGetter() {
      public boolean get() {
        return meta.m_upsert;
      }
    } );
    check( "MULTI", new BooleanGetter() {
      public boolean get() {
        return meta.m_multi;
      }
    } );
    check( "MODIFIER_UPDATE", new BooleanGetter() {
      public boolean get() {
        return meta.m_modifierUpdate;
      }
    } );
    // --- batching and retry options ---
    check( "BATCH_INSERT_SIZE", new StringGetter() {
      public String get() {
        return meta.m_batchInsertSize;
      }
    } );
    check( "RETRY_NUMBER", new StringGetter() {
      public String get() {
        return meta.getWriteRetries();
      }
    } );
    check( "RETRY_DELAY", new StringGetter() {
      public String get() {
        return meta.getWriteRetryDelay();
      }
    } );
    // --- connection settings ---
    check( "HOSTNAME", new StringGetter() {
      public String get() {
        return meta.getHostnames();
      }
    } );
    check( "PORT", new StringGetter() {
      public String get() {
        return meta.getPort();
      }
    } );
    check( "DATABASE_NAME", new StringGetter() {
      public String get() {
        return meta.getDbName();
      }
    } );
    check( "COLLECTION", new StringGetter() {
      public String get() {
        return meta.getCollection();
      }
    } );
    // --- authentication ---
    check( "AUTH_DATABASE", new StringGetter() {
      public String get() {
        return meta.getAuthenticationDatabaseName();
      }
    } );
    check( "AUTH_USERNAME", new StringGetter() {
      public String get() {
        return meta.getAuthenticationUser();
      }
    } );
    check( "AUTH_PASSWORD", new StringGetter() {
      public String get() {
        return meta.getAuthenticationPassword();
      }
    } );
    check( "AUTH_MECHANISM", new StringGetter() {
      public String get() {
        return meta.getAuthenticationMechanism();
      }
    } );
    check( "AUTH_KERBEROS", new BooleanGetter() {
      public boolean get() {
        return meta.getUseKerberosAuthentication();
      }
    } );
    // --- timeouts and transport ---
    check( "TIMEOUT_CONNECTION", new StringGetter() {
      public String get() {
        return meta.getConnectTimeout();
      }
    } );
    check( "TIMEOUT_SOCKET", new StringGetter() {
      public String get() {
        return meta.getSocketTimeout();
      }
    } );
    check( "USE_SSL_SOCKET_FACTORY", new BooleanGetter() {
      public boolean get() {
        return meta.isUseSSLSocketFactory();
      }
    } );
    check( "USE_CONNECTION_STRING", new BooleanGetter() {
      @Override public boolean get() {
        return meta.isUseConnectionString();
      }
    } );
    check( "USE_LEGACY_OPTIONS", new BooleanGetter() {
      @Override public boolean get() {
        return meta.isUseLegacyOptions();
      }
    } );
    check( "CONNECTION_STRING", new StringGetter() {
      @Override public String get() {
        return meta.getConnectionString();
      }
    } );
    // --- replica set behaviour ---
    check( "READ_PREFERENCE", new StringGetter() {
      public String get() {
        return meta.getReadPreference();
      }
    } );
    check( "USE_ALL_REPLICA_SET_MEMBERS", new BooleanGetter() {
      public boolean get() {
        return meta.getUseAllReplicaSetMembers();
      }
    } );
    // --- per-field mapping options (first mongo field) ---
    check( "INCOMING_FIELD_NAME", new StringGetter() {
      public String get() {
        return meta.getMongoFields().get( 0 ).m_incomingFieldName;
      }
    } );
    check( "MONGO_DOCUMENT_PATH", new StringGetter() {
      public String get() {
        return meta.getMongoFields().get( 0 ).m_mongoDocPath;
      }
    } );
    check( "INCOMING_AS_MONGO", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoFields().get( 0 ).m_useIncomingFieldNameAsMongoFieldName;
      }
    } );
    check( "UPDATE_MATCH_FIELD", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoFields().get( 0 ).m_updateMatchField;
      }
    } );
    check( "MODIFIER_OPERATION", new StringGetter() {
      public String get() {
        return meta.getMongoFields().get( 0 ).m_modifierUpdateOperation;
      }
    } );
    check( "MODIFIER_POLICY", new StringGetter() {
      public String get() {
        return meta.getMongoFields().get( 0 ).m_modifierOperationApplyPolicy;
      }
    } );
    check( "INSERT_NULL", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoFields().get( 0 ).insertNull;
      }
    } );
    check( "JSON", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoFields().get( 0 ).m_JSON;
      }
    } );
    // --- index options (first mongo index) ---
    check( "INDEX_FIELD", new StringGetter() {
      public String get() {
        return meta.getMongoIndexes().get( 0 ).m_pathToFields;
      }
    } );
    check( "DROP", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoIndexes().get( 0 ).m_drop;
      }
    } );
    check( "UNIQUE", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoIndexes().get( 0 ).m_unique;
      }
    } );
    check( "SPARSE", new BooleanGetter() {
      public boolean get() {
        return meta.getMongoIndexes().get( 0 ).m_sparse;
      }
    } );
    // --- read preference tag sets (first entry) ---
    check( "TAG_SET", new StringGetter() {
      public String get() {
        return meta.getReadPrefTagSets().get( 0 );
      }
    } );
  }
}
apache-2.0
Communote/communote-server
communote/persistence/src/main/java/com/communote/server/core/vo/query/blog/BlogQueryParameters.java
24126
package com.communote.server.core.vo.query.blog;

import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.hibernate.criterion.MatchMode;

import com.communote.server.api.ServiceLocator;
import com.communote.server.api.core.blog.BlogAccessException;
import com.communote.server.api.core.blog.BlogData;
import com.communote.server.api.core.blog.BlogManagement;
import com.communote.server.api.core.blog.BlogNotFoundException;
import com.communote.server.api.core.common.IdentifiableEntityData;
import com.communote.server.api.core.config.type.ClientProperty;
import com.communote.server.api.core.tag.TagStoreType;
import com.communote.server.core.blog.helper.BlogManagementHelper;
import com.communote.server.core.security.SecurityHelper;
import com.communote.server.core.tag.TagStoreManagement;
import com.communote.server.core.vo.query.PropertyQueryParameters;
import com.communote.server.model.blog.Blog;
import com.communote.server.model.blog.BlogConstants;
import com.communote.server.model.blog.BlogRole;
import com.communote.server.persistence.tag.TagStore;

/**
 * Query parameters for retrieving blogs/topics: filtering by tags, access roles, parent topics,
 * external objects, text search and more.
 *
 * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
 */
public class BlogQueryParameters extends PropertyQueryParameters {

    /**
     * Parameter for the user ID.
     */
    public final static String PARAM_USER_ID = "userId";

    /**
     * the parameter name for tags starting with some pattern
     */
    public final static String PARAM_BLOG_TAGPREFIX_SEARCH = "blogTagPrefix";

    /**
     * Parameter for write access roles.
     */
    public final static String PARAM_WRITE_ACCESS_ROLES = "write_access_roles";

    /**
     * Parameter for read access roles.
     */
    public final static String PARAM_READ_ACCESS_ROLES = "read_access_roles";

    /**
     * Parameter for manager access roles.
     */
    public final static String PARAM_MANAGER_ACCESS_ROLES = "manager_access_roles";

    /**
     * Parameter for the IDs of the blogs to ignore.
     */
    public final static String PARAM_BLOGS_TO_IGNORE = "blogs_to_ignore";

    /**
     * Parameter for the blog IDs.
     */
    public final static String PARAM_BLOG_IDS = "blog_ids";

    /**
     * Parameter for the parent topic IDs.
     */
    public final static String PARAM_PARENT_TOPIC_IDS = "parent_topic_ids";

    /**
     * Parameter for the blog aliases.
     */
    public final static String PARAM_BLOG_ALIASES = "blog_aliases";

    /**
     * Parameter for the last modification date
     */
    public final static String PARAM_LAST_MODIFICATION_DATE = "lastModificationDate";

    /**
     * Parameter for the event type
     */
    public final static String PARAM_EVENT_TYPE = "eventType";

    /**
     * Parameter for the external system ID
     */
    public final static String PARAM_EXTERNAL_OBJECT_SYSTEM_ID = "externalObjectSystemId";

    /**
     * Parameter for the external object ID
     */
    public final static String PARAM_EXTERNAL_OBJECT_ID = "externalObjectId";

    /** Bit flag for searching in the blog title, see {@link #setSearchFieldMask(int)}. */
    public final static int SEARCH_FIELD_TITLE = 1;

    /** Bit flag for searching in the blog description, see {@link #setSearchFieldMask(int)}. */
    public final static int SEARCH_FIELD_DESCRIPTION = 2;

    /** Bit flag for searching in the blog identifier, see {@link #setSearchFieldMask(int)}. */
    public final static int SEARCH_FIELD_IDENTIFIER = 4;

    /** Bit flag for searching in the blog tags, see {@link #setSearchFieldMask(int)}. */
    public final static int SEARCH_FIELD_BLOG_TAGS = 8;

    /**
     * the parameter name prefix for the selected tags
     */
    private final static String PARAM_BLOG_TAG_PREFIX = "blogTag";

    /**
     * Parameter prefix for blog text searches.
     */
    private static final String PARAM_BLOG_TEXT_SEARCH_PREFIX = "blogTextSearch";

    /**
     * @return a map with the blog access roles, keys are the access parameters
     */
    public static Map<String, BlogRole[]> getBlogAccessParameter() {
        Map<String, BlogRole[]> result = new HashMap<String, BlogRole[]>();
        result.put(PARAM_READ_ACCESS_ROLES, new BlogRole[] { BlogRole.VIEWER, BlogRole.MEMBER,
                BlogRole.MANAGER });
        result.put(PARAM_WRITE_ACCESS_ROLES, new BlogRole[] { BlogRole.MEMBER, BlogRole.MANAGER });
        result.put(PARAM_MANAGER_ACCESS_ROLES, new BlogRole[] { BlogRole.MANAGER });
        return result;
    }

    /** IDs of tags to filter for. */
    private final Set<Long> tagIds = new HashSet<Long>();

    /** Maps a TagStore alias to the IDs of tags (within that store) to filter for. */
    private final Map<String, Set<String>> tagStoreTagIds = new HashMap<String, Set<String>>();

    /**
     * Lazily computed in {@link #isMultilingualTagPrefixSearch()}; null means "not yet computed".
     */
    private Boolean multilingualTagPrefixSearch = null;

    private Set<String> tagStoreAliases = new HashSet<String>();
    private Long userId;
    private TopicAccessLevel accessLevel;
    private Date minimumLastModificationDate;
    private Long[] blogsToExclude;
    private Long[] blogIds;
    private String[] blogAliases;
    private int searchFieldMask;
    private MatchMode matchMode = MatchMode.ANYWHERE;
    private String[] textFilters;
    private String[] textFilterParamNames;
    private Boolean showOnlyFollowedItems = false;
    private boolean forceAllTopics = false;
    private boolean excludeToplevelTopics;
    private boolean showOnlyToplevelTopics;
    private boolean showOnlyRootTopics = false;
    private String[] tags;
    private String tagPrefix;
    private boolean renderTagsJoin;
    private boolean includeChildTopics;
    private String externalObjectSystemId;
    private String externalObjectId;
    private Long[] parentTopicIds = new Long[0];

    /**
     * Create a new parameters object.
     */
    public BlogQueryParameters() {
        // include top level topics by default if enabled
        excludeToplevelTopics = !ClientProperty.TOP_LEVEL_TOPICS_ENABLED
                .getValue(ClientProperty.DEFAULT_TOP_LEVEL_TOPICS_ENABLED);
    }

    /**
     * @param tagId
     *            TagId to filter for.
     */
    public void addTagId(Long tagId) {
        tagIds.add(tagId);
    }

    /**
     * @param tagStoreAlias
     *            The alias of the TagStore.
     * @param tagStoreTagIds
     *            Collection of tag ids to add for the given TagStore.
     */
    public void addTagStoreTagId(String tagStoreAlias, Collection<String> tagStoreTagIds) {
        Set<String> tags = this.tagStoreTagIds.get(tagStoreAlias);
        if (tags == null) {
            tags = new HashSet<String>();
            this.tagStoreTagIds.put(tagStoreAlias, tags);
        }
        tags.addAll(tagStoreTagIds);
    }

    /**
     * @param tagStoreAlias
     *            The alias of the TagStore.
     * @param tagStoreTagId
     *            The id of the tag within the TagStore.
     */
    public void addTagStoreTagId(String tagStoreAlias, String tagStoreTagId) {
        Set<String> tags = tagStoreTagIds.get(tagStoreAlias);
        if (tags == null) {
            tags = new HashSet<String>();
            tagStoreTagIds.put(tagStoreAlias, tags);
        }
        tags.add(tagStoreTagId);
    }

    /**
     * Returns the blog access level.
     *
     * @return the access level
     */
    public TopicAccessLevel getAccessLevel() {
        return accessLevel;
    }

    /**
     * The blog aliases to which the result set will be reduced.
     *
     * @return the aliases or null
     */
    public String[] getBlogAliases() {
        return blogAliases;
    }

    /**
     * The blog IDs to which the result set will be reduced.
     *
     * @return the IDs or null
     */
    public Long[] getBlogIds() {
        return blogIds;
    }

    /**
     * Returns the blog IDs which will not be included in the response.
     *
     * @return the blogsToExclude
     */
    public Long[] getBlogsToExclude() {
        return blogsToExclude;
    }

    /**
     * Get the parameter name for the selected tag of a given index (if its a parameter list)
     *
     * @param index
     *            the index
     * @return the parameter name to the index
     */
    public String getBlogTagConstant(int index) {
        return PARAM_BLOG_TAG_PREFIX + index;
    }

    /**
     * If both {@link #getExternalObjectId()} and {@link #getExternalObjectSystemId()} are set, both
     * must match on the same external object
     *
     * @return filter for topics which have an external object with the given external id assigned.
     */
    public String getExternalObjectId() {
        return externalObjectId;
    }

    /**
     * If both {@link #getExternalObjectId()} and {@link #getExternalObjectSystemId()} are set, both
     * must match on the same external object
     *
     * @return filter for topics which have an external object with the given system id assigned.
     */
    public String getExternalObjectSystemId() {
        return externalObjectSystemId;
    }

    /**
     * @return the matchMode
     */
    public MatchMode getMatchMode() {
        return matchMode;
    }

    /**
     * @return the minimum last modification the blog must be modified AFTER
     */
    public Date getMinimumLastModificationDate() {
        return minimumLastModificationDate;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String, Object> getParameters() {
        Map<String, Object> parameter = super.getParameters();
        parameter.put(PARAM_USER_ID, userId);
        parameter.put(PARAM_BLOGS_TO_IGNORE, blogsToExclude);
        parameter.put(PARAM_BLOG_IDS, blogIds);
        parameter.put(PARAM_BLOG_ALIASES, blogAliases);
        if (getTags() != null) {
            // tags are matched case-insensitively, hence the lower-casing here
            for (int i = 0; i < getTags().length; i++) {
                parameter.put(getBlogTagConstant(i), getTags()[i].toLowerCase(Locale.ENGLISH));
            }
        }
        if (minimumLastModificationDate != null) {
            parameter.put(PARAM_LAST_MODIFICATION_DATE, minimumLastModificationDate);
        }
        if (searchFieldMask != 0 && textFilters != null) {
            putParametersForSearch(parameter, textFilterParamNames, textFilters, getMatchMode(),
                    true);
        }
        if (StringUtils.isNotBlank(tagPrefix)) {
            // prefix search always matches at the start of the tag
            putParametersForSearch(parameter, new String[] { PARAM_BLOG_TAGPREFIX_SEARCH },
                    new String[] { tagPrefix }, MatchMode.START, true);
        }
        if (getParentTopicIds().length > 0) {
            parameter.put(PARAM_PARENT_TOPIC_IDS, parentTopicIds);
        }
        parameter.putAll(getBlogAccessParameter());
        parameter.put(PARAM_EXTERNAL_OBJECT_ID, this.externalObjectId);
        parameter.put(PARAM_EXTERNAL_OBJECT_SYSTEM_ID, this.externalObjectSystemId);
        return parameter;
    }

    /**
     * @return Array of parent topics to filter for. Is never null.
     */
    public Long[] getParentTopicIds() {
        return parentTopicIds;
    }

    /**
     * @return the tagIds
     */
    public Set<Long> getTagIds() {
        return tagIds;
    }

    /**
     * @return the prefix tags must have
     */
    public String getTagPrefix() {
        return tagPrefix;
    }

    /**
     * @return the tags
     */
    public String[] getTags() {
        return tags;
    }

    /**
     * @return set of aliases identifying tag stores
     */
    public Set<String> getTagStoreAliases() {
        return tagStoreAliases;
    }

    /**
     * @return the tagStoreTagIds
     */
    public Map<String, Set<String>> getTagStoreTagIds() {
        return tagStoreTagIds;
    }

    /**
     * Returns the strings to be found in title, description or identifier of blog.
     *
     * @return the textFilter
     */
    public String[] getTextFilter() {
        return textFilters;
    }

    /**
     * @return the names of the parameters that substitute the text search values
     */
    public String[] getTextFilterParamNames() {
        return textFilterParamNames;
    }

    /**
     * @return the user id
     */
    public Long getUserId() {
        return userId;
    }

    /**
     * @return whether to exclude topics marked as top level topic from the result. If top level
     *         topics are disabled this method returns true by default. If
     *         {@link #isShowOnlyToplevelTopics()} returns true this flag is ignored.
     */
    public boolean isExcludeToplevelTopics() {
        return excludeToplevelTopics;
    }

    /**
     * @return the forceAllTopics flag. If true, all topics should be respected, regardless of
     *         access roles. The current user has to be a client manager to use this flag.
     */
    public boolean isForceAllTopics() {
        return forceAllTopics;
    }

    /**
     * @return Whether to include the child topics in the query. This parameter will only be ignored
     *         if the blogIDs parameter is unset.
     */
    public boolean isIncludeChildTopics() {
        return includeChildTopics;
    }

    /**
     * @return true if a tag prefix query should check the translations of tags
     */
    public boolean isMultilingualTagPrefixSearch() {
        // computed lazily and cached; the cache is invalidated by setTagStoreAliases
        if (multilingualTagPrefixSearch == null) {
            TagStoreManagement tagStoreManagement = ServiceLocator.instance().getService(
                    TagStoreManagement.class);
            for (String storeAlias : this.tagStoreAliases) {
                TagStore store = tagStoreManagement.getTagStore(storeAlias, null);
                if (store.isMultilingual()) {
                    multilingualTagPrefixSearch = Boolean.TRUE;
                    break;
                } else {
                    multilingualTagPrefixSearch = Boolean.FALSE;
                }
            }
            if (multilingualTagPrefixSearch == null) {
                // no aliases set: fall back to checking all BLOG tag stores
                multilingualTagPrefixSearch = tagStoreManagement
                        .hasMultilingualTagStore(TagStoreType.Types.BLOG);
            }
        }
        return multilingualTagPrefixSearch;
    }

    /**
     * @return the renderTagsJoin
     */
    public boolean isRenderTagsJoin() {
        return renderTagsJoin;
    }

    /**
     * Tests whether the query should search in a specific field. The field is identified with one
     * of the SEARCH_FIELD_X constants.
     *
     * @param fieldConstant
     *            one of the SEARCH_FIELD_X of this class
     * @return true if the constant is enabled in the search field mask set by
     *         {@link #setSearchFieldMask(int)}
     */
    protected boolean isSearchInField(int fieldConstant) {
        return (this.searchFieldMask & fieldConstant) != 0;
    }

    /**
     * @return Whether only root topics should be retrieved. Root topics are all topics which do not
     *         have a parent topic. Topics whose parent topics are not readable by the current user
     *         are not treated as root topics. This flag is ignored if
     *         {@link #isShowOnlyToplevelTopics()} returns true.
     */
    public boolean isShowOnlyRootTopics() {
        return showOnlyRootTopics;
    }

    /**
     * @return True, if only topics marked as top level topic should be returned. If this flag is
     *         true, the return values of {@link #isShowOnlyRootTopics()} and
     *         {@link #isExcludeToplevelTopics()} will be ignored.
     */
    public boolean isShowOnlyToplevelTopics() {
        return showOnlyToplevelTopics;
    }

    /**
     * Always true because {@link #transformResultItem(Object)} must be invoked for each result.
     *
     * @return true.
     */
    @Override
    public boolean needTransformListItem() {
        return true;
    }

    /**
     * Sets the blog access level
     *
     * @param accessLevel
     *            the access level to filter for
     */
    public void setAccessLevel(TopicAccessLevel accessLevel) {
        this.accessLevel = accessLevel;
    }

    /**
     * Used to reduce the result to blogs with specific aliases.
     *
     * @param aliases
     *            the aliases of the blogs
     */
    public void setBlogAliases(String[] aliases) {
        this.blogAliases = aliases;
    }

    /**
     * Used to reduce the result to blogs with specific IDs.
     *
     * @param blogIds
     *            the blog IDs
     */
    public void setBlogIds(Long[] blogIds) {
        this.blogIds = blogIds;
    }

    /**
     * Used to set blog IDs to exclude from search.
     *
     * @param blogsToExclude
     *            the blogsToExclude to set
     */
    public void setBlogsToExclude(Long[] blogsToExclude) {
        this.blogsToExclude = blogsToExclude;
    }

    /**
     * Set whether to exclude top level topics from the result. This flag will be ignored if
     * {@link #isShowOnlyToplevelTopics()} returns true.
     *
     * @param exclude
     *            True if top level topics should be excluded.
     *
     */
    public void setExcludeToplevelTopics(boolean exclude) {
        this.excludeToplevelTopics = exclude;
    }

    /**
     * @param externalObjectId
     *            filter for topics which have an external object with this external id assigned
     */
    public void setExternalObjectId(String externalObjectId) {
        this.externalObjectId = externalObjectId;
    }

    /**
     * @param externalObjectSystemId
     *            filter for topics which have an external object with this system id assigned
     */
    public void setExternalObjectSystemId(String externalObjectSystemId) {
        this.externalObjectSystemId = externalObjectSystemId;
    }

    /**
     * @param forceAllTopics
     *            True, if all topics should be shown, regardless of access roles. The current user
     *            has to be a client manager to use this flag.
     */
    public void setForceAllTopics(boolean forceAllTopics) {
        this.forceAllTopics = forceAllTopics;
    }

    /**
     * Whether to include the child topics in the query. This parameter will only be ignored if the
     * blogIDs parameter is unset.
     *
     * @param includeChildTopics
     *            whether to include the child topics
     */
    public void setIncludeChildTopics(boolean includeChildTopics) {
        this.includeChildTopics = includeChildTopics;
    }

    /**
     * Sets the match mode for text filtering. The default is to match anywhere.
     *
     * @param matchMode
     *            the matchMode to set
     */
    public void setMatchMode(MatchMode matchMode) {
        this.matchMode = matchMode;
    }

    /**
     * @param minimumLastModificationDate
     *            the minimum last modification the blog must be modified AFTER
     */
    public void setMinimumLastModificationDate(Date minimumLastModificationDate) {
        this.minimumLastModificationDate = minimumLastModificationDate;
    }

    /**
     * @param parentTopicIds
     *            Array of parent topics. A null value is ignored.
     */
    public void setParentTopicIds(Long[] parentTopicIds) {
        if (parentTopicIds != null) {
            this.parentTopicIds = parentTopicIds;
        }
    }

    /**
     * @param renderTagsJoin
     *            the renderTagsJoin to set
     */
    public void setRenderTagsJoin(boolean renderTagsJoin) {
        this.renderTagsJoin = renderTagsJoin;
    }

    /**
     * Can be used to set the fields to be searched with the {@link #getTextFilter() text filter}.
     *
     * @param searchFieldMask
     *            a bitwise combination of the SEARCH_*_FIELD flags.
     * @see #SEARCH_FIELD_DESCRIPTION
     * @see #SEARCH_FIELD_IDENTIFIER
     * @see #SEARCH_FIELD_TITLE
     * @see #SEARCH_FIELD_BLOG_TAGS
     */
    public void setSearchFieldMask(int searchFieldMask) {
        this.searchFieldMask = searchFieldMask;
    }

    /**
     * @param showFollowing
     *            the showFollowing to set
     */
    public void setShowOnlyFollowedItems(Boolean showFollowing) {
        this.showOnlyFollowedItems = showFollowing;
    }

    /**
     * Set whether only root topics should be returned. This flag will be ignored if
     * {@link #isShowOnlyToplevelTopics()} returns true.
     *
     * @param showOnlyRootTopics
     *            True if only root topics should be returned.
     *
     */
    public void setShowOnlyRootTopics(boolean showOnlyRootTopics) {
        this.showOnlyRootTopics = showOnlyRootTopics;
    }

    /**
     * Set whether only topics marked as top level topic should be returned. If set to true
     * {@link #isShowOnlyRootTopics()} and {@link #isExcludeToplevelTopics()} will be ignored.
     *
     * @param showOnlyToplevelTopics
     *            True if only top level topics should be returned.
     *
     */
    public void setShowOnlyToplevelTopics(boolean showOnlyToplevelTopics) {
        this.showOnlyToplevelTopics = showOnlyToplevelTopics;
    }

    /**
     * @param tagPrefix
     *            the prefix tags must have
     */
    public void setTagPrefix(String tagPrefix) {
        this.tagPrefix = tagPrefix;
    }

    /**
     * @param tags
     *            the tags to set
     */
    public void setTags(String[] tags) {
        this.tags = tags;
    }

    /**
     * Set aliases of tag stores to only consider tags from these stores
     *
     * @param tagStoreAliases
     *            set of aliases identifying tag stores
     */
    public void setTagStoreAliases(Set<String> tagStoreAliases) {
        if (tagStoreAliases == null) {
            tagStoreAliases = new HashSet<String>();
        }
        // Fixed: reset the cached multilingual flag to null so that
        // isMultilingualTagPrefixSearch() recomputes it for the new alias set.
        // Assigning Boolean.FALSE (as before) skipped the lazy recomputation entirely.
        this.multilingualTagPrefixSearch = null;
        this.tagStoreAliases = tagStoreAliases;
    }

    /**
     * Set the strings to be found in title, description or identifier of blog.
     *
     * @param textFilter
     *            the full text strings to search for
     */
    public void setTextFilter(String[] textFilter) {
        this.textFilterParamNames = createParameterNamesForSearch(PARAM_BLOG_TEXT_SEARCH_PREFIX,
                textFilter);
        this.textFilters = textFilter;
    }

    /**
     * @param userId
     *            the user id to filter for
     */
    public void setUserId(Long userId) {
        this.userId = userId;
    }

    /**
     * @return {@code true} if only followed items are shown
     */
    public Boolean showOnlyFollowedItems() {
        return showOnlyFollowedItems;
    }

    /**
     * Sort ascending by the last modification date of the blog. This means the oldest modified blog
     * will be returned as first element
     */
    public void sortByLastModificationDateAsc() {
        this.addSortField(BlogQuery.ALIAS_BLOG, BlogConstants.LASTMODIFICATIONDATE, SORT_ASCENDING);
    }

    /**
     * sort by the name of the blogs (case-insensitive via lower())
     */
    public void sortByNameAsc() {
        this.addSortField("lower(" + BlogQuery.ALIAS_BLOG, BlogConstants.TITLE + ")",
                SORT_ASCENDING);
    }

    /**
     * Transforms the BlogData. Sets the description for BlogData because the description is not
     * allowed in the query (clob in an Oracle DB environment)
     *
     * @param resultItem
     *            The resultItem to transform
     * @return The transformed BlogData
     */
    @Override
    public IdentifiableEntityData transformResultItem(Object resultItem) {
        BlogData result = (BlogData) resultItem;
        BlogManagement blogManagement = ServiceLocator.findService(BlogManagement.class);
        Blog blog;
        try {
            // client managers with forceAllTopics may read topics they have no role on
            blog = isForceAllTopics() && SecurityHelper.isClientManager()
                    ? blogManagement.findBlogByIdWithoutAuthorizationCheck(result.getId())
                    : blogManagement.getBlogById(result.getId(), false);
        } catch (BlogNotFoundException e) {
            throw BlogManagementHelper.convertException(e);
        } catch (BlogAccessException e) {
            throw BlogManagementHelper.convertException(e);
        }
        result.setDescription(blog.getDescription());
        return result;
    }
}
apache-2.0
yokmama/honki_android2
Chapter04/Lesson17/before/app/src/main/java/com/kayosystem/honki/chapter04/lesson17/fragment/GridLayoutFragment.java
655
package com.kayosystem.honki.chapter04.lesson17.fragment;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import com.kayosystem.honki.chapter04.lesson17.R;

/**
 * A simple {@link Fragment} subclass.
 *
 * Displays the static layout defined in {@code res/layout/fragment_grid_layout.xml};
 * the fragment adds no behavior beyond inflating that layout.
 */
public class GridLayoutFragment extends Fragment {

    /**
     * Inflates and returns the fragment's view hierarchy.
     *
     * @param inflater used to inflate the XML layout
     * @param container the parent the view will eventually be attached to; passed to
     *            {@code inflate} with {@code attachToRoot=false} so the framework attaches it
     * @param savedInstanceState previously saved state, unused here
     * @return the root view of {@code R.layout.fragment_grid_layout}
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_grid_layout, container, false);
    }
}
apache-2.0
galderz/Aeron
aeron-driver/src/main/java/io/aeron/driver/status/SubscriberPos.java
1846
/*
 * Copyright 2014-2019 Real Logic Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.aeron.driver.status;

import org.agrona.MutableDirectBuffer;
import org.agrona.concurrent.status.CountersManager;
import org.agrona.concurrent.status.UnsafeBufferPosition;

/**
 * The position an individual Subscriber has reached on a session-channel-stream tuple. It is possible to have multiple
 * Subscribers on the same machine tracked by a {@link io.aeron.driver.MediaDriver}.
 */
public class SubscriberPos
{
    /**
     * Type id of a subscriber position counter.
     */
    public static final int SUBSCRIBER_POSITION_TYPE_ID = 4;

    /**
     * Human readable name for the counter.
     */
    public static final String NAME = "sub-pos";

    /**
     * Allocate a new subscriber-position counter for a session-channel-stream tuple. All arguments
     * are passed straight through to {@code StreamCounter.allocate} together with {@link #NAME} and
     * {@link #SUBSCRIBER_POSITION_TYPE_ID}.
     *
     * @param tempBuffer     temporary buffer used while building the counter label
     * @param countersManager manager that owns and allocates the counter
     * @param registrationId registration id of the subscription
     * @param sessionId      session id of the stream
     * @param streamId       stream id within the channel
     * @param channel        channel URI the subscription is on
     * @param joinPosition   position at which the subscriber joined the stream
     * @return the allocated counter as an {@link UnsafeBufferPosition}
     */
    public static UnsafeBufferPosition allocate(
        final MutableDirectBuffer tempBuffer,
        final CountersManager countersManager,
        final long registrationId,
        final int sessionId,
        final int streamId,
        final String channel,
        final long joinPosition)
    {
        return StreamCounter.allocate(
            tempBuffer, NAME, SUBSCRIBER_POSITION_TYPE_ID, countersManager, registrationId,
            sessionId, streamId, channel, joinPosition);
    }
}
apache-2.0
thomaskrause/ANNIS
annis-service/src/main/java/annis/dao/CorpusSelectionStrategy.java
1029
/*
 * Copyright 2009-2011 Collaborative Research Centre SFB 632
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package annis.dao;

import java.util.List;

import annis.model.Annotation;

import annis.sqlgen.NodeSqlAdapter;

/**
 * Strategy for restricting SQL queries to a selected set of corpora. Implementations either
 * materialize the selection as database views or emit WHERE-clause fragments per node.
 * NOTE(review): semantics inferred from the method names below - confirm against implementations.
 */
public interface CorpusSelectionStrategy
{
  // true if this strategy works through database views (then createViewSql()/viewName()
  // are relevant); false if it filters via whereClauseForNode() - TODO confirm
  boolean usesViews();

  // registers the adapter used to generate node-related SQL for this strategy
  void registerNodeAdapter(NodeSqlAdapter adapter);

  // SQL statement that creates the corpus-selection view(s); presumably only meaningful
  // when usesViews() returns true
  String createViewSql();

  // WHERE-clause fragment restricting a single node; docRefColumn is the column
  // referencing the document/corpus
  String whereClauseForNode(String docRefColumn);

  // name of the (possibly strategy-specific) view to use instead of the given table
  String viewName(String table);

  // adds the meta annotations relevant for the corpus selection to the given list
  // NOTE(review): unclear whether this filters or appends - verify in implementations
  void addMetaAnnotations(List<Annotation> annotations);
}
apache-2.0
TZClub/OMIPlatform
dao-manager/src/main/java/tz/gzu/oip/dm/dao/sp/ShopgroupbuyDao.java
752
package tz.gzu.oip.dm.dao.sp;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

import tz.gzu.oip.dm.bo.ops.PageQuery;
import tz.gzu.oip.dm.po.Shopgroupbuy;

/**
 * MyBatis mapper for shop group-buy records ({@link Shopgroupbuy}). The SQL statements live in
 * the corresponding mapper XML; method and {@code @Param} names must stay in sync with it.
 */
@Mapper
public interface ShopgroupbuyDao {

    // inserts a full record; returns the number of affected rows
    int insert(@Param("shopgroupbuy") Shopgroupbuy shopgroupbuy);

    // inserts only the non-null fields of the record (MyBatis "selective" convention)
    int insertSelective(@Param("shopgroupbuy") Shopgroupbuy shopgroupbuy);

    // batch insert of several records in one statement
    int insertList(@Param("shopgroupbuys") List<Shopgroupbuy> shopgroupbuys);

    // updates a record; returns the number of affected rows
    int update(@Param("shopgroupbuy") Shopgroupbuy shopgroupbuy);

    // pages through all group-buy records using the given paging parameters
    List<Shopgroupbuy> findAllGroupBuy(@Param("pageQuery") PageQuery pageQuery);

    // total number of group-buy records
    int count();

    // deletes the record with the given primary key
    void delete(@Param("gpid") Integer gpid);

    // loads a single record by primary key
    // NOTE(review): unlike the other methods this one has no @Param and uses primitive int;
    // works with single-arg statements but is inconsistent - confirm against the mapper XML
    // before changing.
    Shopgroupbuy findShopGroupBuyById(int gpid);
}
apache-2.0
Mendeley/mendeley-android-sdk
library/src/main/java/com/mendeley/sdk/request/CancellableOutputStream.java
1357
package com.mendeley.sdk.request;

import java.io.IOException;
import java.io.OutputStream;
import java.util.concurrent.CancellationException;

/**
 * Wrapper over {@link java.io.OutputStream} that will stop writing to it if it's been cancelled.
 *
 * Every write overload checks {@link #isCancelled()} before delegating and throws
 * {@link CancellationException} once the operation has been cancelled. {@link #close()} and
 * {@link #flush()} are intentionally never blocked so resources can still be released.
 */
public abstract class CancellableOutputStream extends OutputStream {

    /** Stream all writes are forwarded to. */
    private final OutputStream delegate;

    protected CancellableOutputStream(OutputStream delegate) {
        this.delegate = delegate;
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    @Override
    public void flush() throws IOException {
        delegate.flush();
    }

    @Override
    public void write(byte[] buffer) throws IOException {
        // Fixed: this overload previously delegated without checking cancellation,
        // unlike write(byte[],int,int) and write(int), so whole-buffer writes kept
        // succeeding after the request was cancelled.
        throwIfCancelled();
        delegate.write(buffer);
    }

    @Override
    public void write(byte[] buffer, int offset, int count) throws IOException {
        throwIfCancelled();
        delegate.write(buffer, offset, count);
    }

    @Override
    public void write(int oneByte) throws IOException {
        throwIfCancelled();
        delegate.write(oneByte);
    }

    /**
     * Shared guard for all write overloads.
     *
     * @throws CancellationException if the operation has been cancelled
     */
    private void throwIfCancelled() {
        if (isCancelled()) {
            throw new CancellationException("Writing to output stream interrupted due to cancellation");
        }
    }

    /**
     * @return true once the surrounding operation has been cancelled
     */
    protected abstract boolean isCancelled();
}
apache-2.0
AutomationRockstars/Design
reporting/agent-gunter/src/main/java/com/automationrockstars/monitoring/gunter/Emitter.java
1176
package com.automationrockstars.monitoring.gunter;

import com.automationrockstars.gunter.events.EventFactory;
import com.automationrockstars.gunter.rabbit.RabbitEventBroker;
import org.apache.commons.io.IOUtils;

import java.io.IOException;
import java.util.Map;

/**
 * Publishes monitoring samples to the "monitoring" RabbitMQ exchange, tagging each sample with
 * the local host name. Publishing is best-effort: failures are never propagated to callers.
 */
public class Emitter {

    /** Cached output of the {@code hostname} command; null until the first successful lookup. */
    private static String hostname = null;

    /**
     * Lazily resolves and caches the local host name by running the {@code hostname} command.
     * On failure the cache stays null, so the lookup is retried on the next call.
     *
     * @return the host name with line terminators stripped, or null if it could not be determined
     */
    private static String hostname() {
        if (hostname == null) {
            try {
                Process p = Runtime.getRuntime().exec("hostname");
                p.waitFor();
                hostname = IOUtils.toString(p.getInputStream()).replaceAll("\\n|\\r", "");
            } catch (InterruptedException e) {
                // Fixed: restore the interrupt flag instead of silently swallowing it, so
                // callers can still observe the interruption.
                Thread.currentThread().interrupt();
                hostname = null;
            } catch (IOException e) {
                hostname = null;
            }
        }
        return hostname;
    }

    /**
     * Fires a sample event of the given type to the "monitoring" exchange.
     *
     * @param type   logical type of the sample
     * @param sample metric name to value mapping
     */
    public static synchronized void send(final String type, final Map<String, Number> sample) {
        try {
            RabbitEventBroker.publisher("monitoring", "*").fireEvent(EventFactory.toJson(
                    EventFactory.createSample(hostname(), type, sample)));
        } catch (Exception e) {
            // Deliberately best-effort: a monitoring failure must never break the caller.
            // NOTE(review): consider at least debug-logging the failure - confirm intent.
        }
    }

    /**
     * Closes all broker connections held by {@link RabbitEventBroker}.
     */
    public static synchronized void close() {
        RabbitEventBroker.closeAll();
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/ModifyTrafficMirrorFilterNetworkServicesResultStaxUnmarshaller.java
2856
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model.transform;

import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;

import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;

/**
 * ModifyTrafficMirrorFilterNetworkServicesResult StAX Unmarshaller
 *
 * Generated code: walks the StAX event stream and populates the result's
 * {@code trafficMirrorFilter} member from the matching child element.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ModifyTrafficMirrorFilterNetworkServicesResultStaxUnmarshaller implements
        Unmarshaller<ModifyTrafficMirrorFilterNetworkServicesResult, StaxUnmarshallerContext> {

    /**
     * Unmarshalls the result from the current position of the given StAX context.
     *
     * @param context StAX parsing context positioned at the result element
     * @return the populated result object
     * @throws Exception on any parsing failure
     */
    public ModifyTrafficMirrorFilterNetworkServicesResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        ModifyTrafficMirrorFilterNetworkServicesResult modifyTrafficMirrorFilterNetworkServicesResult = new ModifyTrafficMirrorFilterNetworkServicesResult();
        // Children of this element live one level deeper than the current depth;
        // at document start there is one extra level for the document element.
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        if (context.isStartOfDocument())
            targetDepth += 1;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            // end of document: nothing more to read
            if (xmlEvent.isEndDocument())
                return modifyTrafficMirrorFilterNetworkServicesResult;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {

                if (context.testExpression("trafficMirrorFilter", targetDepth)) {
                    // delegate the nested structure to its own unmarshaller
                    modifyTrafficMirrorFilterNetworkServicesResult
                            .setTrafficMirrorFilter(TrafficMirrorFilterStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // climbed back above the element we started at: result is complete
                if (context.getCurrentDepth() < originalDepth) {
                    return modifyTrafficMirrorFilterNetworkServicesResult;
                }
            }
        }
    }

    private static ModifyTrafficMirrorFilterNetworkServicesResultStaxUnmarshaller instance;

    // NOTE(review): lazy init without synchronization is the convention throughout the
    // AWS-generated unmarshallers (the instance is stateless, so a duplicate is harmless).
    public static ModifyTrafficMirrorFilterNetworkServicesResultStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new ModifyTrafficMirrorFilterNetworkServicesResultStaxUnmarshaller();
        return instance;
    }
}
apache-2.0
NickAndroid/HengMusic
app/src/main/java/com/nick/yinheng/worker/TrackLoader.java
6961
package com.nick.yinheng.worker;

import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Handler;
import android.provider.BaseColumns;
import android.provider.MediaStore;

import com.nick.yinheng.model.IMediaTrack;
import com.nick.yinheng.repository.DatabaseHelper;
import com.nick.yinheng.service.UserCategory;

import java.util.ArrayList;
import java.util.List;

/**
 * Loads {@link IMediaTrack} lists, either from the device media store
 * ({@link UserCategory#ALL}) or from the app's own "recent" database table
 * ({@link UserCategory#RECENT}). Only tracks whose file path ends in
 * {@code .mp3}/{@code .MP3} are returned.
 */
public class TrackLoader {

    private static TrackLoader sLoader;

    private TrackLoader() {
        // Singleton: obtain via get().
    }

    /** Returns the process-wide loader instance, creating it on first use. */
    public static synchronized TrackLoader get() {
        if (sLoader == null) sLoader = new TrackLoader();
        return sLoader;
    }

    /**
     * Loads tracks on the shared background executor and reports progress and
     * results through {@code listener} (delivered on the listener's handler thread).
     */
    public void loadAsync(final UserCategory category, final Listener listener, final Context context) {
        SharedExecutor.get().execute(new Runnable() {
            @Override
            public void run() {
                listener.postLoading(category);
                listener.postLoaded(category, load(category, context));
            }
        });
    }

    /**
     * Synchronously loads tracks for the given category.
     *
     * @throws IllegalArgumentException for categories other than ALL/RECENT
     */
    public List<IMediaTrack> load(UserCategory category, Context context) {
        if (category == UserCategory.ALL) return loadAll(context);
        if (category == UserCategory.RECENT) return loadRecent(context);
        throw new IllegalArgumentException("Bad category #" + category);
    }

    /**
     * Loads recently played tracks from the app database. The cursor and the
     * database helper are always closed, even if reading a row throws.
     */
    private List<IMediaTrack> loadRecent(Context c) {
        List<IMediaTrack> list = new ArrayList<IMediaTrack>();
        DatabaseHelper databaseHelper = new DatabaseHelper(c);
        SQLiteDatabase db = databaseHelper.getWritableDatabase();
        Cursor cursor = null;
        try {
            cursor = db.query(UserCategory.RECENT.name(), null, null, null, null, null, null);
            if (cursor != null && cursor.getCount() > 0) {
                for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
                    long id = cursor.getLong(cursor
                            .getColumnIndex(DatabaseHelper.BaseColumns.COLUMN_SONG_ID));
                    String title = cursor.getString(cursor
                            .getColumnIndexOrThrow(DatabaseHelper.BaseColumns.COLUMN_TITLE));
                    String singer = cursor.getString(cursor
                            .getColumnIndexOrThrow(DatabaseHelper.BaseColumns.COLUMN_ARTIST));
                    int time = cursor.getInt(cursor
                            .getColumnIndexOrThrow(DatabaseHelper.BaseColumns.COLUMN_DURATION));
                    String url = cursor.getString(cursor
                            .getColumnIndexOrThrow(DatabaseHelper.BaseColumns.COLUMN_URL));
                    String album = cursor.getString(cursor
                            .getColumnIndexOrThrow(DatabaseHelper.BaseColumns.COLUMN_ALBUM));
                    long albumid = cursor.getLong(cursor
                            .getColumnIndex(DatabaseHelper.BaseColumns.COLUMN_ALBUM_ID));
                    if (isMp3(url)) {
                        list.add(buildTrack(id, title, singer, url, album, albumid, time));
                    }
                }
            }
        } finally {
            closeQuietly(cursor);
            // Closing the helper also closes the database it handed out;
            // the original code leaked both.
            databaseHelper.close();
        }
        return list;
    }

    /**
     * Loads every audio track known to the system media store.
     */
    private List<IMediaTrack> loadAll(Context c) {
        List<IMediaTrack> list = new ArrayList<IMediaTrack>();
        ContentResolver cr = c.getContentResolver();
        Cursor cursor = null;
        try {
            cursor = cr.query(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
                    null, null, null, MediaStore.Audio.Media.DEFAULT_SORT_ORDER);
            if (cursor != null && cursor.getCount() > 0) {
                for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
                    long id = cursor.getLong(cursor.getColumnIndex(BaseColumns._ID));
                    String title = cursor.getString(cursor
                            .getColumnIndexOrThrow(MediaStore.MediaColumns.TITLE));
                    String singer = cursor.getString(cursor
                            .getColumnIndexOrThrow(MediaStore.Audio.AudioColumns.ARTIST));
                    int time = cursor.getInt(cursor
                            .getColumnIndexOrThrow(MediaStore.Audio.AudioColumns.DURATION));
                    String url = cursor.getString(cursor
                            .getColumnIndexOrThrow(MediaStore.MediaColumns.DATA));
                    String album = cursor.getString(cursor
                            .getColumnIndexOrThrow(MediaStore.Audio.AudioColumns.ALBUM));
                    long albumid = cursor.getLong(cursor
                            .getColumnIndex(MediaStore.Audio.AudioColumns.ALBUM_ID));
                    if (isMp3(url)) {
                        list.add(buildTrack(id, title, singer, url, album, albumid, time));
                    }
                }
            }
        } finally {
            closeQuietly(cursor);
        }
        return list;
    }

    /** True when the file path ends with {@code .mp3} or {@code .MP3}. */
    private static boolean isMp3(String url) {
        return url != null && (url.endsWith(".mp3") || url.endsWith(".MP3"));
    }

    /** Builds a populated track model from the raw column values of one row. */
    private static IMediaTrack buildTrack(long id, String title, String artist, String url,
                                          String album, long albumId, int duration) {
        IMediaTrack track = new IMediaTrack();
        track.setTitle(title);
        track.setArtist(artist);
        track.setId(id);
        track.setUrl(url);
        track.setAlbumId(albumId);
        track.setAlbum(album);
        track.setDuration(duration);
        return track;
    }

    /** Closes the cursor, swallowing any close failure (best effort, as before). */
    private static void closeQuietly(Cursor cursor) {
        if (cursor == null) return;
        try {
            cursor.close();
        } catch (Exception ignored) {
            // A failed close must not abort loading.
        }
    }

    /**
     * Callback for async loads. Callbacks are posted to the {@link Handler} of
     * the thread that constructed the listener.
     */
    public static abstract class Listener {

        private Handler handler;

        public Listener() {
            // Bound to the constructing thread's looper.
            handler = new Handler();
        }

        void postLoading(final UserCategory category) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    onLoading(category);
                }
            });
        }

        void postLoaded(final UserCategory category, final List<IMediaTrack> tracks) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    onLoaded(category, tracks);
                }
            });
        }

        /** Invoked before loading starts for {@code category}. */
        public abstract void onLoading(UserCategory category);

        /** Invoked with the loaded tracks for {@code category}. */
        public abstract void onLoaded(UserCategory category, List<IMediaTrack> tracks);
    }
}
apache-2.0
adligo/i_adi.adligo.org
src/org/adligo/i/adi/shared/models/CacheValue.java
3885
package org.adligo.i.adi.shared.models; public class CacheValue { /** * 60 seconds per mintue * 1000 milliseconds per second */ private static final long MILLIS_PER_MINUTE = 60 *1000; private long putTime; private Object value; private String fullPath; private ReferenceAddressName refName; public CacheValue(String pKey, long pPutTime, Object pValue) { init(new ReferenceAddressName(pKey), pPutTime, pValue); } public CacheValue(ReferenceAddressName pRefName, long pPutTime, Object pValue) { init(pRefName, pPutTime, pValue); } private void init(ReferenceAddressName pRefName, long pPutTime, Object pValue) { refName = pRefName; fullPath = pRefName.getFullPath(); putTime = pPutTime; value = pValue; } public String getFullPath() { return fullPath; } public long getPutTime() { return putTime; } public Object getValue() { return value; } /** * this returns a String for the ReferenceDomain index of * CacheValues by time, it is slash and then the number of minutes * before or since the Timestamp 0, and then slash and * the long putTime value * * note that the time crunch string for the minute around (plus or minus) the * Timestamp 0 is actually two minutes long, but since most clocks * will not be in the year 1970 anytime it doesn't matter much * * @return */ public String getTimeCrunchString() { long min = putTime/MILLIS_PER_MINUTE; StringBuffer sb = new StringBuffer(); sb.append("/"); sb.append(min); //note @parentFullPath was added // to remove contention locks in the time index // where many threads would be writing to the same // child Map (java.util.concurrentHashMap) // designated by the minute sb.append(refName.getParentFullPath()); sb.append("/"); sb.append(putTime); sb.append(getLocalWithAtInsteadOfSlash()); return sb.toString(); } private String getLocalWithAtInsteadOfSlash() { String local = refName.getLocalPath(); char [] chars = local.toCharArray(); StringBuffer sb = new StringBuffer(); for (int i = 0; i < chars.length; i++) { char c = chars[i]; if (c == '/') { 
sb.append('@'); } else { sb.append(c); } } return sb.toString(); } /** * for lookups */ public long getTopTimeCrunch() { return putTime/MILLIS_PER_MINUTE; } public long getTopTimeFromCrunchString(String timeCrunchString) { char [] chars = timeCrunchString.toCharArray(); StringBuffer sb = new StringBuffer(); boolean firstSlash = false; for (int i = 0; i < chars.length; i++) { char c = chars[i]; if (firstSlash) { if (c == '/') { break; } else { sb.append(c); } } else { if (c == '/') { firstSlash = true; } } } return Long.parseLong(sb.toString()); } public String toString() { StringBuffer sb = new StringBuffer(); sb.append("CacheValue [fullPath="); sb.append(fullPath); sb.append(",putTime="); sb.append(putTime); sb.append(",value="); sb.append(value); sb.append("]"); return sb.toString(); } public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((fullPath == null) ? 0 : fullPath.hashCode()); result = prime * result + (int) (putTime ^ (putTime >>> 32)); result = prime * result + ((value == null) ? 0 : value.hashCode()); return result; } public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; CacheValue other = (CacheValue) obj; if (fullPath == null) { if (other.fullPath != null) return false; } else if (!fullPath.equals(other.fullPath)) return false; if (putTime != other.putTime) return false; if (value == null) { if (other.value != null) return false; } else if (!value.equals(other.value)) return false; return true; } }
apache-2.0
consulo/consulo
modules/desktop-awt/desktop-util-awt/src/main/java/com/intellij/ui/CachingPainter.java
5082
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui;

import com.intellij.ui.paint.PaintUtil;
import com.intellij.util.JBHiDPIScaledImage;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;

import javax.annotation.Nonnull;
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.awt.image.VolatileImage;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
import java.util.WeakHashMap;
import java.util.function.Consumer;

/**
 * This class allows to cache repeatedly painted elements, by drawing them to an in-memory image, and transferring that image to the target
 * graphics instead of performing the original painting.
 */
public class CachingPainter {
  // Keys are held weakly so cached images disappear once the owning key is garbage-collected.
  private static final Map<Object, CachedPainting> ourCache = new WeakHashMap<>();

  /**
   * Performs painting of frequently used pattern, using image cache. {@code x}, {@code y}, {@code width}, {@code height} define the region
   * where painting is done, {@code painter} performs the actual drawing, it's called with graphics origin set to the origin or the painting
   * region. Painter logic shouldn't depend on anything except the size of the region and values of {@code parameters}. Result of painting
   * will be cached with {@code key} as a key, and used for subsequent painting requests with the same region size and parameter values.
   * <p>
   * Subpixel-antialiased text shouldn't be rendered using this procedure, as the result depends on the target surface's background color,
   * and it cannot be determined when cached image is produced.
   */
  public static void paint(@Nonnull Graphics2D g, float x, float y, float width, float height, @Nonnull Consumer<Graphics2D> painter,
                           @Nonnull Object key, @Nonnull Object... parameters) {
    GraphicsConfiguration config = g.getDeviceConfiguration();
    float scale = JBUI.sysScale(config);
    if ((int)scale != scale) {
      // fractional-scale setups are not supported currently: bypass the cache and paint
      // directly onto a copy of the target graphics
      paintAndDispose((Graphics2D)g.create(), _g -> {
        _g.setComposite(AlphaComposite.SrcOver);
        _g.translate(x, y);
        painter.accept(_g);
      });
      return;
    }
    // Snap the fractional region to the enclosing integer pixel rectangle.
    int xInt = (int)Math.floor(x);
    int yInt = (int)Math.floor(y);
    int widthInt = (int)Math.ceil(x + width) - xInt;
    int heightInt = (int)Math.ceil(y + height) - yInt;
    // Discard a cached entry whose size/parameters/device transform no longer match.
    CachedPainting painting = ourCache.get(key);
    if (painting != null && !painting.matches(config, width, height, parameters)) {
      painting = null;
    }
    // validate() tells us whether the volatile image can still be used, needs repainting,
    // or must be recreated for this graphics configuration.
    int validationResult = painting == null ? VolatileImage.IMAGE_INCOMPATIBLE : painting.image.validate(config);
    if (validationResult == VolatileImage.IMAGE_INCOMPATIBLE) {
      ourCache.put(key, painting = new CachedPainting(config, width, height, widthInt, heightInt, parameters));
    }
    if (validationResult != VolatileImage.IMAGE_OK) {
      // We cannot perform antialiased rendering onto volatile image using Src composite, so we draw to a buffered image first.
      BufferedImage bi = new JBHiDPIScaledImage(config, widthInt, heightInt, BufferedImage.TYPE_INT_ARGB, PaintUtil.RoundingMode.ROUND);
      paintAndDispose(bi.createGraphics(), _g -> {
        _g.setComposite(AlphaComposite.Src);
        // translate by the sub-pixel offset lost when snapping to integers above
        _g.translate(x - xInt, y - yInt);
        painter.accept(_g);
      });
      paintAndDispose(painting.image.createGraphics(), _g -> {
        _g.setComposite(AlphaComposite.Src);
        UIUtil.drawImage(_g, bi, 0, 0, null);
      });
    }
    Composite savedComposite = g.getComposite();
    g.setComposite(AlphaComposite.SrcOver);
    g.drawImage(painting.image, xInt, yInt, null);
    g.setComposite(savedComposite);
    // We don't check whether volatile image's content was lost at this point,
    // cause we cannot repeat painting over the initial graphics reliably anyway (without restoring its initial contents first).
  }

  // Runs the painter on the given graphics and guarantees disposal even if painting throws.
  private static void paintAndDispose(Graphics2D g, Consumer<Graphics2D> painter) {
    try {
      painter.accept(g);
    }
    finally {
      g.dispose();
    }
  }

  /**
   * Cache entry: the rendered volatile image plus everything it depends on
   * (requested size, painter parameters, and the device transform at creation time),
   * so stale entries can be detected by {@link #matches}.
   */
  private static class CachedPainting {
    private final float width;
    private final float height;
    private final Object[] parameters;
    private final VolatileImage image;
    private final AffineTransform deviceTransform;

    private CachedPainting(GraphicsConfiguration config, float width, float height, int widthInt, int heightInt, Object[] parameters) {
      this.width = width;
      this.height = height;
      this.parameters = parameters;
      this.image = config.createCompatibleVolatileImage(widthInt, heightInt, Transparency.TRANSLUCENT);
      this.deviceTransform = config.getDefaultTransform();
    }

    // True when this cached image is still valid for the given configuration and request.
    private boolean matches(GraphicsConfiguration config, float width, float height, Object[] parameters) {
      return this.width == width && this.height == height &&
             Objects.equals(deviceTransform, config.getDefaultTransform()) &&
             Arrays.equals(this.parameters, parameters);
    }
  }
}
apache-2.0
ITMAOO/scenic
scenic-wechatserver/src/main/java/com/scenic/wechat/server/bean/BatchTagUsersParameter.java
737
package com.scenic.wechat.server.bean;

/**
 * Request parameter wrapper for batch-tagging WeChat users with a tag.
 * <p/>
 * User: jonnyliu@tcl.com <br/>
 * Date: on 2016-08-30 11:12.
 */
public class BatchTagUsersParameter extends BaseBean {

    // Field names intentionally use snake_case to match the WeChat API JSON payload.
    private long tagid;
    private String[] openid_list;

    public BatchTagUsersParameter(long tagid, String[] openid_list) {
        this.tagid = tagid;
        // Defensive copy: later mutation of the caller's array cannot change this bean.
        this.openid_list = openid_list == null ? null : openid_list.clone();
    }

    public long getTagid() {
        return tagid;
    }

    public void setTagid(long tagid) {
        this.tagid = tagid;
    }

    public String[] getOpenid_list() {
        // Defensive copy: callers cannot mutate internal state through the returned array.
        return openid_list == null ? null : openid_list.clone();
    }

    public void setOpenid_list(String[] openid_list) {
        this.openid_list = openid_list == null ? null : openid_list.clone();
    }
}
apache-2.0
jexp/idea2
java/java-impl/src/com/intellij/codeInspection/unusedReturnValue/UnusedReturnValue.java
7920
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.unusedReturnValue;

import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInsight.daemon.GroupNames;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.reference.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.changeSignature.ChangeSignatureProcessor;
import com.intellij.refactoring.changeSignature.ParameterInfoImpl;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.List;

/**
 * Global inspection that reports methods which are called at least once but whose
 * return value is never used by any caller, and offers a quick fix that changes
 * the method's (and its overriders') return type to {@code void}.
 *
 * @author max
 */
public class UnusedReturnValue extends GlobalJavaInspectionTool {
  private MakeVoidQuickFix myQuickFix;

  /**
   * Reports a problem on {@code refEntity} when it is a called, non-constructor,
   * non-overriding method whose return value is never used.
   */
  @Nullable
  public CommonProblemDescriptor[] checkElement(RefEntity refEntity,
                                                AnalysisScope scope,
                                                InspectionManager manager,
                                                GlobalInspectionContext globalContext,
                                                ProblemDescriptionsProcessor processor) {
    if (refEntity instanceof RefMethod) {
      final RefMethod refMethod = (RefMethod)refEntity;

      if (refMethod.isConstructor()) return null;
      // Overriding methods must keep the super method's signature; skip them.
      if (refMethod.hasSuperMethods()) return null;
      // A method that is never called is out of scope for this inspection.
      if (refMethod.getInReferences().isEmpty()) return null;

      if (!refMethod.isReturnValueUsed()) {
        return new ProblemDescriptor[]{manager.createProblemDescriptor(refMethod.getElement().getNavigationElement(),
                                                                       InspectionsBundle.message("inspection.unused.return.value.problem.descriptor"),
                                                                       getFix(processor),
                                                                       ProblemHighlightType.GENERIC_ERROR_OR_WARNING)};
      }
    }

    return null;
  }

  /**
   * Re-checks flagged methods against external usages: if any usage is found,
   * the previously reported problem is withdrawn for that method.
   */
  protected boolean queryExternalUsagesRequests(final RefManager manager,
                                                final GlobalJavaInspectionContext globalContext,
                                                final ProblemDescriptionsProcessor processor) {
    manager.iterate(new RefJavaVisitor() {
      @Override
      public void visitElement(RefEntity refEntity) {
        if (refEntity instanceof RefElement && processor.getDescriptions(refEntity) != null) {
          refEntity.accept(new RefJavaVisitor() {
            @Override
            public void visitMethod(final RefMethod refMethod) {
              globalContext.enqueueMethodUsagesProcessor(refMethod, new GlobalJavaInspectionContext.UsagesProcessor() {
                public boolean process(PsiReference psiReference) {
                  // An external usage exists, so drop the "unused return value" report.
                  processor.ignoreElement(refMethod);
                  return false;
                }
              });
            }
          });
        }
      }
    });

    return false;
  }

  @NotNull
  public String getDisplayName() {
    return InspectionsBundle.message("inspection.unused.return.value.display.name");
  }

  @NotNull
  public String getGroupDisplayName() {
    return GroupNames.DECLARATION_REDUNDANCY;
  }

  @NotNull
  public String getShortName() {
    return "UnusedReturnValue";
  }

  // Lazily creates and caches the shared quick fix instance.
  private LocalQuickFix getFix(final ProblemDescriptionsProcessor processor) {
    if (myQuickFix == null) {
      myQuickFix = new MakeVoidQuickFix(processor);
    }
    return myQuickFix;
  }

  @Nullable
  public QuickFix getQuickFix(String hint) {
    // NOTE(review): if the fix was already created with a processor in checkElement,
    // the cached instance is reused here; otherwise a processor-less fix is created
    // and applyFix falls back to a PSI lookup. Confirm this ordering is intended.
    return getFix(null);
  }

  /**
   * Quick fix that changes the reported method's return type (and that of all its
   * overriders) to {@code void}, removing side-effect-free return values first.
   */
  private static class MakeVoidQuickFix implements LocalQuickFix {
    private final ProblemDescriptionsProcessor myProcessor;
    private static final Logger LOG = Logger.getInstance("#" + MakeVoidQuickFix.class.getName());

    public MakeVoidQuickFix(final ProblemDescriptionsProcessor processor) {
      myProcessor = processor;
    }

    @NotNull
    public String getName() {
      return InspectionsBundle.message("inspection.unused.return.value.make.void.quickfix");
    }

    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiMethod psiMethod = null;
      if (myProcessor != null) {
        RefElement refElement = (RefElement)myProcessor.getElement(descriptor);
        if (refElement.isValid() && refElement instanceof RefMethod) {
          RefMethod refMethod = (RefMethod)refElement;
          psiMethod = (PsiMethod)refMethod.getElement();
        }
      }
      else {
        // No processor available (e.g. fix created via getQuickFix): resolve from PSI.
        psiMethod = PsiTreeUtil.getParentOfType(descriptor.getPsiElement(), PsiMethod.class);
      }
      if (psiMethod == null) return;
      makeMethodHierarchyVoid(project, psiMethod);
    }

    @NotNull
    public String getFamilyName() {
      return getName();
    }

    // Rewrites return statements in the method and all overriders, then runs a
    // change-signature refactoring that sets the return type to void.
    private static void makeMethodHierarchyVoid(Project project, @NotNull PsiMethod psiMethod) {
      replaceReturnStatements(psiMethod);
      for (final PsiMethod oMethod : OverridingMethodsSearch.search(psiMethod)) {
        replaceReturnStatements(oMethod);
      }
      final PsiParameter[] params = psiMethod.getParameterList().getParameters();
      final ParameterInfoImpl[] infos = new ParameterInfoImpl[params.length];
      for (int i = 0; i < params.length; i++) {
        PsiParameter param = params[i];
        infos[i] = new ParameterInfoImpl(i, param.getName(), param.getType());
      }

      ChangeSignatureProcessor csp = new ChangeSignatureProcessor(project,
                                                                  psiMethod,
                                                                  false, null, psiMethod.getName(),
                                                                  PsiType.VOID,
                                                                  infos);

      csp.run();
    }

    // Replaces side-effect-free "return expr;" statements with "return;" (or deletes
    // a trailing one), leaving returns with potential side effects untouched.
    private static void replaceReturnStatements(@NotNull final PsiMethod method) {
      final PsiCodeBlock body = method.getBody();
      if (body != null) {
        final List<PsiReturnStatement> returnStatements = new ArrayList<PsiReturnStatement>();
        body.accept(new JavaRecursiveElementWalkingVisitor() {
          @Override
          public void visitReturnStatement(final PsiReturnStatement statement) {
            super.visitReturnStatement(statement);
            returnStatements.add(statement);
          }
        });
        // Fix: bail out when there is nothing to replace. The previous code indexed
        // psiStatements[length - 1] unconditionally, which threw
        // ArrayIndexOutOfBoundsException for a method with an empty body.
        if (returnStatements.isEmpty()) return;
        final PsiStatement[] psiStatements = body.getStatements();
        final PsiStatement lastStatement = psiStatements[psiStatements.length - 1];
        for (PsiReturnStatement returnStatement : returnStatements) {
          try {
            final PsiExpression expression = returnStatement.getReturnValue();
            if (expression instanceof PsiLiteralExpression || expression instanceof PsiThisExpression) { //avoid side effects
              if (returnStatement == lastStatement) {
                returnStatement.delete();
              }
              else {
                returnStatement
                  .replace(JavaPsiFacade.getInstance(method.getProject()).getElementFactory().createStatementFromText("return;", returnStatement));
              }
            }
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
      }
    }
  }
}
apache-2.0
peter-gergely-horvath/nifi
nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/AbstractComponentNode.java
33407
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.controller; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.attribute.expression.language.StandardPropertyValue; import org.apache.nifi.bundle.Bundle; import org.apache.nifi.bundle.BundleCoordinate; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.validation.DisabledServiceValidationResult; import org.apache.nifi.components.validation.ValidationState; import org.apache.nifi.components.validation.ValidationStatus; import org.apache.nifi.components.validation.ValidationTrigger; import org.apache.nifi.controller.service.ControllerServiceDisabledException; import org.apache.nifi.controller.service.ControllerServiceNode; import org.apache.nifi.controller.service.ControllerServiceProvider; import org.apache.nifi.nar.ExtensionManager; import org.apache.nifi.nar.NarCloseable; import org.apache.nifi.registry.ComponentVariableRegistry; import org.apache.nifi.util.CharacterFilterUtils; import org.apache.nifi.util.file.classloader.ClassLoaderUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; public abstract class AbstractComponentNode implements ComponentNode { private static final Logger logger = LoggerFactory.getLogger(AbstractComponentNode.class); private final String id; private final ValidationContextFactory validationContextFactory; private final ControllerServiceProvider serviceProvider; private final AtomicReference<String> name; private final AtomicReference<String> annotationData = new AtomicReference<>(); private final AtomicReference<ValidationContext> validationContext = new AtomicReference<>(); private final String componentType; private final String componentCanonicalClass; private final ComponentVariableRegistry variableRegistry; private final ReloadComponent reloadComponent; private final ExtensionManager extensionManager; private final AtomicBoolean isExtensionMissing; private final Lock lock = new ReentrantLock(); private final ConcurrentMap<PropertyDescriptor, String> properties = new ConcurrentHashMap<>(); private volatile String additionalResourcesFingerprint; private AtomicReference<ValidationState> validationState = new AtomicReference<>(new ValidationState(ValidationStatus.VALIDATING, Collections.emptyList())); private final ValidationTrigger validationTrigger; private volatile boolean triggerValidation = true; public AbstractComponentNode(final String id, final ValidationContextFactory 
validationContextFactory, final ControllerServiceProvider serviceProvider, final String componentType, final String componentCanonicalClass, final ComponentVariableRegistry variableRegistry, final ReloadComponent reloadComponent, final ExtensionManager extensionManager, final ValidationTrigger validationTrigger, final boolean isExtensionMissing) { this.id = id; this.validationContextFactory = validationContextFactory; this.serviceProvider = serviceProvider; this.name = new AtomicReference<>(componentType); this.componentType = componentType; this.componentCanonicalClass = componentCanonicalClass; this.reloadComponent = reloadComponent; this.variableRegistry = variableRegistry; this.validationTrigger = validationTrigger; this.extensionManager = extensionManager; this.isExtensionMissing = new AtomicBoolean(isExtensionMissing); } @Override public String getIdentifier() { return id; } @Override public void setExtensionMissing(boolean extensionMissing) { this.isExtensionMissing.set(extensionMissing); } @Override public boolean isExtensionMissing() { return isExtensionMissing.get(); } @Override public String getName() { return name.get(); } @Override public void setName(final String name) { this.name.set(CharacterFilterUtils.filterInvalidXmlCharacters(Objects.requireNonNull(name).intern())); } @Override public String getAnnotationData() { return annotationData.get(); } @Override public void setAnnotationData(final String data) { annotationData.set(CharacterFilterUtils.filterInvalidXmlCharacters(data)); logger.debug("Resetting Validation State of {} due to setting annotation data", this); resetValidationState(); } @Override public Set<URL> getAdditionalClasspathResources(final List<PropertyDescriptor> propertyDescriptors) { final Set<String> modulePaths = new LinkedHashSet<>(); for (final PropertyDescriptor descriptor : propertyDescriptors) { if (descriptor.isDynamicClasspathModifier()) { final String value = getProperty(descriptor); if (!StringUtils.isEmpty(value)) { 
final StandardPropertyValue propertyValue = new StandardPropertyValue(value, null, variableRegistry); modulePaths.add(propertyValue.evaluateAttributeExpressions().getValue()); } } } final Set<URL> additionalUrls = new LinkedHashSet<>(); try { final URL[] urls = ClassLoaderUtils.getURLsForClasspath(modulePaths, null, true); if (urls != null) { for (final URL url : urls) { additionalUrls.add(url); } } } catch (MalformedURLException mfe) { getLogger().error("Error processing classpath resources for " + id + ": " + mfe.getMessage(), mfe); } return additionalUrls; } @Override public void setProperties(final Map<String, String> properties, final boolean allowRemovalOfRequiredProperties) { if (properties == null) { return; } lock.lock(); try { verifyModifiable(); try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), id)) { boolean classpathChanged = false; for (final Map.Entry<String, String> entry : properties.entrySet()) { // determine if any of the property changes require resetting the InstanceClassLoader final PropertyDescriptor descriptor = getComponent().getPropertyDescriptor(entry.getKey()); if (descriptor.isDynamicClasspathModifier()) { classpathChanged = true; } if (entry.getKey() != null && entry.getValue() == null) { removeProperty(entry.getKey(), allowRemovalOfRequiredProperties); } else if (entry.getKey() != null) { setProperty(entry.getKey(), CharacterFilterUtils.filterInvalidXmlCharacters(entry.getValue())); } } // if at least one property with dynamicallyModifiesClasspath(true) was set, then reload the component with the new urls if (classpathChanged) { logger.info("Updating classpath for " + this.componentType + " with the ID " + this.getIdentifier()); final Set<URL> additionalUrls = getAdditionalClasspathResources(getComponent().getPropertyDescriptors()); try { reload(additionalUrls); } catch (Exception e) { getLogger().error("Error reloading component with id " + id + ": " + 
e.getMessage(), e); } } } logger.debug("Resetting Validation State of {} due to setting properties", this); resetValidationState(); } finally { lock.unlock(); } } // Keep setProperty/removeProperty private so that all calls go through setProperties private void setProperty(final String name, final String value) { if (null == name || null == value) { throw new IllegalArgumentException("Name or Value can not be null"); } final PropertyDescriptor descriptor = getComponent().getPropertyDescriptor(name); final String oldValue = properties.put(descriptor, value); if (!value.equals(oldValue)) { if (descriptor.getControllerServiceDefinition() != null) { if (oldValue != null) { final ControllerServiceNode oldNode = serviceProvider.getControllerServiceNode(oldValue); if (oldNode != null) { oldNode.removeReference(this); } } final ControllerServiceNode newNode = serviceProvider.getControllerServiceNode(value); if (newNode != null) { newNode.addReference(this); } } try { onPropertyModified(descriptor, oldValue, value); } catch (final Exception e) { // nothing really to do here... } } } /** * Removes the property and value for the given property name if a * descriptor and value exists for the given name. If the property is * optional its value might be reset to default or will be removed entirely * if was a dynamic property. 
* * @param name the property to remove * @param allowRemovalOfRequiredProperties whether or not the property should be removed if it's required * @return true if removed; false otherwise * @throws java.lang.IllegalArgumentException if the name is null */ private boolean removeProperty(final String name, final boolean allowRemovalOfRequiredProperties) { if (null == name) { throw new IllegalArgumentException("Name can not be null"); } final PropertyDescriptor descriptor = getComponent().getPropertyDescriptor(name); String value = null; final boolean allowRemoval = allowRemovalOfRequiredProperties || !descriptor.isRequired(); if (allowRemoval && (value = properties.remove(descriptor)) != null) { if (descriptor.getControllerServiceDefinition() != null) { if (value != null) { final ControllerServiceNode oldNode = serviceProvider.getControllerServiceNode(value); if (oldNode != null) { oldNode.removeReference(this); } } } try { onPropertyModified(descriptor, value, null); } catch (final Exception e) { getLogger().error(e.getMessage(), e); } return true; } return false; } @Override public Map<PropertyDescriptor, String> getProperties() { try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getIdentifier())) { final List<PropertyDescriptor> supported = getComponent().getPropertyDescriptors(); if (supported == null || supported.isEmpty()) { return Collections.unmodifiableMap(properties); } else { final Map<PropertyDescriptor, String> props = new LinkedHashMap<>(); for (final PropertyDescriptor descriptor : supported) { props.put(descriptor, null); } props.putAll(properties); return props; } } } @Override public String getProperty(final PropertyDescriptor property) { return properties.get(property); } @Override public void refreshProperties() { // use setProperty instead of setProperties so we can bypass the class loading logic getProperties().entrySet().stream() .filter(e -> e.getKey() != null && e.getValue() 
!= null) .forEach(e -> setProperty(e.getKey().getName(), e.getValue())); } /** * Generates fingerprint for the additional urls and compares it with the previous * fingerprint value. If the fingerprint values don't match, the function calls the * component's reload() to load the newly found resources. */ @Override public synchronized void reloadAdditionalResourcesIfNecessary() { // Components that don't have any PropertyDescriptors marked `dynamicallyModifiesClasspath` // won't have the fingerprint i.e. will be null, in such cases do nothing if (additionalResourcesFingerprint == null) { return; } final List<PropertyDescriptor> descriptors = new ArrayList<>(this.getProperties().keySet()); final Set<URL> additionalUrls = this.getAdditionalClasspathResources(descriptors); final String newFingerprint = ClassLoaderUtils.generateAdditionalUrlsFingerprint(additionalUrls); if(!StringUtils.equals(additionalResourcesFingerprint, newFingerprint)) { setAdditionalResourcesFingerprint(newFingerprint); try { logger.info("Updating classpath for " + this.componentType + " with the ID " + this.getIdentifier()); reload(additionalUrls); } catch (Exception e) { logger.error("Error reloading component with id " + id + ": " + e.getMessage(), e); } } } @Override public int hashCode() { return 273171 * id.hashCode(); } @Override public boolean equals(final Object obj) { if (obj == this) { return true; } if (obj == null) { return false; } if (!(obj instanceof ComponentNode)) { return false; } final ComponentNode other = (ComponentNode) obj; return id.equals(other.getIdentifier()); } @Override public String toString() { try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getComponent().getIdentifier())) { return getComponent().toString(); } } @Override public final void performValidation() { boolean replaced = false; do { final ValidationState validationState = getValidationState(); final ValidationContext validationContext = 
getValidationContext(); final Collection<ValidationResult> results = new ArrayList<>(); try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getIdentifier())) { final Collection<ValidationResult> validationResults = computeValidationErrors(validationContext); results.addAll(validationResults); // validate selected controller services implement the API required by the processor final Collection<ValidationResult> referencedServiceValidationResults = validateReferencedControllerServices(validationContext); results.addAll(referencedServiceValidationResults); } final ValidationStatus status = results.isEmpty() ? ValidationStatus.VALID : ValidationStatus.INVALID; final ValidationState updatedState = new ValidationState(status, results); replaced = replaceValidationState(validationState, updatedState); } while (!replaced); } protected Collection<ValidationResult> computeValidationErrors(final ValidationContext validationContext) { Throwable failureCause = null; try { final Collection<ValidationResult> results = getComponent().validate(validationContext); logger.debug("Computed validation errors with Validation Context {}; results = {}", validationContext, results); return results; } catch (final ControllerServiceDisabledException e) { getLogger().debug("Failed to perform validation due to " + e, e); return Collections.singleton( new DisabledServiceValidationResult("Component", e.getControllerServiceId(), "performing validation depends on referencing a Controller Service that is currently disabled")); } catch (final Exception e) { // We don't want to log this as an error because we will return a ValidationResult that is // invalid. However, we do want to make the stack trace available if needed, so we log it at // a debug level. 
getLogger().debug("Failed to perform validation due to " + e, e); failureCause = e; } catch (final Error e) { getLogger().error("Failed to perform validation due to " + e, e); failureCause = e; } return Collections.singleton(new ValidationResult.Builder() .subject("Component") .valid(false) .explanation("Failed to perform validation due to " + failureCause) .build()); } protected final Collection<ValidationResult> validateReferencedControllerServices(final ValidationContext validationContext) { final List<PropertyDescriptor> supportedDescriptors = getComponent().getPropertyDescriptors(); if (supportedDescriptors == null) { return Collections.emptyList(); } final Collection<ValidationResult> validationResults = new ArrayList<>(); for (final PropertyDescriptor descriptor : supportedDescriptors) { if (descriptor.getControllerServiceDefinition() == null) { // skip properties that aren't for a controller service continue; } final String controllerServiceId = validationContext.getProperty(descriptor).getValue(); if (controllerServiceId == null) { continue; } final ControllerServiceNode controllerServiceNode = getControllerServiceProvider().getControllerServiceNode(controllerServiceId); if (controllerServiceNode == null) { final ValidationResult result = createInvalidResult(controllerServiceId, descriptor.getDisplayName(), "Invalid Controller Service: " + controllerServiceId + " is not a valid Controller Service Identifier"); validationResults.add(result); continue; } final ValidationResult apiResult = validateControllerServiceApi(descriptor, controllerServiceNode); if (apiResult != null) { validationResults.add(apiResult); continue; } if (!controllerServiceNode.isActive()) { validationResults.add(new DisabledServiceValidationResult(descriptor.getDisplayName(), controllerServiceId)); } } return validationResults; } private ValidationResult validateControllerServiceApi(final PropertyDescriptor descriptor, final ControllerServiceNode controllerServiceNode) { final Class<? 
extends ControllerService> controllerServiceApiClass = descriptor.getControllerServiceDefinition(); final ClassLoader controllerServiceApiClassLoader = controllerServiceApiClass.getClassLoader(); final ExtensionManager extensionManager = serviceProvider.getExtensionManager(); final String serviceId = controllerServiceNode.getIdentifier(); final String propertyName = descriptor.getDisplayName(); final Bundle controllerServiceApiBundle = extensionManager.getBundle(controllerServiceApiClassLoader); if (controllerServiceApiBundle == null) { return createInvalidResult(serviceId, propertyName, "Unable to find bundle for ControllerService API class " + controllerServiceApiClass.getCanonicalName()); } final BundleCoordinate controllerServiceApiCoordinate = controllerServiceApiBundle.getBundleDetails().getCoordinate(); final Bundle controllerServiceBundle = extensionManager.getBundle(controllerServiceNode.getBundleCoordinate()); if (controllerServiceBundle == null) { return createInvalidResult(serviceId, propertyName, "Unable to find bundle for coordinate " + controllerServiceNode.getBundleCoordinate()); } final BundleCoordinate controllerServiceCoordinate = controllerServiceBundle.getBundleDetails().getCoordinate(); final boolean matchesApi = matchesApi(extensionManager, controllerServiceBundle, controllerServiceApiCoordinate); if (!matchesApi) { final String controllerServiceType = controllerServiceNode.getComponentType(); final String controllerServiceApiType = controllerServiceApiClass.getSimpleName(); final String explanation = new StringBuilder() .append(controllerServiceType).append(" - ").append(controllerServiceCoordinate.getVersion()) .append(" from ").append(controllerServiceCoordinate.getGroup()).append(" - ").append(controllerServiceCoordinate.getId()) .append(" is not compatible with ").append(controllerServiceApiType).append(" - ").append(controllerServiceApiCoordinate.getVersion()) .append(" from ").append(controllerServiceApiCoordinate.getGroup()).append(" 
- ").append(controllerServiceApiCoordinate.getId()) .toString(); return createInvalidResult(serviceId, propertyName, explanation); } return null; } private ValidationResult createInvalidResult(final String serviceId, final String propertyName, final String explanation) { return new ValidationResult.Builder() .input(serviceId) .subject(propertyName) .valid(false) .explanation(explanation) .build(); } /** * Determines if the given controller service node has the required API as an ancestor. * * @param controllerServiceImplBundle the bundle of a controller service being referenced by a processor * @param requiredApiCoordinate the controller service API required by the processor * @return true if the controller service node has the require API as an ancestor, false otherwise */ private boolean matchesApi(final ExtensionManager extensionManager, final Bundle controllerServiceImplBundle, final BundleCoordinate requiredApiCoordinate) { // start with the coordinate of the controller service for cases where the API and service are in the same bundle BundleCoordinate controllerServiceDependencyCoordinate = controllerServiceImplBundle.getBundleDetails().getCoordinate(); boolean foundApiDependency = false; while (controllerServiceDependencyCoordinate != null) { // determine if the dependency coordinate matches the required API if (requiredApiCoordinate.equals(controllerServiceDependencyCoordinate)) { foundApiDependency = true; break; } // move to the next dependency in the chain, or stop if null final Bundle controllerServiceDependencyBundle = extensionManager.getBundle(controllerServiceDependencyCoordinate); if (controllerServiceDependencyBundle == null) { controllerServiceDependencyCoordinate = null; } else { controllerServiceDependencyCoordinate = controllerServiceDependencyBundle.getBundleDetails().getDependencyCoordinate(); } } return foundApiDependency; } @Override public PropertyDescriptor getPropertyDescriptor(final String name) { try (final NarCloseable narCloseable = 
NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getComponent().getIdentifier())) { return getComponent().getPropertyDescriptor(name); } } @Override public List<PropertyDescriptor> getPropertyDescriptors() { try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getComponent().getIdentifier())) { return getComponent().getPropertyDescriptors(); } } private final void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) { try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getComponent().getIdentifier())) { getComponent().onPropertyModified(descriptor, oldValue, newValue); } } @Override public ValidationStatus getValidationStatus() { return validationState.get().getStatus(); } @Override public ValidationStatus getValidationStatus(long timeout, TimeUnit timeUnit) { long millis = timeUnit.toMillis(timeout); final long maxTime = System.currentTimeMillis() + millis; synchronized (validationState) { while (getValidationStatus() == ValidationStatus.VALIDATING) { try { final long waitMillis = Math.max(0, maxTime - System.currentTimeMillis()); if (waitMillis <= 0) { break; } validationState.wait(waitMillis); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return getValidationStatus(); } } return getValidationStatus(); } } protected ValidationState getValidationState() { return validationState.get(); } private boolean replaceValidationState(final ValidationState expectedState, final ValidationState newState) { synchronized (validationState) { if (validationState.compareAndSet(expectedState, newState)) { validationState.notifyAll(); return true; } return false; } } protected void resetValidationState() { validationContext.set(null); validationState.set(new ValidationState(ValidationStatus.VALIDATING, Collections.emptyList())); if 
(isTriggerValidation()) { validationTrigger.triggerAsync(this); } else { logger.debug("Reset validation state of {} but will not trigger async validation because trigger has been paused", this); } } @Override public void pauseValidationTrigger() { triggerValidation = false; } @Override public void resumeValidationTrigger() { triggerValidation = true; final ValidationStatus validationStatus = getValidationStatus(); if (validationStatus == ValidationStatus.VALIDATING) { logger.debug("Resuming Triggering of Validation State for {}; status is VALIDATING so will trigger async validation now", this); validationTrigger.triggerAsync(this); } else { logger.debug("Resuming Triggering of Validation State for {}; status is {} so will not trigger async validation now", this, validationStatus); } } private boolean isTriggerValidation() { return triggerValidation; } @Override public Collection<ValidationResult> getValidationErrors() { return getValidationErrors(Collections.emptySet()); } protected Collection<ValidationResult> getValidationErrors(final Set<ControllerServiceNode> servicesToIgnore) { final ValidationState validationState = this.validationState.get(); if (validationState.getStatus() == ValidationStatus.VALIDATING) { return null; } final Collection<ValidationResult> validationErrors = validationState.getValidationErrors(); if (servicesToIgnore == null || servicesToIgnore.isEmpty()) { return validationErrors; } final Set<String> ignoredServiceIds = servicesToIgnore.stream() .map(ControllerServiceNode::getIdentifier) .collect(Collectors.toSet()); final List<ValidationResult> retainedValidationErrors = new ArrayList<>(); for (final ValidationResult result : validationErrors) { if (!(result instanceof DisabledServiceValidationResult)) { retainedValidationErrors.add(result); continue; } final String serviceId = ((DisabledServiceValidationResult) result).getControllerServiceIdentifier(); if (!ignoredServiceIds.contains(serviceId)) { retainedValidationErrors.add(result); } } 
return retainedValidationErrors; } public abstract void verifyModifiable() throws IllegalStateException; /** * */ ControllerServiceProvider getControllerServiceProvider() { return this.serviceProvider; } @Override public String getCanonicalClassName() { return componentCanonicalClass; } @Override public String getComponentType() { return componentType; } protected ValidationContextFactory getValidationContextFactory() { return this.validationContextFactory; } protected ValidationContext getValidationContext() { while (true) { ValidationContext context = this.validationContext.get(); if (context != null) { return context; } // Use a lock here because we want to prevent calls to getProperties() from happening while setProperties() is also happening. final Map<PropertyDescriptor, String> properties; lock.lock(); try { properties = getProperties(); } finally { lock.unlock(); } context = getValidationContextFactory().newValidationContext(properties, getAnnotationData(), getProcessGroupIdentifier(), getIdentifier()); final boolean updated = validationContext.compareAndSet(null, context); if (updated) { logger.debug("Updating validation context to {}", context); return context; } } } @Override public ComponentVariableRegistry getVariableRegistry() { return this.variableRegistry; } protected ReloadComponent getReloadComponent() { return this.reloadComponent; } protected ExtensionManager getExtensionManager() { return this.extensionManager; } @Override public void verifyCanUpdateBundle(final BundleCoordinate incomingCoordinate) throws IllegalArgumentException { final BundleCoordinate existingCoordinate = getBundleCoordinate(); // determine if this update is changing the bundle for the processor if (!existingCoordinate.equals(incomingCoordinate)) { // if it is changing the bundle, only allow it to change to a different version within same group and id if (!existingCoordinate.getGroup().equals(incomingCoordinate.getGroup()) || 
!existingCoordinate.getId().equals(incomingCoordinate.getId())) { throw new IllegalArgumentException(String.format( "Unable to update component %s from %s to %s because bundle group and id must be the same.", getIdentifier(), existingCoordinate.getCoordinate(), incomingCoordinate.getCoordinate())); } } } protected void setAdditionalResourcesFingerprint(String additionalResourcesFingerprint) { this.additionalResourcesFingerprint = additionalResourcesFingerprint; } }
apache-2.0
Killerardvark/CryodexSource
src/main/java/cryodex/widget/wizard/pages/ProgressionCutPage.java
5501
package cryodex.widget.wizard.pages;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SpringLayout;

import cryodex.CryodexController;
import cryodex.Language;
import cryodex.Player;
import cryodex.modules.Tournament;
import cryodex.modules.WizardController;
import cryodex.modules.xwing.XWingPlayer;
import cryodex.modules.xwing.XWingTournament;
import cryodex.widget.ComponentUtils;
import cryodex.widget.SpringUtilities;
import cryodex.widget.wizard.TournamentWizard;
import cryodex.widget.wizard.WizardOptions;
import cryodex.widget.wizard.WizardUtils;

/**
 * Wizard page for building a "progression cut" event: the user selects one or
 * more existing X-Wing tournaments, optionally caps the number of advancing
 * players and/or sets a minimum score, and the qualifying players are carried
 * into the next wizard step.
 *
 * <p>Only {@link XWingTournament} instances are offered for selection; other
 * tournament types returned by {@link CryodexController#getAllTournaments()}
 * are silently skipped.
 */
public class ProgressionCutPage implements Page {

    // Lazily built in getPanel(); reused on subsequent visits to this page.
    private JPanel pagePanel = null;

    // Maps each selectable tournament to the checkbox that toggles it.
    private final Map<XWingTournament, JCheckBox> checkBoxMap = new HashMap<XWingTournament, JCheckBox>();

    private JLabel maxPlayersLabel = null;
    private JLabel minPointsLabel = null;
    private JTextField maxPlayersTF = null;
    private JTextField minPointsTF = null;

    private WizardController wizardController;

    /**
     * @param wizardController supplies the follow-up page shown after this one
     */
    public ProgressionCutPage(WizardController wizardController) {
        this.wizardController = wizardController;
    }

    /**
     * Builds (once) and returns the page UI: a checkbox per X-Wing tournament
     * plus "max players" / "min points" text fields.
     */
    @Override
    public JPanel getPanel() {

        TournamentWizard.getInstance().setButtonVisibility(true, true, false);
        TournamentWizard.getInstance().setMinimumSize(new Dimension(450, 500));

        if (pagePanel == null) {

            pagePanel = new JPanel(new BorderLayout());

            JLabel header = new JLabel("<HTML><H3>" + Language.select_tournaments + "</H3></HTML>");

            JPanel listPanel = new JPanel(new SpringLayout());

            // One checkbox per tournament; only X-Wing tournaments are
            // remembered in checkBoxMap and can therefore be selected in onNext().
            for (Tournament t : CryodexController.getAllTournaments()) {
                JCheckBox cb = new JCheckBox(t.getName());
                if (t instanceof XWingTournament) {
                    checkBoxMap.put((XWingTournament) t, cb);
                }
                listPanel.add(cb);
            }

            SpringUtilities.makeCompactGrid(listPanel, listPanel.getComponentCount(), 1, 0, 0, 0, 0);

            JLabel playersFromLabel = new JLabel("<HTML><H3>" + Language.additional_information + "</H3></HTML>");

            maxPlayersLabel = new JLabel(Language.max_players);
            minPointsLabel = new JLabel(Language.min_points);
            maxPlayersTF = new JTextField(3);
            minPointsTF = new JTextField(3);

            JPanel maxPlayer = ComponentUtils.addToHorizontalBorderLayout(maxPlayersLabel, maxPlayersTF, null);
            JPanel minPoints = ComponentUtils.addToHorizontalBorderLayout(minPointsLabel, minPointsTF, null);

            JPanel infoPanel = ComponentUtils.addToVerticalBorderLayout(playersFromLabel, maxPlayer, minPoints);

            pagePanel.add(ComponentUtils.addToFlowLayout(header, FlowLayout.LEFT), BorderLayout.NORTH);
            pagePanel.add(ComponentUtils.addToFlowLayout(listPanel, FlowLayout.LEFT), BorderLayout.CENTER);
            pagePanel.add(ComponentUtils.addToFlowLayout(infoPanel, FlowLayout.CENTER), BorderLayout.SOUTH);
        }

        return ComponentUtils.addToFlowLayout(pagePanel, FlowLayout.CENTER);
    }

    /**
     * Parses the field's text as an int, returning {@code null} when the text
     * is not a number (blank included). {@code null} means "no limit".
     */
    private static Integer parseIntOrNull(JTextField field) {
        try {
            return Integer.parseInt(field.getText());
        } catch (NumberFormatException e) {
            // Unparseable input is treated as "not specified" rather than an error.
            return null;
        }
    }

    /**
     * Collects the checked tournaments, ranks the merged player pool, applies
     * the optional max-player and min-point cuts, and advances the wizard.
     * Does nothing (stays on this page) when no tournament is checked.
     */
    @Override
    public void onNext() {

        WizardOptions wizardOptions = TournamentWizard.getInstance().getWizardOptions();

        List<Tournament> tournamentList = new ArrayList<Tournament>();
        // TreeSet de-duplicates players appearing in several tournaments.
        // NOTE(review): assumes Player implements Comparable — TODO confirm.
        Set<Player> playerList = new TreeSet<Player>();

        Integer playerCount = parseIntOrNull(maxPlayersTF);
        Integer minPoints = parseIntOrNull(minPointsTF);

        // Gather every checked tournament and the union of their players.
        for (Map.Entry<XWingTournament, JCheckBox> entry : checkBoxMap.entrySet()) {
            if (entry.getValue().isSelected()) {
                tournamentList.add(entry.getKey());
                playerList.addAll(entry.getKey().getPlayers());
            }
        }

        if (tournamentList.isEmpty()) {
            // Nothing selected; remain on this page.
            return;
        }

        wizardOptions.setPlayerList(new ArrayList<Player>(playerList));
        wizardOptions.setSelectedTournaments(tournamentList);

        List<Player> rankedPlayers = WizardUtils.rankMergedPlayers(wizardOptions);

        XWingTournament mergedTournament = (XWingTournament) WizardUtils.getMergedTournament(wizardOptions);

        // Walk the ranking, stopping at the player cap and skipping anyone
        // below the minimum score.
        List<Player> playersToAdd = new ArrayList<Player>();
        for (Player p : rankedPlayers) {

            XWingPlayer xp = mergedTournament.getModulePlayer(p);

            if (playerCount != null && playersToAdd.size() >= playerCount) {
                break;
            }

            if (minPoints != null && xp.getScore(mergedTournament) < minPoints) {
                continue;
            }

            playersToAdd.add(p);
        }

        // Replace the full merged list with only the players who made the cut.
        wizardOptions.setPlayerList(playersToAdd);

        TournamentWizard.getInstance().setCurrentPage(wizardController.getAdditionalOptionsPage());
    }

    @Override
    public void onPrevious() {
        TournamentWizard.getInstance().goToPrevious();
    }

    @Override
    public void onFinish() {
        // Do nothing
    }
}
apache-2.0
google-code-export/google-api-dfp-java
examples/v201306/lineitemcreativeassociationservice/GetLicasByStatementExample.java
3108
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package v201306.lineitemcreativeassociationservice; import com.google.api.ads.dfp.lib.DfpService; import com.google.api.ads.dfp.lib.DfpServiceLogger; import com.google.api.ads.dfp.lib.DfpUser; import com.google.api.ads.dfp.lib.utils.v201306.StatementBuilder; import com.google.api.ads.dfp.v201306.LineItemCreativeAssociation; import com.google.api.ads.dfp.v201306.LineItemCreativeAssociationPage; import com.google.api.ads.dfp.v201306.LineItemCreativeAssociationServiceInterface; import com.google.api.ads.dfp.v201306.Statement; /** * This example gets all line item creative associations for a given line item * ID. The statement retrieves up to the maximum page size limit of 500. To * create LICAs, run CreateLicasExample.java. To determine which line items * exist, run GetAllLineItemsExample.java. * * Tags: LineItemCreativeAssociationService.getLineItemCreativeAssociationsByStatement * * @author api.arogal@gmail.com (Adam Rogal) */ public class GetLicasByStatementExample { public static void main(String[] args) { try { // Log SOAP XML request and response. DfpServiceLogger.log(); // Get DfpUser from "~/dfp.properties". DfpUser user = new DfpUser(); // Get the LineItemCreativeAssociationService. LineItemCreativeAssociationServiceInterface licaService = user.getService(DfpService.V201306.LINEITEMCREATIVEASSOCIATION_SERVICE); // Set the line item to get LICAs by. 
Long lineItemId = Long.parseLong("INSERT_LINE_ITEM_ID_HERE"); // Create a statement to only select LICAs for the given lineItem ID. Statement filterStatement = new StatementBuilder("WHERE lineItemId = :lineItemId LIMIT 500") .putValue("lineItemId", lineItemId).toStatement(); // Get LICAs by statement. LineItemCreativeAssociationPage page = licaService.getLineItemCreativeAssociationsByStatement(filterStatement); if (page.getResults() != null) { int i = page.getStartIndex(); for (LineItemCreativeAssociation lica : page.getResults()) { System.out.println(i + ") LICA with line item ID \"" + lica.getLineItemId() + "\", creative ID \"" + lica.getCreativeId() + "\", and status \"" + lica.getStatus() + "\" was found."); i++; } } System.out.println("Number of results found: " + page.getTotalResultSetSize()); } catch (Exception e) { e.printStackTrace(); } } }
apache-2.0
pdecat/facelets
src/test/java/com/sun/facelets/tag/jsf/core/CoreTestCase.java
12838
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sun.facelets.tag.jsf.core;

import java.util.Date;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

import javax.faces.component.UICommand;
import javax.faces.component.UIComponent;
import javax.faces.component.UIData;
import javax.faces.component.UIInput;
import javax.faces.component.UIOutput;
import javax.faces.component.UIViewRoot;
import javax.faces.component.html.HtmlGraphicImage;
import javax.faces.context.FacesContext;
import javax.faces.convert.DateTimeConverter;
import javax.faces.event.ActionListener;
import javax.faces.validator.Validator;
import javax.servlet.http.HttpServletResponse;

import com.sun.facelets.Facelet;
import com.sun.facelets.FaceletFactory;
import com.sun.facelets.FaceletTestCase;

/**
 * Tests for the core JSF tag handlers (f:actionListener, f:attribute,
 * f:convertDateTime, f:convertNumber, f:converter, f:facet, f:loadBundle,
 * f:validator and friends, f:valueChangeListener, f:view).
 *
 * Each test applies a fixture Facelet (the *.xml files named below, resolved
 * by FaceletFactory from the test resources — not visible here) to the
 * current view root and asserts on the resulting component tree.
 */
public class CoreTestCase extends FaceletTestCase {

    // Verifies f:actionListener: one listener per tag, and that firing the
    // EL-bound listener writes "OK" into the request map.
    public void testActionListenerHandler() throws Exception {
        // ActionListenerImpl is a test fixture defined elsewhere in this package.
        ActionListener listener = new ActionListenerImpl();

        FacesContext faces = FacesContext.getCurrentInstance();
        faces.getExternalContext().getRequestMap().put("actionListener", listener);

        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("actionListener.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UICommand action1 = (UICommand) root.findComponent("action1");
        UICommand action2 = (UICommand) root.findComponent("action2");

        assertNotNull("action1", action1);
        assertNotNull("action2", action2);
        assertEquals("action1 listeners", 1, action1.getActionListeners().length);
        assertEquals("action2 listeners", 2, action2.getActionListeners().length);

        // The marker must not exist before the listener runs, and must be "OK" after.
        assertNull(faces.getExternalContext().getRequestMap().get("CoreTestCase.testActionListenerHandler"));
        action2.getActionListeners()[0].processAction(null);
        assertEquals("OK", faces.getExternalContext().getRequestMap().get("CoreTestCase.testActionListenerHandler"));
    }

    // Verifies f:attribute with both a literal value and an EL expression.
    public void testAttributeHandler() throws Exception {
        String title = "Dog in a Funny Hat";
        FacesContext faces = FacesContext.getCurrentInstance();
        faces.getExternalContext().getRequestMap().put("title", title);

        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("attribute.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        HtmlGraphicImage graphic1 = (HtmlGraphicImage) root
                .findComponent("graphic1");
        HtmlGraphicImage graphic2 = (HtmlGraphicImage) root
                .findComponent("graphic2");

        assertNotNull("graphic1", graphic1);
        assertNotNull("graphic2", graphic2);

        assertEquals("graphic1 title", "literal", graphic1.getTitle());
        assertEquals("graphic2 title", title, graphic2.getTitle());
    }

    // Verifies f:convertDateTime: six outputs with different pattern/style/
    // timeZone attributes must format a fixed Date to the exact strings below.
    // NOTE(review): expected strings presumably assume a fixed test locale and
    // zone set up by FaceletTestCase — not visible here.
    public void testConvertDateTimeHandler() throws Exception {
        // A fixed instant so formatted output is deterministic.
        Date now = new Date(1000 * 360 * 60 * 24 * 7);
        FacesContext faces = FacesContext.getCurrentInstance();
        faces.getExternalContext().getRequestMap().put("now", now);

        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("convertDateTime.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIOutput out1 = (UIOutput) root.findComponent("form:out1");
        UIOutput out2 = (UIOutput) root.findComponent("form:out2");
        UIOutput out3 = (UIOutput) root.findComponent("form:out3");
        UIOutput out4 = (UIOutput) root.findComponent("form:out4");
        UIOutput out5 = (UIOutput) root.findComponent("form:out5");
        UIOutput out6 = (UIOutput) root.findComponent("form:out6");

        assertNotNull("out1", out1);
        assertNotNull("out2", out2);
        assertNotNull("out3", out3);
        assertNotNull("out4", out4);
        assertNotNull("out5", out5);
        assertNotNull("out6", out6);

        assertNotNull("out1 converter", out1.getConverter());
        assertNotNull("out2 converter", out2.getConverter());
        assertNotNull("out3 converter", out3.getConverter());
        assertNotNull("out4 converter", out4.getConverter());
        assertNotNull("out5 converter", out5.getConverter());
        DateTimeConverter converter6 = (DateTimeConverter) out6.getConverter();

        assertEquals("out1 value", "12/24/69", out1.getConverter().getAsString(
                faces, out1, now));
        assertEquals("out2 value", "12/24/69 6:57:12 AM", out2.getConverter()
                .getAsString(faces, out2, now));
        assertEquals("out3 value", "Dec 24, 1969", out3.getConverter()
                .getAsString(faces, out3, now));
        assertEquals("out4 value", "6:57:12 AM", out4.getConverter()
                .getAsString(faces, out4, now));
        assertEquals("out5 value", "0:57 AM, CST", out5.getConverter()
                .getAsString(faces, out5, now));
        // out6's converter must default its zone to GMT.
        assertEquals("Timezone should be GMT", TimeZone.getTimeZone("GMT"),
                converter6.getTimeZone());
    }

    // Verifies f:converter delegating to a converter registered by id: the
    // attached converter must round-trip the output's local value.
    public void testConvertDelegateHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("converter.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIOutput out1 = (UIOutput) root.findComponent("out1");

        assertNotNull("out1", out1);
        assertNotNull("out1 converter", out1.getConverter());

        assertEquals("out1 value", new Double(42.5), out1.getConverter().getAsObject(faces, out1, out1.getLocalValue().toString()));
    }

    // Verifies f:convertNumber: integer, currency, padded, fraction-digit and
    // percent formatting must produce the exact strings below.
    public void testConvertNumberHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("convertNumber.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIOutput out1 = (UIOutput) root.findComponent("out1");
        UIOutput out2 = (UIOutput) root.findComponent("out2");
        UIOutput out3 = (UIOutput) root.findComponent("out3");
        UIOutput out4 = (UIOutput) root.findComponent("out4");
        UIOutput out5 = (UIOutput) root.findComponent("out5");

        assertNotNull("out1", out1);
        assertNotNull("out2", out2);
        assertNotNull("out3", out3);
        assertNotNull("out4", out4);
        assertNotNull("out5", out5);

        assertNotNull("out1 converter", out1.getConverter());
        assertNotNull("out2 converter", out2.getConverter());
        assertNotNull("out3 converter", out3.getConverter());
        assertNotNull("out4 converter", out4.getConverter());
        assertNotNull("out5 converter", out5.getConverter());

        assertEquals("out1 value", "12", out1.getConverter().getAsString(
                faces, out1, new Double(12.001)));
        assertEquals("out2 value", "$12.00", out2.getConverter()
                .getAsString(faces, out2, new Double(12.00)));
        assertEquals("out3 value", "00,032", out3.getConverter()
                .getAsString(faces, out3, new Double(32)));
        assertEquals("out4 value", "0.67", out4.getConverter()
                .getAsString(faces, out4, new Double(2.0/3.0)));
        assertEquals("out5 value", "67%", out5.getConverter()
                .getAsString(faces, out5, new Double(0.67)));
    }

    // Verifies f:facet: the facet declared in the fixture must be attached
    // as the table's footer.
    public void testFacetHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("facet.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIData data = (UIData) root.findComponent("table");

        assertNotNull("data", data);

        UIComponent footer = data.getFooter();

        assertNotNull("footer", footer);
    }

    // Verifies f:loadBundle: the bundle appears in the request map as a Map,
    // and a missing key yields a "???"-marked placeholder rather than failing.
    public void testLoadBundleHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("loadBundle.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        Object value = faces.getExternalContext().getRequestMap().get("foo");

        assertNotNull("bundle loaded into request", value);
        assertTrue(value instanceof Map);
        String result = (String)((Map)value).get("some.not.found.key");
        assertTrue(result.contains("???"));
    }

    // Verifies f:validator delegating by id: exactly one validator is attached
    // and it accepts the sample value without throwing.
    public void testValidateDelegateHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("validator.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIInput input = (UIInput) root.findComponent("form:input");

        assertNotNull("input", input);
        assertEquals("input validator", 1, input.getValidators().length);

        Validator v = input.getValidators()[0];

        // Must not throw for an in-range value.
        v.validate(faces, input, "4333");
    }

    // Verifies f:validateDoubleRange with a value inside the fixture's range.
    public void testValidateDoubleRangeHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("validateDoubleRange.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIInput input = (UIInput) root.findComponent("form:input");

        assertNotNull("input", input);
        assertEquals("input validator", 1, input.getValidators().length);

        Validator v = input.getValidators()[0];

        v.validate(faces, input, new Double(1.8));
    }

    // Verifies f:validateLength with a string of acceptable length.
    public void testValidateLengthHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("validateLength.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIInput input = (UIInput) root.findComponent("form:input");

        assertNotNull("input", input);
        assertEquals("input validator", 1, input.getValidators().length);

        Validator v = input.getValidators()[0];

        v.validate(faces, input, "beans");
    }

    // Verifies f:validateLongRange with a value inside the fixture's range.
    public void testValidateLongRangeHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("validateLongRange.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIInput input = (UIInput) root.findComponent("form:input");

        assertNotNull("input", input);
        assertEquals("input validator", 1, input.getValidators().length);

        Validator v = input.getValidators()[0];

        v.validate(faces, input, new Long(2000));
    }

    // Verifies f:valueChangeListener attaches exactly one listener.
    public void testValueChangeListenerHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("valueChangeListener.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        UIInput input = (UIInput) root.findComponent("form:input");

        assertNotNull("input", input);
        assertEquals("input listener", 1, input.getValueChangeListeners().length);
    }

    // Verifies f:view's locale attribute is applied to the view root.
    public void testViewHandler() throws Exception {
        FacesContext faces = FacesContext.getCurrentInstance();
        FaceletFactory f = FaceletFactory.getInstance();
        Facelet at = f.getFacelet("view.xml");

        UIViewRoot root = faces.getViewRoot();
        at.apply(faces, root);

        assertEquals("german locale", Locale.GERMAN, root.getLocale());
    }

}
apache-2.0
krevelen/coala
coala-adapters/coala-guice3-adapter/src/main/java/io/coala/guice/log/JULMembersInjector.java
2157
/* $Id$
 * $URL: https://dev.almende.com/svn/abms/guice-util/src/main/java/io/coala/guice/log/JULMembersInjector.java $
 *
 * Part of the EU project Adapt4EE, see http://www.adapt4ee.eu/
 *
 * @license
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 * Copyright (c) 2010-2013 Almende B.V.
 */
package io.coala.guice.log;

import io.coala.log.LogUtil;
import io.coala.model.ModelComponent;

import java.lang.reflect.Field;

import org.apache.log4j.Logger;

import com.google.inject.MembersInjector;

/**
 * {@link JULMembersInjector} — a Guice {@link MembersInjector} that assigns a
 * {@link java.util.logging.Logger} to one specific field of each injectee. The
 * logger name is based on the field's declaring class; for
 * {@link ModelComponent} injectees the component ID is prefixed so log lines
 * can be traced back to the individual model instance.
 *
 * @date $Date: 2014-04-18 17:39:26 +0200 (Fri, 18 Apr 2014) $
 * @version $Revision: 240 $
 * @author <a href="mailto:Rick@almende.org">Rick</a>
 *
 * @param <T> the type of object whose field receives the logger
 */
class JULMembersInjector<T> implements MembersInjector<T>
{
	/** class-level log4j logger for this injector itself */
	private static final Logger LOG = LogUtil
			.getLogger(JULMembersInjector.class);

	/** the (made accessible) field that will receive the injected JUL logger */
	private final Field field;

	/**
	 * {@link JULMembersInjector} constructor
	 *
	 * @param field the logger field to inject into; made accessible up front
	 *            so private fields can be written as well
	 */
	public JULMembersInjector(final Field field)
	{
		this.field = field;
		field.setAccessible(true);
	}

	@Override
	public void injectMembers(final T t)
	{
		// base the logger name on the class that declared the field; prefix
		// the model component's ID when available to disambiguate instances
		final String declaringName = this.field.getDeclaringClass().getName();
		final java.util.logging.Logger logger;
		if (t instanceof ModelComponent)
			logger = LogUtil.getJavaLogger(((ModelComponent<?>) t).getID()
					+ " " + declaringName);
		else
			logger = LogUtil.getJavaLogger(declaringName);

		try
		{
			this.field.set(t, logger);
			LOG.trace("Injected "
					+ java.util.logging.Logger.class.getSimpleName()
					+ " into a " + t.getClass().getSimpleName());
		} catch (final IllegalAccessException e)
		{
			// should not happen after setAccessible(true); surface as unchecked
			throw new RuntimeException(e);
		}
	}
}
apache-2.0
justinhrobbins/FlashCards_App
FlashCards_UI/FlashCards_GWT/src/main/java/org/robbins/flashcards/util/ConstsUtil.java
1501
package org.robbins.flashcards.util; public final class ConstsUtil { private ConstsUtil() { } public static final String EDIT_FLASHCARD = "editFlashCard"; public static final String FLASHCARD_FORM = "flashCardForm"; public static final String LIST_FLASHCARDS = "listFlashCards"; public static final String LIST_TAGS = "listTags"; public static final String LOGIN = "login"; public static final String LOGOUT = "logout"; public static final String LOG_IN_URL = "/api/login/loginForm.jsp"; public static final String OPEN_ID_URL = "/api/login/openIdServlet"; public static final String SHELL_VIEW = "ShellView"; public static final String TAG_FORM = "tagForm"; public static final String USER_ID = "userId"; public static final String FLASHCARD_REST_URL = "/api/v1/flashcards"; public static final String TAG_REST_URL = "/api/v1/tags"; public static final String USER_REST_URL = "/api/v1/users"; public static final String DEFAULT_FLASHCARDS_LIST_FIELDS = "id,question,answer,tags,name"; public static final String DEFAULT_FLASHCARDS_FIELDS = "id,question,answer,tags,name,links,createdDate,lastModifiedDate"; public static final String DEFAULT_TAGS_LIST_FIELDS = "id,name,flashcards,question"; public static final String DEFAULT_TAGS_FIELDS = "id,name,flashcards,question,createdDate,lastModifiedDate"; public static final String DEFAULT_AUTH_HEADER = "Basic YXBpdXNlcjphcGl1c2VycGFzc3dvcmQ="; }
apache-2.0
jonvestal/open-kilda
src-java/flowhs-topology/flowhs-storm-topology/src/test/java/org/openkilda/wfm/topology/flowhs/service/FlowCreateServiceTest.java
16804
/* Copyright 2019 Telstra Open Source
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openkilda.wfm.topology.flowhs.service;

import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.when;

import org.openkilda.floodlight.api.request.EgressFlowSegmentInstallRequest;
import org.openkilda.floodlight.api.request.EgressFlowSegmentVerifyRequest;
import org.openkilda.floodlight.api.request.FlowSegmentRequest;
import org.openkilda.floodlight.api.request.IngressFlowSegmentInstallRequest;
import org.openkilda.floodlight.api.request.IngressFlowSegmentVerifyRequest;
import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse;
import org.openkilda.floodlight.flow.response.FlowErrorResponse;
import org.openkilda.floodlight.flow.response.FlowErrorResponse.ErrorCode;
import org.openkilda.messaging.command.flow.FlowRequest;
import org.openkilda.model.Flow;
import org.openkilda.model.FlowEndpoint;
import org.openkilda.model.FlowPathStatus;
import org.openkilda.model.FlowStatus;
import org.openkilda.pce.GetPathsResult;
import org.openkilda.pce.exception.RecoverableException;
import org.openkilda.pce.exception.UnroutableFlowException;
import org.openkilda.wfm.CommandContext;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

/**
 * Unit tests for {@code FlowCreateService}: happy-path creation (transit,
 * one-switch, pinned, protected-path flows), rollback when speaker rule
 * installation fails, and the retry policy applied to speaker commands.
 *
 * <p>Speaker interaction is simulated by draining the {@code requests} queue
 * (populated via the mocked carrier) and feeding success / error responses
 * back into the service under test.
 */
@RunWith(MockitoJUnitRunner.class)
public class FlowCreateServiceTest extends AbstractFlowTest {

    @Mock
    private FlowCreateHubCarrier carrier;

    @Before
    public void init() {
        // capture every speaker command into the shared "requests" queue
        doAnswer(getSpeakerCommandsAnswer()).when(carrier).sendSpeakerRequest(any(FlowSegmentRequest.class));
    }

    @Test
    public void shouldCreateFlowWithTransitSwitches() throws Exception {
        FlowRequest request = makeRequest()
                .flowId("test_successful_flow_id")
                .build();
        preparePathComputation(request.getFlowId(), make3SwitchesPathPair());

        testHappyPath(request, "successful_flow_create");
    }

    @Test
    public void shouldCreateOneSwitchFlow() throws Exception {
        FlowRequest request = makeRequest()
                .flowId("one_switch_flow")
                .destination(new FlowEndpoint(SWITCH_SOURCE, 2, 2))
                .build();
        preparePathComputation(request.getFlowId(), makeOneSwitchPathPair());

        testHappyPath(request, "successful_flow_create");
    }

    @Test
    public void shouldCreatePinnedFlow() throws Exception {
        FlowRequest request = makeRequest()
                .flowId("test_successful_flow_id")
                .pinned(true)
                .build();
        preparePathComputation(request.getFlowId(), make3SwitchesPathPair());

        Flow result = testHappyPath(request, "successful_flow_create");
        Assert.assertTrue(result.isPinned());
    }

    @Test
    public void shouldCreateFlowWithProtectedPath() throws Exception {
        FlowRequest request = makeRequest()
                .flowId("test_successful_flow_id")
                .allocateProtectedPath(true)
                .build();
        // first PCE answer becomes the primary path, second the protected one
        when(pathComputer.getPath(makeFlowArgumentMatch(request.getFlowId())))
                .thenReturn(make2SwitchesPathPair())
                .thenReturn(make3SwitchesPathPair());

        Flow result = testHappyPath(request, "successful_flow_create");
        Assert.assertTrue(result.isAllocateProtectedPath());
        verifyFlowPathStatus(result.getProtectedForwardPath(), FlowPathStatus.ACTIVE, "protected-forward");
        verifyFlowPathStatus(result.getProtectedReversePath(), FlowPathStatus.ACTIVE, "protected-reverse");
    }

    /**
     * Drives a flow-create request to completion, answering every speaker
     * command with success, and asserts the flow/path status transitions
     * IN_PROGRESS -> UP.
     *
     * @param flowRequest the create request to submit
     * @param key correlation key used for async responses
     * @return the created flow in its final (UP) state
     */
    private Flow testHappyPath(FlowRequest flowRequest, String key) {
        FlowCreateService service = makeService();
        service.handleRequest(key, new CommandContext(), flowRequest);

        Flow inProgress = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.IN_PROGRESS);
        verifyFlowPathStatus(inProgress.getForwardPath(), FlowPathStatus.IN_PROGRESS, "forward");
        verifyFlowPathStatus(inProgress.getReversePath(), FlowPathStatus.IN_PROGRESS, "reverse");
        verifyNorthboundSuccessResponse(carrier);

        FlowSegmentRequest request;
        while ((request = requests.poll()) != null) {
            if (request.isVerifyRequest()) {
                service.handleAsyncResponse(key, buildResponseOnVerifyRequest(request));
            } else {
                handleResponse(service, key, request);
            }
        }

        Flow result = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.UP);
        verifyFlowPathStatus(result.getForwardPath(), FlowPathStatus.ACTIVE, "forward");
        verifyFlowPathStatus(result.getReversePath(), FlowPathStatus.ACTIVE, "reverse");
        return result;
    }

    @Test
    public void shouldRollbackIfEgressRuleNotInstalled() throws Exception {
        when(pathComputer.getPath(any(Flow.class))).thenReturn(make3SwitchesPathPair());

        String key = "failed_flow_create";
        FlowRequest flowRequest = makeRequest()
                .flowId("failed_flow_id")
                .build();

        FlowCreateService service = makeService();
        service.handleRequest(key, new CommandContext(), flowRequest);

        Flow inProgress = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.IN_PROGRESS);
        verifyFlowPathStatus(inProgress.getForwardPath(), FlowPathStatus.IN_PROGRESS, "forward");
        verifyFlowPathStatus(inProgress.getReversePath(), FlowPathStatus.IN_PROGRESS, "reverse");
        verifyNorthboundSuccessResponse(carrier);

        FlowSegmentRequest request;
        int installCommands = 0;
        int deleteCommands = 0;
        while ((request = requests.poll()) != null) {
            if (request.isVerifyRequest()) {
                service.handleAsyncResponse(key, buildResponseOnVerifyRequest(request));
            } else if (request.isInstallRequest()) {
                installCommands++;
                // fail the very last install command to trigger the rollback
                if (requests.size() > 1) {
                    handleResponse(service, key, request);
                } else {
                    handleErrorResponse(service, key, request, ErrorCode.UNKNOWN);
                }
            } else if (request.isRemoveRequest()) {
                deleteCommands++;
                handleResponse(service, key, request);
            }
        }

        assertEquals("All installed rules should be deleted", installCommands, deleteCommands);

        Flow result = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.DOWN);
        // TODO(surabujin): do we really want to create flow without paths?
        Assert.assertNull(result.getForwardPath());
        Assert.assertNull(result.getReversePath());
    }

    @Test
    public void shouldRollbackIfIngressRuleNotInstalled() throws Exception {
        when(pathComputer.getPath(any(Flow.class))).thenReturn(make3SwitchesPathPair());

        String key = "failed_flow_create";
        FlowRequest flowRequest = makeRequest()
                .flowId("failed_flow_id")
                .build();

        FlowCreateService service = makeService();
        service.handleRequest(key, new CommandContext(), flowRequest);

        Flow inProgress = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.IN_PROGRESS);
        verifyFlowPathStatus(inProgress.getForwardPath(), FlowPathStatus.IN_PROGRESS, "forward");
        verifyFlowPathStatus(inProgress.getReversePath(), FlowPathStatus.IN_PROGRESS, "reverse");
        verifyNorthboundSuccessResponse(carrier);

        FlowSegmentRequest request;
        int installCommands = 0;
        int deleteCommands = 0;
        while ((request = requests.poll()) != null) {
            if (request.isVerifyRequest()) {
                service.handleAsyncResponse(key, buildResponseOnVerifyRequest(request));
            } else if (request.isInstallRequest()) {
                installCommands++;
                // let egress installs succeed; fail the final (ingress) install
                if (requests.size() > 1 || request instanceof EgressFlowSegmentInstallRequest) {
                    handleResponse(service, key, request);
                } else {
                    handleErrorResponse(service, key, request, ErrorCode.UNKNOWN);
                }
            } else if (request.isRemoveRequest()) {
                deleteCommands++;
                handleResponse(service, key, request);
            }
        }

        assertEquals("All installed rules should be deleted", installCommands, deleteCommands);

        Flow result = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.DOWN);
        Assert.assertNull(result.getForwardPath());
        Assert.assertNull(result.getReversePath());
    }

    @Test
    public void shouldRetryNotIngressRequestOnSwitchUnavailable() throws Exception {
        testSpeakerCommandRetry(EgressFlowSegmentInstallRequest.class, ErrorCode.SWITCH_UNAVAILABLE, true);
    }

    @Test
    public void shouldRetryNotIngressRequestOnTimeout() throws Exception {
        testSpeakerCommandRetry(EgressFlowSegmentInstallRequest.class, ErrorCode.OPERATION_TIMED_OUT, true);
    }

    @Test
    public void shouldRetryIngressRequestOnSwitchUnavailable() throws Exception {
        testSpeakerCommandRetry(IngressFlowSegmentInstallRequest.class, ErrorCode.SWITCH_UNAVAILABLE, true);
    }

    @Test
    public void shouldRetryIngressRequestOnTimeout() throws Exception {
        // FIX(review): this test previously passed EgressFlowSegmentInstallRequest,
        // duplicating shouldRetryNotIngressRequestOnTimeout and leaving the ingress
        // timeout path untested — a copy-paste slip given the method name and the
        // SwitchUnavailable sibling above, which correctly uses the ingress request.
        testSpeakerCommandRetry(IngressFlowSegmentInstallRequest.class, ErrorCode.OPERATION_TIMED_OUT, true);
    }

    @Test
    public void shouldRetryNotIngressValidationRequestOnSwitchUnavailable() throws Exception {
        testSpeakerCommandRetry(EgressFlowSegmentVerifyRequest.class, ErrorCode.SWITCH_UNAVAILABLE, true);
    }

    @Test
    public void shouldRetryIngressValidationRequestOnSwitchUnavailable() throws Exception {
        testSpeakerCommandRetry(IngressFlowSegmentVerifyRequest.class, ErrorCode.SWITCH_UNAVAILABLE, true);
    }

    @Test
    public void shouldNotRetryValidationOnPermanentError() throws Exception {
        testSpeakerCommandRetry(EgressFlowSegmentVerifyRequest.class, ErrorCode.MISSING_OF_FLOWS, false);
    }

    /**
     * Exercises the speaker-command retry policy: every command of type
     * {@code failRequest} is answered with {@code error} until its per-command
     * retry budget is exhausted.
     *
     * @param failRequest speaker request class to fail
     * @param error error code to answer with
     * @param mustRetry whether the service is expected to retry (and the flow
     *        to eventually come UP) or to give up immediately (flow DOWN)
     */
    private void testSpeakerCommandRetry(Class<?> failRequest, ErrorCode error, boolean mustRetry) throws Exception {
        String key = "retries_non_ingress_installation";
        FlowRequest flowRequest = makeRequest()
                .flowId("dummy_flow_id")
                .build();

        int retriesLimit = 10;
        FlowCreateService service = makeService(retriesLimit);
        preparePathComputation(flowRequest.getFlowId(), make2SwitchesPathPair());
        service.handleRequest(key, new CommandContext(), flowRequest);

        Flow inProgress = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.IN_PROGRESS);
        verifyFlowPathStatus(inProgress.getForwardPath(), FlowPathStatus.IN_PROGRESS, "forward");
        verifyFlowPathStatus(inProgress.getReversePath(), FlowPathStatus.IN_PROGRESS, "reverse");
        verifyNorthboundSuccessResponse(carrier);

        Set<UUID> producedErrors = new HashSet<>();
        Map<UUID, Integer> remainingRetries = new HashMap<>();
        Map<UUID, Integer> seenCounter = new HashMap<>();

        FlowSegmentRequest request;
        while ((request = requests.poll()) != null) {
            UUID commandId = request.getCommandId();
            seenCounter.put(commandId, seenCounter.getOrDefault(commandId, 0) + 1);

            Integer remaining = remainingRetries.getOrDefault(commandId, retriesLimit);
            if (failRequest.isInstance(request) && remaining > 0) {
                producedErrors.add(commandId);
                remainingRetries.put(commandId, remaining - 1);
                handleErrorResponse(service, key, request, error);
            } else if (request.isVerifyRequest()) {
                service.handleAsyncResponse(key, buildResponseOnVerifyRequest(request));
            } else {
                handleResponse(service, key, request);
            }
        }

        Assert.assertFalse(producedErrors.isEmpty());
        for (Map.Entry<UUID, Integer> entry : seenCounter.entrySet()) {
            if (!producedErrors.contains(entry.getKey())) {
                continue;
            }
            Integer counter = entry.getValue();
            if (mustRetry) {
                // original attempt + the full retry budget
                Assert.assertEquals(retriesLimit + 1, (int) counter);
            } else {
                Assert.assertEquals(1, (int) counter);
            }
        }

        if (mustRetry) {
            Flow result = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.UP);
            verifyFlowPathStatus(result.getForwardPath(), FlowPathStatus.ACTIVE, "forward");
            verifyFlowPathStatus(result.getReversePath(), FlowPathStatus.ACTIVE, "reverse");
        } else {
            verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.DOWN);
        }
    }

    @Test
    public void shouldNotRetryForever() throws Exception {
        String key = "retries_non_ingress_installation";
        FlowRequest flowRequest = makeRequest()
                .flowId("dummy_flow_id")
                .build();

        int retriesLimit = 10;
        FlowCreateService service = makeService(retriesLimit);
        preparePathComputation(flowRequest.getFlowId(), make2SwitchesPathPair());
        service.handleRequest(key, new CommandContext(), flowRequest);

        Flow inProgress = verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.IN_PROGRESS);
        verifyFlowPathStatus(inProgress.getForwardPath(), FlowPathStatus.IN_PROGRESS, "forward");
        verifyFlowPathStatus(inProgress.getReversePath(), FlowPathStatus.IN_PROGRESS, "reverse");
        verifyNorthboundSuccessResponse(carrier);

        FlowSegmentRequest request;
        Map<UUID, Integer> remainingRetries = new HashMap<>();
        while ((request = requests.poll()) != null) {
            UUID commandId = request.getCommandId();
            // fail egress installs unconditionally; the service must stop after
            // (retriesLimit + 1) attempts per command instead of looping forever
            Integer remaining = remainingRetries.getOrDefault(commandId, retriesLimit + 1);
            Assert.assertTrue(0 < remaining);

            if (request instanceof EgressFlowSegmentInstallRequest) {
                remainingRetries.put(commandId, remaining - 1);
                handleErrorResponse(service, key, request, ErrorCode.SWITCH_UNAVAILABLE);
            } else if (request.isVerifyRequest()) {
                service.handleAsyncResponse(key, buildResponseOnVerifyRequest(request));
            } else {
                handleResponse(service, key, request);
            }
        }

        verifyFlowStatus(flowRequest.getFlowId(), FlowStatus.DOWN);
    }

    /** Answers a speaker request with a success response. */
    private void handleResponse(FlowCreateService service, String key, FlowSegmentRequest request) {
        service.handleAsyncResponse(key, SpeakerFlowSegmentResponse.builder()
                .messageContext(request.getMessageContext())
                .metadata(request.getMetadata())
                .commandId(request.getCommandId())
                .switchId(request.getSwitchId())
                .success(true)
                .build());
    }

    /** Answers a speaker request with an error response carrying {@code errorCode}. */
    private void handleErrorResponse(
            FlowCreateService service, String key, FlowSegmentRequest request, ErrorCode errorCode) {
        service.handleAsyncResponse(key, FlowErrorResponse.errorBuilder()
                .messageContext(request.getMessageContext())
                .metadata(request.getMetadata())
                .commandId(request.getCommandId())
                .switchId(request.getSwitchId())
                .errorCode(errorCode)
                .build());
    }

    /** Stubs the path computer to return {@code pathPair} for {@code flowId}. */
    private void preparePathComputation(String flowId, GetPathsResult pathPair)
            throws RecoverableException, UnroutableFlowException {
        when(pathComputer.getPath(makeFlowArgumentMatch(flowId))).thenReturn(pathPair);
    }

    private FlowCreateService makeService() {
        return makeService(0);
    }

    private FlowCreateService makeService(int retriesLimit) {
        return new FlowCreateService(carrier, persistenceManager, pathComputer, flowResourcesManager,
                0, 3, 0, retriesLimit);
    }
}
apache-2.0
LABELNET/YuanNewsForAndroid
yuannews/src/main/java/cn/edu/hpu/yuan/yuannews/main/data/model/basevo/LikedVo.java
677
package cn.edu.hpu.yuan.yuannews.main.data.model.basevo; import cn.edu.hpu.yuan.yuannews.main.data.model.base.Liked; /** * Created by yuan on 16-4-3. * 留言拓展类 */ public class LikedVo extends Liked { private Integer uid; private String head; private String nick; public Integer getUid() { return uid; } public void setUid(Integer uid) { this.uid = uid; } public String getHead() { return head; } public void setHead(String head) { this.head = head; } public String getNick() { return nick; } public void setNick(String nick) { this.nick = nick; } }
apache-2.0
AutomationRockstars/Design
design/gir/src/main/java/com/automationrockstars/bmo/DownloadFromSeleniumGrid.java
1824
/* * <!-- * Copyright (c) 2015-2019 Automation RockStars Ltd. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License v2.0 * which accompanies this distribution, and is available at * http://www.apache.org/licenses/LICENSE-2.0 * * Contributors: * Automation RockStars * --> */ package com.automationrockstars.bmo; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import java.io.File; import java.io.IOException; import java.util.concurrent.Callable; public class DownloadFromSeleniumGrid implements Callable<File> { private static CloseableHttpClient cl; private final String link; public DownloadFromSeleniumGrid(String link) { this.link = link; } private static boolean canGetVideo(final String link) { CloseableHttpResponse resp = null; try { resp = cl.execute(new HttpGet(link)); if (resp.getStatusLine().getStatusCode() != 200) { throw new IllegalArgumentException("Negative response from server " + resp.getStatusLine()); } return true; } catch (Throwable t) { //LOG.debug("Video {} cannot be fetched due to {}",link,t.getMessage()); return false; } finally { if (resp != null) { try { resp.close(); } catch (IOException ignore) { } } } } CloseableHttpClient client() { return cl; } @Override public File call() throws Exception { // TODO Auto-generated method stub return null; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-clouddirectory/src/main/java/com/amazonaws/services/clouddirectory/model/transform/BatchListPolicyAttachmentsResponseJsonUnmarshaller.java
3307
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.clouddirectory.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.clouddirectory.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * BatchListPolicyAttachmentsResponse JSON Unmarshaller
 *
 * Code-generated by the AWS SDK; walks the JSON token stream and populates
 * the "ObjectIdentifiers" list and "NextToken" fields of the response.
 * Do not hand-edit logic — regenerate instead.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BatchListPolicyAttachmentsResponseJsonUnmarshaller implements Unmarshaller<BatchListPolicyAttachmentsResponse, JsonUnmarshallerContext> {

    public BatchListPolicyAttachmentsResponse unmarshall(JsonUnmarshallerContext context) throws Exception {
        BatchListPolicyAttachmentsResponse batchListPolicyAttachmentsResponse = new BatchListPolicyAttachmentsResponse();

        // depth bookkeeping: fields belonging to this object live exactly one
        // level below the depth at which unmarshalling started
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // a JSON null for the whole object unmarshals to a null response
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("ObjectIdentifiers", targetDepth)) {
                    context.nextToken();
                    batchListPolicyAttachmentsResponse.setObjectIdentifiers(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

                    .unmarshall(context));
                }
                if (context.testExpression("NextToken", targetDepth)) {
                    context.nextToken();
                    batchListPolicyAttachmentsResponse.setNextToken(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // stop once the stream closes back out of the object we started in
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return batchListPolicyAttachmentsResponse;
    }

    private static BatchListPolicyAttachmentsResponseJsonUnmarshaller instance;

    // NOTE(review): lazy init without synchronization — presumably acceptable
    // because the unmarshaller is stateless (worst case: duplicate instances);
    // this matches the SDK's generated pattern.
    public static BatchListPolicyAttachmentsResponseJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new BatchListPolicyAttachmentsResponseJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
sjaco002/incubator-asterixdb
asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
1273
/* * Copyright 2009-2013 by The Regents of the University of California * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * you may obtain a copy of the License from * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.uci.ics.asterix.runtime.operators.file; import edu.uci.ics.asterix.om.types.ARecordType; import edu.uci.ics.hyracks.api.context.IHyracksTaskContext; import edu.uci.ics.hyracks.api.exceptions.HyracksDataException; /** * An extension of AbstractTupleParser that provides functionality for * parsing delimited files. */ public class AdmTupleParser extends AbstractTupleParser { public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) throws HyracksDataException { super(ctx, recType); } @Override public IDataParser getDataParser() { return new ADMDataParser(filename); } }
apache-2.0
cchacin/metrics-cdi
impl/src/main/java/io/astefanutti/metrics/cdi/MetricsExtension.java
5184
/**
 * Copyright (C) 2013 Antonin Stefanutti (antonin.stefanutti@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.astefanutti.metrics.cdi;

import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.annotation.CachedGauge;
import com.codahale.metrics.annotation.Counted;
import com.codahale.metrics.annotation.ExceptionMetered;
import com.codahale.metrics.annotation.Gauge;
import com.codahale.metrics.annotation.Metered;
import com.codahale.metrics.annotation.Timed;

import javax.enterprise.event.Observes;
import javax.enterprise.inject.spi.AfterBeanDiscovery;
import javax.enterprise.inject.spi.AfterDeploymentValidation;
import javax.enterprise.inject.spi.AnnotatedMember;
import javax.enterprise.inject.spi.AnnotatedMethod;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.BeforeBeanDiscovery;
import javax.enterprise.inject.spi.Extension;
import javax.enterprise.inject.spi.ProcessAnnotatedType;
import javax.enterprise.inject.spi.ProcessProducerField;
import javax.enterprise.inject.spi.ProcessProducerMethod;
import javax.enterprise.inject.spi.WithAnnotations;
import javax.enterprise.util.AnnotationLiteral;
import javax.enterprise.util.Nonbinding;
import javax.interceptor.InterceptorBinding;

import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * CDI portable extension that wires Dropwizard Metrics into the container:
 * it retrofits the Metrics annotations as interceptor bindings, tags
 * annotated types for interception, collects producer fields/methods that
 * declare {@link Metric} beans, provides a default {@link MetricRegistry}
 * when none exists, and registers the collected custom metrics once
 * deployment validation completes.
 */
public class MetricsExtension implements Extension {

    // Literal used to mark interceptor-binding members as @Nonbinding so their
    // attribute values do not participate in binding resolution.
    private static final AnnotationLiteral<Nonbinding> NON_BINDING = new AnnotationLiteral<Nonbinding>(){};

    // Literal used to retrofit the Metrics annotations as interceptor bindings.
    private static final AnnotationLiteral<InterceptorBinding> INTERCEPTOR_BINDING = new AnnotationLiteral<InterceptorBinding>(){};

    // Binding added to every type carrying a Metrics annotation (see metricsAnnotations).
    private static final AnnotationLiteral<MetricsBinding> METRICS_BINDING = new AnnotationLiteral<MetricsBinding>(){};

    // Producer fields/methods declaring Metric beans, collected during bean
    // discovery and registered in customMetrics().
    private final Map<Bean<?>, AnnotatedMember<?>> metrics = new HashMap<>();

    // BeforeBeanDiscovery: declare the method-level Metrics annotations as
    // interceptor bindings so the corresponding interceptors can attach to them.
    private void addInterceptorBindings(@Observes BeforeBeanDiscovery bbd, BeanManager manager) {
        declareAsInterceptorBinding(Counted.class, manager, bbd);
        declareAsInterceptorBinding(ExceptionMetered.class, manager, bbd);
        declareAsInterceptorBinding(Metered.class, manager, bbd);
        declareAsInterceptorBinding(Timed.class, manager, bbd);
    }

    // ProcessAnnotatedType: tag any type carrying a Metrics annotation with
    // MetricsBinding so it gets intercepted.
    private <X> void metricsAnnotations(@Observes @WithAnnotations({CachedGauge.class, Counted.class, ExceptionMetered.class, Gauge.class, Metered.class, Timed.class}) ProcessAnnotatedType<X> pat) {
        pat.setAnnotatedType(new AnnotatedTypeDecorator<>(pat.getAnnotatedType(), METRICS_BINDING));
    }

    // Collect producer fields that expose Metric instances.
    private void metricProducerField(@Observes ProcessProducerField<? extends Metric, ?> ppf) {
        metrics.put(ppf.getBean(), ppf.getAnnotatedProducerField());
    }

    // Collect producer methods that expose Metric instances.
    private void metricProducerMethod(@Observes ProcessProducerMethod<? extends Metric, ?> ppm) {
        // Skip the Metrics CDI alternatives
        if (!ppm.getBean().getBeanClass().equals(MetricProducer.class))
            metrics.put(ppm.getBean(), ppm.getAnnotatedProducerMethod());
    }

    // AfterBeanDiscovery: contribute a default MetricRegistry bean when the
    // application has not declared one itself.
    private void defaultMetricRegistry(@Observes AfterBeanDiscovery abd, BeanManager manager) {
        if (manager.getBeans(MetricRegistry.class).isEmpty())
            abd.addBean(new MetricRegistryBean(manager));
    }

    // AfterDeploymentValidation: instantiate/register all collected custom
    // metrics, then release the references.
    private void customMetrics(@Observes AfterDeploymentValidation adv, BeanManager manager) {
        MetricProducer producer = getBeanInstance(manager, MetricProducer.class);
        for (Map.Entry<Bean<?>, AnnotatedMember<?>> metric : metrics.entrySet())
            producer.produceMetric(manager, metric.getKey(), metric.getValue());

        // Let's clear the collected metric producers
        metrics.clear();
    }

    // Re-declares an existing annotation type as an interceptor binding, with
    // every member made @Nonbinding so attribute values are ignored for matching.
    private static <T extends Annotation> void declareAsInterceptorBinding(Class<T> annotation, BeanManager manager, BeforeBeanDiscovery bbd) {
        AnnotatedType<T> annotated = manager.createAnnotatedType(annotation);
        Set<AnnotatedMethod<? super T>> methods = new HashSet<>();
        for (AnnotatedMethod<? super T> method : annotated.getMethods())
            methods.add(new AnnotatedMethodDecorator<>(method, NON_BINDING));

        bbd.addInterceptorBinding(new AnnotatedTypeDecorator<>(annotated, INTERCEPTOR_BINDING, methods));
    }

    // Resolves and returns a contextual instance of the given bean type.
    @SuppressWarnings("unchecked")
    private static <T> T getBeanInstance(BeanManager manager, Class<T> clazz) {
        Bean<?> bean = manager.resolve(manager.getBeans(clazz));
        return (T) manager.getReference(bean, clazz, manager.createCreationalContext(null));
    }
}
apache-2.0
WASdev/sample.daytrader7
daytrader-ee7-web/src/main/java/com/ibm/websphere/samples/daytrader/web/prims/PingServlet2Jsp.java
2717
/**
 * (C) Copyright IBM Corporation 2015.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ibm.websphere.samples.daytrader.web.prims;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.ibm.websphere.samples.daytrader.util.Log;

/**
 *
 * PingServlet2JSP tests a call from a servlet to a JavaServer Page providing
 * server-side dynamic HTML through JSP scripting.
 *
 */
@WebServlet(name = "PingServlet2Jsp", urlPatterns = { "/servlet/PingServlet2Jsp" })
public class PingServlet2Jsp extends HttpServlet {
    private static final long serialVersionUID = -5199543766883932389L;

    // Servlets service requests concurrently, so the shared hit counter must be
    // updated atomically; the previous plain "static int hitCount++" could lose
    // updates under load.
    private static final AtomicInteger hitCount = new AtomicInteger();

    /**
     * Forwards POST requests to the {@link #doGet} method.
     *
     * @param req
     *            javax.servlet.http.HttpServletRequest
     * @param res
     *            javax.servlet.http.HttpServletResponse
     */
    @Override
    public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        doGet(req, res);
    }

    /**
     * Services GET requests: increments the hit counter, stores it in a
     * request-scoped PingBean and forwards to PingServlet2Jsp.jsp for
     * rendering.
     *
     * @param req
     *            javax.servlet.http.HttpServletRequest
     * @param res
     *            javax.servlet.http.HttpServletResponse
     **/
    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        PingBean ab;
        try {
            ab = new PingBean();
            // atomic increment replaces the former non-atomic hitCount++
            ab.setMsg("Hit Count: " + hitCount.incrementAndGet());
            req.setAttribute("ab", ab);

            getServletConfig().getServletContext().getRequestDispatcher("/PingServlet2Jsp.jsp").forward(req, res);
        } catch (Exception ex) {
            Log.error(ex, "PingServlet2Jsp.doGet(...): request error");
            res.sendError(500, "PingServlet2Jsp.doGet(...): request error" + ex.toString());
        }
    }
}
apache-2.0
Taller/sqlworkbench-plus
src/workbench/gui/actions/FileSaveAsAction.java
1404
/*
 * FileSaveAsAction.java
 *
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015, Thomas Kellerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at.
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 *
 */
package workbench.gui.actions;

import java.awt.event.ActionEvent;

import workbench.interfaces.TextFileContainer;
import workbench.resource.ResourceMgr;

/**
 * Menu action that saves the current file in the SQL editor under a new name.
 *
 * @author Thomas Kellerer
 */
public class FileSaveAsAction
  extends WbAction
{
  private final TextFileContainer editor;

  public FileSaveAsAction(TextFileContainer aClient)
  {
    super();
    editor = aClient;
    initMenuDefinition("MnuTxtFileSaveAs");
    setMenuItemName(ResourceMgr.MNU_TXT_FILE);
  }

  /**
   * Delegates to the container's save operation.
   * NOTE(review): despite the "save as" name this calls saveFile(), not a
   * dedicated saveFileAs() — presumably the container prompts for a name;
   * confirm against TextFileContainer.
   */
  @Override
  public void executeAction(ActionEvent e)
  {
    editor.saveFile();
  }
}
apache-2.0
arrayexpress/annotare2
app/integration/ae-subs-tracking/src/main/java/uk/ac/ebi/fg/annotare2/autosubs/SubsTracking.java
17427
/*
 * Copyright 2009-2016 European Molecular Biology Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package uk.ac.ebi.fg.annotare2.autosubs;

import com.google.common.primitives.Ints;
import com.google.inject.Inject;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.commons.lang3.RandomStringUtils;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.conf.Settings;
import org.jooq.impl.DSL;
import org.jooq.impl.DefaultConnectionProvider;
import uk.ac.ebi.fg.annotare2.autosubs.jooq.tables.records.DataFilesRecord;
import uk.ac.ebi.fg.annotare2.autosubs.jooq.tables.records.ExperimentsRecord;
import uk.ac.ebi.fg.annotare2.autosubs.jooq.tables.records.SpreadsheetsRecord;
import uk.ac.ebi.fg.annotare2.autosubs.jooq.tables.records.UsersRecord;
import uk.ac.ebi.fg.annotare2.db.model.ExperimentSubmission;
import uk.ac.ebi.fg.annotare2.db.model.ImportedExperimentSubmission;
import uk.ac.ebi.fg.annotare2.db.model.Submission;
import uk.ac.ebi.fg.annotare2.db.model.enums.SubmissionStatus;
import uk.ac.ebi.fg.annotare2.submission.transform.DataSerializationException;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Date;

import static com.google.common.base.Strings.isNullOrEmpty;
import static uk.ac.ebi.fg.annotare2.autosubs.jooq.Tables.*;

/**
 * Gateway to the submission-tracking ("subs tracking") MySQL database.
 * <p>
 * Maintains a HikariCP connection pool (created in {@link #initialize()},
 * closed in {@link #terminate()}) and exposes CRUD-style operations on the
 * EXPERIMENTS, SPREADSHEETS, DATA_FILES and USERS tables via jOOQ. Callers
 * obtain a {@link Connection} from {@link #getConnection()}, pass it to the
 * operation methods, and release it with {@link #releaseConnection(Connection)}.
 */
public class SubsTracking {

    // Configuration source for connection parameters and feature toggle.
    private final SubsTrackingProperties properties;

    // Connection pool; null until initialize() runs (and after terminate()).
    private HikariDataSource ds;

    // Status value written to EXPERIMENTS.STATUS when a submission is sent.
    private final static String STATUS_PENDING = "Waiting";

    /**
     * Injected constructor; the pool is NOT created here — see initialize().
     *
     * @param properties subs-tracking configuration
     */
    @Inject
    public SubsTracking( SubsTrackingProperties properties ) {
        this.properties = properties;
        ds = null;
    }

    /**
     * Borrows a pooled connection.
     *
     * @return an open JDBC connection
     * @throws SubsTrackingException if the pool is not initialized or the
     *         borrow fails
     */
    public Connection getConnection() throws SubsTrackingException {
        if (null == ds) {
            throw new SubsTrackingException("Unable to obtain a connection; pool has not been initialized");
        }
        try {
            return ds.getConnection();
        } catch (SQLException e) {
            throw new SubsTrackingException(e);
        }
    }

    /**
     * Returns a borrowed connection to the pool (closing a pooled connection
     * returns it rather than destroying it). Null/already-closed connections
     * are ignored.
     *
     * @param connection the connection to release; may be null
     * @throws SubsTrackingException if closing fails
     */
    public void releaseConnection(Connection connection) throws SubsTrackingException {
        try {
            if (null != connection && !connection.isClosed()) {
                connection.close();
            }
        } catch (SQLException e) {
            throw new SubsTrackingException(e);
        }
    }

    /**
     * Inserts a new EXPERIMENTS row for the given submission and returns the
     * generated subs-tracking id.
     * <p>
     * Supports {@link ExperimentSubmission} (full metadata from the experiment
     * profile) and {@link ImportedExperimentSubmission} (placeholder
     * name/description); any other subtype is rejected.
     *
     * @param connection an open subs-tracking connection
     * @param submission the submission to register
     * @return the generated EXPERIMENTS.ID, or null if the insert returned no record
     * @throws SubsTrackingException on serialization failure or unsupported
     *         submission type
     */
    public Integer addSubmission(Connection connection, Submission submission) throws SubsTrackingException {
        Integer subsTrackingId = null;

        // NOTE(review): getContext() returns null when subs tracking is
        // disabled in configuration; the calls below would then NPE —
        // presumably this method is only reached when enabled. TODO confirm.
        DSLContext context = getContext(connection);

        if (submission instanceof ExperimentSubmission) {
            try {
                Integer userId = getAnnotareUserId(context);

                ExperimentsRecord r =
                        context.insertInto(EXPERIMENTS)
                                .set(EXPERIMENTS.IS_DELETED, 0)
                                .set(EXPERIMENTS.IN_CURATION, 0)
                                .set(EXPERIMENTS.USER_ID, userId)
                                .set(EXPERIMENTS.DATE_SUBMITTED, new Timestamp(new Date().getTime()))
                                .set(EXPERIMENTS.ACCESSION, submission.getAccession())
                                // NAME column is VARCHAR(255) — hence the trim; both
                                // text fields are forced to ASCII first.
                                .set(EXPERIMENTS.NAME, trimStringToSize(asciiCompliantString(submission.getTitle()), 255))
                                .set(EXPERIMENTS.SUBMITTER_DESCRIPTION,
                                        asciiCompliantString(((ExperimentSubmission) submission).getExperimentProfile().getDescription()))
                                .set(EXPERIMENTS.EXPERIMENT_TYPE, properties.getSubsTrackingExperimentType())
                                .set(EXPERIMENTS.IS_UHTS,
                                        ((ExperimentSubmission) submission).getExperimentProfile().getType().isSequencing() ? 1 : 0)
                                .set(EXPERIMENTS.NUM_SUBMISSIONS, 1)
                                // checkedCast guards against a long id overflowing the int column
                                .set(EXPERIMENTS.ANNOTARE_SUBMISSION_ID, Ints.checkedCast(submission.getId()))
                                .returning(EXPERIMENTS.ID)
                                .fetchOne();
                if (null != r) {
                    subsTrackingId = r.getId();
                }
            } catch (DataSerializationException e) {
                throw new SubsTrackingException(e);
            }
        } else if (submission instanceof ImportedExperimentSubmission) {
            // Imported submissions carry no profile, so NAME/DESCRIPTION are
            // empty and IS_UHTS defaults to 0.
            Integer userId = getAnnotareUserId(context);

            ExperimentsRecord r =
                    context.insertInto(EXPERIMENTS)
                            .set(EXPERIMENTS.IS_DELETED, 0)
                            .set(EXPERIMENTS.IN_CURATION, 0)
                            .set(EXPERIMENTS.USER_ID, userId)
                            .set(EXPERIMENTS.DATE_SUBMITTED, new Timestamp(new Date().getTime()))
                            .set(EXPERIMENTS.ACCESSION, submission.getAccession())
                            .set(EXPERIMENTS.NAME, asciiCompliantString(""))
                            .set(EXPERIMENTS.SUBMITTER_DESCRIPTION, asciiCompliantString(""))
                            .set(EXPERIMENTS.EXPERIMENT_TYPE, properties.getSubsTrackingExperimentType())
                            .set(EXPERIMENTS.IS_UHTS, 0)
                            .set(EXPERIMENTS.NUM_SUBMISSIONS, 1)
                            .set(EXPERIMENTS.ANNOTARE_SUBMISSION_ID, Ints.checkedCast(submission.getId()))
                            .returning(EXPERIMENTS.ID)
                            .fetchOne();
            if (null != r) {
                subsTrackingId = r.getId();
            }
        } else {
            throw new SubsTrackingException(SubsTrackingException.NOT_IMPLEMENTED_EXCEPTION);
        }
        return subsTrackingId;
    }

    /**
     * Updates the existing EXPERIMENTS row of a resubmitted experiment:
     * refreshes title/description/type, bumps NUM_SUBMISSIONS and stamps both
     * edit and submit dates.
     *
     * @param connection an open subs-tracking connection
     * @param submission an {@link ExperimentSubmission} with a subs-tracking id
     * @throws SubsTrackingException if the row is missing/deleted, if a
     *         non-resubmitted submission is already in curation, on
     *         serialization failure, or for unsupported submission types
     */
    public void updateSubmission(Connection connection, Submission submission) throws SubsTrackingException {
        if (submission instanceof ExperimentSubmission) {
            try {
                Timestamp updateDate = new Timestamp(new Date().getTime());

                ExperimentsRecord r = getContext(connection).selectFrom(EXPERIMENTS)
                        .where(EXPERIMENTS.ID.equal(submission.getSubsTrackingId()))
                        .fetchOne();

                if (null == r || 1 == r.getIsDeleted()) {
                    throw new SubsTrackingException(SubsTrackingException.MISSING_RECORD_EXCEPTION);
                }

                // Only a RESUBMITTED submission may touch a row that curation
                // has already picked up.
                if (SubmissionStatus.RESUBMITTED != submission.getStatus() && 1 == r.getInCuration()) {
                    throw new SubsTrackingException(SubsTrackingException.IN_CURATION_ON_RESUBMISSION_EXCEPTION
                    + ". Submission ID: " + submission.getId());
                }

                // Legacy rows may have a NULL counter; treat that as 1.
                Integer numSubmissions = null == r.getNumSubmissions() ? 1 : r.getNumSubmissions();

                getContext(connection).update(EXPERIMENTS)
                        .set(EXPERIMENTS.DATE_LAST_EDITED, updateDate)
                        .set(EXPERIMENTS.DATE_SUBMITTED, updateDate)
                        .set(EXPERIMENTS.NAME, trimStringToSize(asciiCompliantString(submission.getTitle()), 255))
                        .set(EXPERIMENTS.SUBMITTER_DESCRIPTION,
                                asciiCompliantString(((ExperimentSubmission) submission).getExperimentProfile().getDescription()))
                        .set(EXPERIMENTS.EXPERIMENT_TYPE, properties.getSubsTrackingExperimentType())
                        .set(EXPERIMENTS.NUM_SUBMISSIONS, numSubmissions + 1)
                        .set(EXPERIMENTS.ANNOTARE_SUBMISSION_ID, Ints.checkedCast(submission.getId()))
                        .where(EXPERIMENTS.ID.equal(submission.getSubsTrackingId()))
                        .execute();
            } catch (DataSerializationException e) {
                throw new SubsTrackingException(e);
            }
        } else {
            throw new SubsTrackingException(SubsTrackingException.NOT_IMPLEMENTED_EXCEPTION);
        }
    }

    /**
     * Hands the submission over to curation: sets STATUS to "Waiting" and
     * flags IN_CURATION.
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the EXPERIMENTS.ID to mark
     * @throws SubsTrackingException if the id is null
     */
    public void sendSubmission(Connection connection, Integer subsTrackingId) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        getContext(connection).update(EXPERIMENTS)
                .set(EXPERIMENTS.STATUS, STATUS_PENDING)
                .set(EXPERIMENTS.IN_CURATION, 1)
                .where(EXPERIMENTS.ID.equal(subsTrackingId))
                .execute();
    }

    /**
     * Soft-deletes all spreadsheet and data-file rows attached to an
     * experiment (sets IS_DELETED = 1; nothing is physically removed).
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the owning EXPERIMENTS.ID
     * @throws SubsTrackingException if the id is null
     */
    public void deleteFiles(Connection connection, Integer subsTrackingId) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        DSLContext context = getContext(connection);

        context.update(SPREADSHEETS)
                .set(SPREADSHEETS.IS_DELETED, 1)
                .where(SPREADSHEETS.EXPERIMENT_ID.equal(subsTrackingId))
                .execute();

        context.update(DATA_FILES)
                .set(DATA_FILES.IS_DELETED, 1)
                .where(DATA_FILES.EXPERIMENT_ID.equal(subsTrackingId))
                .execute();
    }

    /**
     * Registers a MAGE-TAB spreadsheet file for an experiment.
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the owning EXPERIMENTS.ID
     * @param fileName the spreadsheet file name
     * @return the generated SPREADSHEETS.ID, or null if the insert returned no record
     * @throws SubsTrackingException if the id is null
     */
    public Integer addMageTabFile(Connection connection, Integer subsTrackingId, String fileName) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        Integer spreadsheetId = null;

        SpreadsheetsRecord r = getContext(connection).insertInto(SPREADSHEETS)
                .set(SPREADSHEETS.IS_DELETED, 0)
                .set(SPREADSHEETS.EXPERIMENT_ID, subsTrackingId)
                .set(SPREADSHEETS.NAME, fileName)
                .returning(SPREADSHEETS.ID)
                .fetchOne();

        if (null != r) {
            spreadsheetId = r.getId();
        }
        return spreadsheetId;
    }

    /**
     * Checks whether a spreadsheet with the given name is already registered
     * for the experiment (deleted rows included — no IS_DELETED filter here;
     * NOTE(review): confirm whether soft-deleted rows should count).
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the owning EXPERIMENTS.ID
     * @param fileName the spreadsheet file name
     * @return true if at least one matching row exists
     * @throws SubsTrackingException if the id is null
     */
    public boolean hasMageTabFileAdded(Connection connection, Integer subsTrackingId, String fileName) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        Integer count = getContext(connection).selectCount()
                .from(SPREADSHEETS)
                .where(SPREADSHEETS.EXPERIMENT_ID.equal(subsTrackingId)
                        .and(SPREADSHEETS.NAME.equal(fileName)))
                .fetchOne(0, Integer.class);

        return (count > 0);
    }

    /**
     * Registers a data file for an experiment (marked as already unpacked).
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the owning EXPERIMENTS.ID
     * @param fileName the data file name
     * @return the generated DATA_FILES.ID, or null if the insert returned no record
     * @throws SubsTrackingException if the id is null
     */
    public Integer addDataFile(Connection connection, Integer subsTrackingId, String fileName) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        Integer dataFileId = null;

        DataFilesRecord r = getContext(connection).insertInto(DATA_FILES)
                .set(DATA_FILES.IS_DELETED, 0)
                .set(DATA_FILES.IS_UNPACKED, 1)
                .set(DATA_FILES.EXPERIMENT_ID, subsTrackingId)
                .set(DATA_FILES.NAME, fileName)
                .returning(DATA_FILES.ID)
                .fetchOne();

        if (null != r) {
            dataFileId = r.getId();
        }
        return dataFileId;
    }

    /**
     * Tells whether a non-deleted experiment row is currently in curation.
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the EXPERIMENTS.ID to inspect
     * @return true if IN_CURATION == 1
     * @throws SubsTrackingException if the id is null or the row is missing
     */
    public boolean isInCuration(Connection connection, Integer subsTrackingId) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        ExperimentsRecord r = getContext(connection).selectFrom(EXPERIMENTS)
                .where(EXPERIMENTS.ID.equal(subsTrackingId).and(EXPERIMENTS.IS_DELETED.equal(0)))
                .fetchOne();

        if (null == r) {
            throw new SubsTrackingException(SubsTrackingException.MISSING_RECORD_EXCEPTION);
        }

        return 1 == r.getInCuration();
    }

    /**
     * Fetches the accession of a non-deleted experiment row.
     *
     * @param connection an open subs-tracking connection
     * @param subsTrackingId the EXPERIMENTS.ID to inspect
     * @return the ACCESSION column value (may be null in the database)
     * @throws SubsTrackingException if the id is null or the row is missing
     */
    public String getAccession(Connection connection, Integer subsTrackingId) throws SubsTrackingException {
        if (null == subsTrackingId) {
            throw new SubsTrackingException(SubsTrackingException.INVALID_ID_EXCEPTION);
        }
        ExperimentsRecord r = getContext(connection).selectFrom(EXPERIMENTS)
                .where(EXPERIMENTS.ID.equal(subsTrackingId).and(EXPERIMENTS.IS_DELETED.equal(0)))
                .fetchOne();

        if (null == r) {
            throw new SubsTrackingException(SubsTrackingException.MISSING_RECORD_EXCEPTION);
        }

        return r.getAccession();
    }

    /**
     * Resolves (or lazily creates) the USERS row for the configured Annotare
     * service account and returns its id. A newly created user gets a random
     * 16-character alphanumeric password.
     *
     * @param context jOOQ context bound to an open connection
     * @return the user id, or null if neither lookup nor insert produced a record
     * @throws SubsTrackingException if no subs-tracking user is configured
     */
    private Integer getAnnotareUserId(DSLContext context) throws SubsTrackingException {
        String subsTrackingUser = properties.getSubsTrackingUser();
        if (isNullOrEmpty(subsTrackingUser)) {
            throw new SubsTrackingException(SubsTrackingException.USER_NOT_CONFIGURED_EXCEPTION);
        }

        UsersRecord r = context.selectFrom(USERS)
                .where(USERS.LOGIN.equal(subsTrackingUser))
                .and(USERS.IS_DELETED.equal(0))
                .fetchOne();

        if (null == r) {
            // here we create the user
            r = context.insertInto(USERS)
                    .set(USERS.LOGIN, subsTrackingUser)
                    .set(USERS.PASSWORD, RandomStringUtils.randomAlphanumeric(16))
                    .set(USERS.IS_DELETED, 0)
                    .returning(USERS.ID)
                    .fetchOne();
        }
        return ( null != r ) ? r.getId() : null;
    }

    /**
     * Builds a jOOQ DSL context over the given connection (MySQL dialect,
     * schema names suppressed in rendered SQL).
     * <p>
     * NOTE(review): returns null when subs tracking is disabled — every caller
     * dereferences the result without a null check; confirm disabled mode is
     * unreachable from those call sites.
     *
     * @param connection an open JDBC connection
     * @return the DSL context, or null if subs tracking is disabled
     * @throws SubsTrackingException if context creation fails
     */
    private DSLContext getContext(Connection connection) throws SubsTrackingException {
        if (properties.isSubsTrackingEnabled()) {
            try {
                Settings settings = new Settings()
                        .withRenderSchema(false);
                return DSL.using(new DefaultConnectionProvider(connection), SQLDialect.MYSQL, settings);
            } catch (Exception e) {
                throw new SubsTrackingException(e);
            }
        } else {
            return null;
        }
    }

    /**
     * Creates and smoke-tests the HikariCP pool. No-op pool creation when
     * subs tracking is disabled. Must not be called twice without an
     * intervening {@link #terminate()}.
     *
     * @throws SubsTrackingException on repeated initialization, missing JDBC
     *         driver, or failure to open a test connection
     */
    @PostConstruct
    public void initialize() throws SubsTrackingException {
        if (null != ds) {
            throw new SubsTrackingException(SubsTrackingException.ILLEGAL_REPEAT_INITIALIZATION);
        }

        if (properties.isSubsTrackingEnabled()) {
            try {
                // Fail fast if the configured JDBC driver is not on the classpath.
                Class.forName(properties.getSubsTrackingConnectionDriverClass());
            } catch (ClassNotFoundException x) {
                String message = "Unable to load driver ["
                        + properties.getSubsTrackingConnectionDriverClass()
                        + "] for SubsTrackingDB";

                throw new SubsTrackingException(message);
            }

            ds = new HikariDataSource();

            ds.setPoolName("SubsTrackingDB-Pool");
            ds.setDriverClassName(properties.getSubsTrackingConnectionDriverClass());
            ds.setJdbcUrl(properties.getSubsTrackingConnectionUrl());
            ds.setUsername(properties.getSubsTrackingConnectionUser());
            ds.setPassword(properties.getSubsTrackingConnectionPassword());
            ds.setConnectionTestQuery("SELECT 1");
            // Prepared-statement caching tuning passed through to the driver.
            ds.addDataSourceProperty("ds.cachePrepStmts", "true");
            ds.addDataSourceProperty("ds.prepStmtCacheSize", "250");
            ds.addDataSourceProperty("ds.prepStmtCacheSqlLimit", "2048");
            ds.addDataSourceProperty("ds.useServerPrepStmts", "true");

            // Borrow and immediately return one connection to verify the
            // pool actually works at startup.
            Connection test = null;
            try {
                test = ds.getConnection();
            } catch (SQLException x) {
                throw new SubsTrackingException("Unable o establish a connection to subs tracking DB", x);
            } finally {
                if (null != test) {
                    try {
                        test.close();
                    } catch (SQLException x) {
                        //
                    }
                }
            }
        }
    }

    /**
     * Shuts down the connection pool (idempotent).
     *
     * @throws SubsTrackingException declared for symmetry; not thrown here
     */
    @PreDestroy
    public void terminate() throws SubsTrackingException {
        if (null != ds) {
            ds.close();
            ds = null;
        }
    }

    /**
     * Round-trips a string through US-ASCII encoding.
     * NOTE(review): getBytes("US-ASCII") replaces non-ASCII chars rather than
     * throwing, so the catch block is effectively dead — presumably the intent
     * is lossy ASCII normalization; confirm.
     *
     * @param s input string; may be null
     * @return the ASCII-encoded string, or null on null input / encoding error
     */
    private String asciiCompliantString(String s) {
        try {
            if (null != s) {
                byte[] b = s.getBytes("US-ASCII");
                return new String(b, "US-ASCII");
            }
        } catch (UnsupportedEncodingException x) {
            //
        }
        return null;
    }

    /**
     * Truncates a string to at most {@code index} characters (null-safe).
     *
     * @param s input string; may be null
     * @param index maximum length
     * @return the truncated string, or null on null input
     */
    private String trimStringToSize(String s, int index) {
        if (null != s) {
            return s.substring(0, Math.min(s.length(), index));
        }
        return null;
    }
}
apache-2.0
xtwxy/actor-editor
plugins/com.wincom.actor.editor.test2/src/com/wincom/actor/editor/test2/model/ProvidedPortModel.java
2400
package com.wincom.actor.editor.test2.model;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.ui.views.properties.IPropertyDescriptor;
import org.eclipse.ui.views.properties.PropertyDescriptor;
import org.eclipse.ui.views.properties.TextPropertyDescriptor;

/**
 * Model element representing a provided port: one outgoing connection and a
 * list of incoming connections, both exposed as workbench properties.
 */
public class ProvidedPortModel extends ElementModel {
	private static final long serialVersionUID = 2969510135967969883L;

	/** Property id for the incoming-connection list. */
	public static final String INPUTS = "inputs";
	/** Property id for the outgoing connection. */
	public static final String OUTPUT = "output";

	private ConnectionModel output;
	private List<ConnectionModel> inputs = new ArrayList<>();

	/** Descriptors surfaced in the Properties view for this element. */
	private static final IPropertyDescriptor[] descriptors = new IPropertyDescriptor[] {
			new TextPropertyDescriptor(NAME, NAME),
			new TextPropertyDescriptor(PARENT, PARENT),
			new TextPropertyDescriptor(OUTPUT, OUTPUT),
			new PropertyDescriptor(INPUTS, INPUTS) };

	public ProvidedPortModel() {
		setLayout(new Rectangle(10, 10, 100, 50));
	}

	@Override
	public IPropertyDescriptor[] getPropertyDescriptors() {
		return descriptors;
	}

	@Override
	public Object getPropertyValue(Object id) {
		if (INPUTS.equals(id)) {
			return inputs;
		}
		if (OUTPUT.equals(id)) {
			return output;
		}
		// Unknown ids are delegated to the base element model.
		return super.getPropertyValue(id);
	}

	@Override
	public boolean isPropertySet(Object id) {
		return getPropertyValue(id) != null;
	}

	@SuppressWarnings("unchecked")
	@Override
	public void setPropertyValue(Object id, Object value) {
		if (INPUTS.equals(id)) {
			setInputs((List<ConnectionModel>) value);
			return;
		}
		if (OUTPUT.equals(id)) {
			setOutput((ConnectionModel) value);
			return;
		}
		super.setPropertyValue(id, value);
	}

	public ConnectionModel getOutput() {
		return output;
	}

	/** Replaces the outgoing connection and notifies property listeners. */
	public void setOutput(ConnectionModel newOutput) {
		ConnectionModel previous = output;
		output = newOutput;
		firePropertyChange(OUTPUT, previous, newOutput);
	}

	/** Appends one incoming connection and notifies property listeners. */
	public void addInput(ConnectionModel conn) {
		inputs.add(conn);
		firePropertyChange(INPUTS, null, conn);
	}

	public List<ConnectionModel> getInputs() {
		return inputs;
	}

	/** Replaces the whole incoming-connection list and notifies listeners. */
	public void setInputs(List<ConnectionModel> inConn) {
		List<ConnectionModel> previous = inputs;
		inputs = inConn;
		firePropertyChange(INPUTS, previous, inConn);
	}

	@Override
	public void resetPropertyValue(Object id) {
		// Intentionally empty: properties of this element are not resettable.
	}

	@Override
	public List<ElementModel> getChildren() {
		// Ports never own child elements; a fresh empty list is returned each call.
		return new ArrayList<>();
	}
}
apache-2.0
oplinkoms/onos
apps/t3/app/src/main/java/org/onosproject/t3/impl/TroubleshootManager.java
68520
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.t3.impl; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.onlab.packet.IpAddress; import org.onlab.packet.VlanId; import org.onosproject.cluster.NodeId; import org.onosproject.net.ConnectPoint; import org.onosproject.net.DeviceId; import org.onosproject.net.Host; import org.onosproject.net.HostId; import org.onosproject.net.Link; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.net.config.ConfigException; import org.onosproject.net.config.basics.InterfaceConfig; import org.onosproject.net.flow.DefaultTrafficSelector; import org.onosproject.net.flow.FlowEntry; import org.onosproject.net.flow.FlowRule; import org.onosproject.net.flow.IndexTableId; import org.onosproject.net.flow.TableId; import org.onosproject.net.flow.TrafficSelector; import org.onosproject.net.flow.criteria.Criteria; import org.onosproject.net.flow.criteria.Criterion; import org.onosproject.net.flow.criteria.EthCriterion; import org.onosproject.net.flow.criteria.EthTypeCriterion; import org.onosproject.net.flow.criteria.IPCriterion; import 
org.onosproject.net.flow.criteria.VlanIdCriterion; import org.onosproject.net.flow.instructions.Instruction; import org.onosproject.net.flow.instructions.Instructions; import org.onosproject.net.flow.instructions.Instructions.OutputInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction; import org.onosproject.net.group.Group; import org.onosproject.net.group.GroupBucket; import org.onosproject.net.host.InterfaceIpAddress; import org.onosproject.net.intf.Interface; import org.onosproject.routeservice.ResolvedRoute; import org.onosproject.segmentrouting.config.SegmentRoutingDeviceConfig; import org.onosproject.t3.api.DeviceNib; import org.onosproject.t3.api.DriverNib; import org.onosproject.t3.api.EdgePortNib; import org.onosproject.t3.api.FlowNib; import org.onosproject.t3.api.GroupNib; import org.onosproject.t3.api.GroupsInDevice; import org.onosproject.t3.api.HostNib; import org.onosproject.t3.api.LinkNib; import org.onosproject.t3.api.MastershipNib; import org.onosproject.t3.api.MulticastRouteNib; import org.onosproject.t3.api.NetworkConfigNib; import org.onosproject.t3.api.NibProfile; import org.onosproject.t3.api.RouteNib; import org.onosproject.t3.api.StaticPacketTrace; import org.onosproject.t3.api.TroubleshootService; import org.osgi.service.component.annotations.Component; import org.slf4j.Logger; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import static org.onlab.packet.EthType.EtherType; import static org.onosproject.net.flow.TrafficSelector.Builder; import static org.onosproject.net.flow.instructions.Instructions.GroupInstruction; import static 
org.onosproject.net.flow.instructions.L2ModificationInstruction.ModEtherInstruction; import static org.onosproject.net.flow.instructions.L2ModificationInstruction.ModMplsHeaderInstruction; import static org.onosproject.net.flow.instructions.L2ModificationInstruction.ModMplsLabelInstruction; import static org.onosproject.net.flow.instructions.L2ModificationInstruction.ModVlanIdInstruction; import static org.onosproject.t3.impl.TroubleshootUtils.compareMac; import static org.slf4j.LoggerFactory.getLogger; /** * Manager to troubleshoot packets inside the network. * Given a representation of a packet follows it's path in the network according to the existing flows and groups in * the devices. */ @Component(immediate = true, service = TroubleshootService.class) public class TroubleshootManager implements TroubleshootService { private static final Logger log = getLogger(TroubleshootManager.class); static final String PACKET_TO_CONTROLLER = "Packet goes to the controller"; // uses a snapshot (cache) of NIBs instead of interacting with ONOS core in runtime protected FlowNib flowNib = FlowNib.getInstance(); protected GroupNib groupNib = GroupNib.getInstance(); protected LinkNib linkNib = LinkNib.getInstance(); protected HostNib hostNib = HostNib.getInstance(); protected DeviceNib deviceNib = DeviceNib.getInstance(); protected DriverNib driverNib = DriverNib.getInstance(); protected MastershipNib mastershipNib = MastershipNib.getInstance(); protected EdgePortNib edgePortNib = EdgePortNib.getInstance(); protected RouteNib routeNib = RouteNib.getInstance(); protected NetworkConfigNib networkConfigNib = NetworkConfigNib.getInstance(); protected MulticastRouteNib mcastRouteNib = MulticastRouteNib.getInstance(); @Override public boolean checkNibValidity() { return Stream.of(flowNib, groupNib, linkNib, hostNib, deviceNib, driverNib, mastershipNib, edgePortNib, routeNib, networkConfigNib, mcastRouteNib) .allMatch(nib -> nib != null && nib.isValid()); } @Override public String 
printNibSummary() { StringBuilder summary = new StringBuilder().append("*** Current NIB in valid: ***\n"); Stream.of(flowNib, groupNib, linkNib, hostNib, deviceNib, driverNib, mastershipNib, edgePortNib, routeNib, networkConfigNib, mcastRouteNib) .forEach(nib -> { NibProfile profile = nib.getProfile(); summary.append(String.format( nib.getClass().getName() + " created %s from %s\n", profile.date(), profile.sourceType())); }); return summary.append(StringUtils.rightPad("", 125, '-')).toString(); } @Override public List<StaticPacketTrace> pingAll(EtherType type) { ImmutableList.Builder<StaticPacketTrace> tracesBuilder = ImmutableList.builder(); hostNib.getHosts().forEach(host -> { List<IpAddress> ipAddresses = getIpAddresses(host, type, false); if (ipAddresses.size() > 0) { //check if the host has only local IPs of that ETH type boolean onlyLocalSrc = ipAddresses.size() == 1 && ipAddresses.get(0).isLinkLocal(); hostNib.getHosts().forEach(hostToPing -> { List<IpAddress> ipAddressesToPing = getIpAddresses(hostToPing, type, false); //check if the other host has only local IPs of that ETH type boolean onlyLocalDst = ipAddressesToPing.size() == 1 && ipAddressesToPing.get(0).isLinkLocal(); boolean sameLocation = Sets.intersection(host.locations(), hostToPing.locations()).size() > 0; //Trace is done only if they are both local and under the same location // or not local and if they are not the same host. 
if (((sameLocation && onlyLocalDst && onlyLocalSrc) || (!onlyLocalSrc && !onlyLocalDst && ipAddressesToPing.size() > 0)) && !host.equals(hostToPing)) { tracesBuilder.addAll(trace(host.id(), hostToPing.id(), type)); } }); } }); return tracesBuilder.build(); } @Override public Generator<Set<StaticPacketTrace>> pingAllGenerator(EtherType type) { return new PingAllGenerator(type, hostNib, this); } @Override public Generator<Set<StaticPacketTrace>> traceMcast(VlanId vlanId) { return new McastGenerator(mcastRouteNib, this, vlanId); } @Override public Set<StaticPacketTrace> trace(HostId sourceHost, HostId destinationHost, EtherType etherType) { Host source = hostNib.getHost(sourceHost); Host destination = hostNib.getHost(destinationHost); //Temporary trace to fail in case we don't have enough information or what is provided is incoherent StaticPacketTrace failTrace = new StaticPacketTrace(null, null, Pair.of(source, destination)); if (source == null) { failTrace.addResultMessage("Source Host " + sourceHost + " does not exist"); failTrace.setSuccess(false); return ImmutableSet.of(failTrace); } if (destination == null) { failTrace.addResultMessage("Destination Host " + destinationHost + " does not exist"); failTrace.setSuccess(false); return ImmutableSet.of(failTrace); } TrafficSelector.Builder selectorBuilder = DefaultTrafficSelector.builder() .matchEthType(etherType.ethType().toShort()) .matchEthDst(source.mac()) .matchVlanId(source.vlan()); try { ImmutableSet.Builder<StaticPacketTrace> traces = ImmutableSet.builder(); //if the location deviceId is the same, the two hosts are under same subnet and vlan on the interface // we are under same leaf so it's L2 Unicast. 
if (areBridged(source, destination)) { selectorBuilder.matchEthDst(destination.mac()); source.locations().forEach(hostLocation -> { selectorBuilder.matchInPort(hostLocation.port()); StaticPacketTrace trace = trace(selectorBuilder.build(), hostLocation); trace.addEndpointHosts(Pair.of(source, destination)); traces.add(trace); }); //The destination host is not dual homed, if it is the other path might be done through routing. if (destination.locations().size() == 1) { return traces.build(); } } //handle the IPs for src and dst in case of L3 if (etherType.equals(EtherType.IPV4) || etherType.equals(EtherType.IPV6)) { //Match on the source IP if (!matchIP(source, failTrace, selectorBuilder, etherType, true)) { return ImmutableSet.of(failTrace); } //Match on destination IP if (!matchIP(destination, failTrace, selectorBuilder, etherType, false)) { return ImmutableSet.of(failTrace); } } else { failTrace.addResultMessage("Host based trace supports only IPv4 or IPv6 as EtherType, " + "please use packet based"); failTrace.setSuccess(false); return ImmutableSet.of(failTrace); } //l3 unicast, we get the dst mac of the leaf the source is connected to from netcfg SegmentRoutingDeviceConfig segmentRoutingConfig = networkConfigNib.getConfig(source.location() .deviceId(), SegmentRoutingDeviceConfig.class); if (segmentRoutingConfig != null) { selectorBuilder.matchEthDst(segmentRoutingConfig.routerMac()); } else { failTrace.addResultMessage("Can't get " + source.location().deviceId() + " router MAC from segment routing config can't perform L3 tracing."); failTrace.setSuccess(false); } source.locations().forEach(hostLocation -> { selectorBuilder.matchInPort(hostLocation.port()); StaticPacketTrace trace = trace(selectorBuilder.build(), hostLocation); trace.addEndpointHosts(Pair.of(source, destination)); traces.add(trace); }); return traces.build(); } catch (ConfigException e) { failTrace.addResultMessage("Can't get config " + e.getMessage()); return ImmutableSet.of(failTrace); } } /** * 
Matches src and dst IPs based on host information. * * @param host the host * @param failTrace the trace to use in case of failure * @param selectorBuilder the packet we are building to trace * @param etherType the traffic type * @param src is this src host or dst host * @return true if properly matched */ private boolean matchIP(Host host, StaticPacketTrace failTrace, Builder selectorBuilder, EtherType etherType, boolean src) { List<IpAddress> ips = getIpAddresses(host, etherType, true); if (ips.size() > 0) { if (etherType.equals(EtherType.IPV4)) { if (src) { selectorBuilder.matchIPSrc(ips.get(0).toIpPrefix()); } else { selectorBuilder.matchIPDst(ips.get(0).toIpPrefix()); } } else if (etherType.equals(EtherType.IPV6)) { if (src) { selectorBuilder.matchIPv6Src(ips.get(0).toIpPrefix()); } else { selectorBuilder.matchIPv6Dst(ips.get(0).toIpPrefix()); } } } else { failTrace.addResultMessage("Host " + host + " has no " + etherType + " address"); failTrace.setSuccess(false); return false; } return true; } List<IpAddress> getIpAddresses(Host host, EtherType etherType, boolean checklocal) { return host.ipAddresses().stream().filter(ipAddress -> { boolean correctIp = false; if (etherType.equals(EtherType.IPV4)) { correctIp = ipAddress.isIp4(); } else if (etherType.equals(EtherType.IPV6)) { correctIp = ipAddress.isIp6(); } if (checklocal) { correctIp = correctIp && !ipAddress.isLinkLocal(); } return correctIp; }).collect(Collectors.toList()); } /** * Checks that two hosts are bridged (L2Unicast). * * @param source the source host * @param destination the destination host * @return true if bridged. 
* @throws ConfigException if config can't be properly retrieved */ private boolean areBridged(Host source, Host destination) throws ConfigException { //If the locations is not the same we don't even check vlan or subnets if (Collections.disjoint(source.locations(), destination.locations())) { return false; } if (!source.vlan().equals(VlanId.NONE) && !destination.vlan().equals(VlanId.NONE) && !source.vlan().equals(destination.vlan())) { return false; } InterfaceConfig interfaceCfgH1 = networkConfigNib.getConfig(source.location(), InterfaceConfig.class); InterfaceConfig interfaceCfgH2 = networkConfigNib.getConfig(destination.location(), InterfaceConfig.class); if (interfaceCfgH1 != null && interfaceCfgH2 != null) { //following can be optimized but for clarity is left as is Interface intfH1 = interfaceCfgH1.getInterfaces().stream().findFirst().get(); Interface intfH2 = interfaceCfgH2.getInterfaces().stream().findFirst().get(); if (source.vlan().equals(VlanId.NONE) && !destination.vlan().equals(VlanId.NONE)) { return intfH1.vlanUntagged().equals(destination.vlan()) || intfH1.vlanNative().equals(destination.vlan()); } if (!source.vlan().equals(VlanId.NONE) && destination.vlan().equals(VlanId.NONE)) { return intfH2.vlanUntagged().equals(source.vlan()) || intfH2.vlanNative().equals(source.vlan()); } if (!intfH1.vlanNative().equals(intfH2.vlanNative())) { return false; } if (!intfH1.vlanUntagged().equals(intfH2.vlanUntagged())) { return false; } List<InterfaceIpAddress> intersection = new ArrayList<>(intfH1.ipAddressesList()); intersection.retainAll(intfH2.ipAddressesList()); if (intersection.size() == 0) { return false; } } return true; } @Override public StaticPacketTrace trace(TrafficSelector packet, ConnectPoint in) { log.info("Tracing packet {} coming in through {}", packet, in); //device must exist in ONOS Preconditions.checkNotNull(deviceNib.getDevice(in.deviceId()), "Device " + in.deviceId() + " must exist in ONOS"); StaticPacketTrace trace = new 
StaticPacketTrace(packet, in); boolean isDualHomed = getHosts(trace).stream().anyMatch(host -> host.locations().size() > 1); //FIXME this can be done recursively //Building output connect Points List<ConnectPoint> path = new ArrayList<>(); trace = traceInDevice(trace, packet, in, isDualHomed, path); trace = getTrace(path, in, trace, isDualHomed); return trace; } @Override public List<Set<StaticPacketTrace>> getMulitcastTrace(VlanId vlanId) { Generator<Set<StaticPacketTrace>> gen = new McastGenerator(mcastRouteNib, this, vlanId); List<Set<StaticPacketTrace>> multicastTraceList = StreamSupport.stream(gen.spliterator(), false).collect(Collectors.toList()); return multicastTraceList; } /** * Computes a trace for a give packet that start in the network at the given connect point. * * @param completePath the path traversed by the packet * @param in the input connect point * @param trace the trace to build * @param isDualHomed true if the trace we are doing starts or ends in a dual homed host * @return the build trace for that packet. */ private StaticPacketTrace getTrace(List<ConnectPoint> completePath, ConnectPoint in, StaticPacketTrace trace, boolean isDualHomed) { log.debug("------------------------------------------------------------"); //if the trace already contains the input connect point there is a loop if (pathContainsDevice(completePath, in.deviceId())) { trace.addResultMessage("Loop encountered in device " + in.deviceId()); completePath.add(in); trace.addCompletePath(completePath); trace.setSuccess(false); return trace; } //let's add the input connect point completePath.add(in); //If the trace has no outputs for the given input we stop here if (trace.getGroupOuputs(in.deviceId()) == null) { computePath(completePath, trace, null); trace.addResultMessage("No output out of device " + in.deviceId() + ". 
Packet is dropped"); trace.setSuccess(false); return trace; } //If the trace has outputs we analyze them all for (GroupsInDevice outputPath : trace.getGroupOuputs(in.deviceId())) { ConnectPoint cp = outputPath.getOutput(); log.debug("Connect point in {}", in); log.debug("Output path {}", cp); log.debug("{}", outputPath.getFinalPacket()); //Hosts for the the given output Set<Host> hostsList = hostNib.getConnectedHosts(cp); //Hosts queried from the original ip or mac Set<Host> hosts = getHosts(trace); if (in.equals(cp) && trace.getInitialPacket().getCriterion(Criterion.Type.VLAN_VID) != null && outputPath.getFinalPacket().getCriterion(Criterion.Type.VLAN_VID) != null && ((VlanIdCriterion) trace.getInitialPacket().getCriterion(Criterion.Type.VLAN_VID)).vlanId() .equals(((VlanIdCriterion) outputPath.getFinalPacket().getCriterion(Criterion.Type.VLAN_VID)) .vlanId())) { if (trace.getGroupOuputs(in.deviceId()).size() == 1 && computePath(completePath, trace, outputPath.getOutput())) { trace.addResultMessage("Connect point out " + cp + " is same as initial input " + in); trace.setSuccess(false); } } else if (!Collections.disjoint(hostsList, hosts)) { //If the two host collections contain the same item it means we reached the proper output log.debug("Stopping here because host is expected destination {}, reached through", completePath); if (computePath(completePath, trace, outputPath.getOutput())) { trace.addResultMessage("Reached required destination Host " + cp); trace.setSuccess(true); } break; } else if (cp.port().equals(PortNumber.CONTROLLER)) { //Getting the master when the packet gets sent as packet in NodeId master = mastershipNib.getMasterFor(cp.deviceId()); // TODO if we don't need to print master node id, exclude mastership NIB which is used only here trace.addResultMessage(PACKET_TO_CONTROLLER + " " + master.id()); computePath(completePath, trace, outputPath.getOutput()); handleVlanToController(outputPath, trace); } else if (linkNib.getEgressLinks(cp).size() > 0) 
{ //TODO this can be optimized if we use a Tree structure for paths. //if we already have outputs let's check if the one we are considering starts from one of the devices // in any of the ones we have. if (trace.getCompletePaths().size() > 0) { ConnectPoint inputForOutput = null; List<ConnectPoint> previousPath = new ArrayList<>(); for (List<ConnectPoint> path : trace.getCompletePaths()) { for (ConnectPoint connect : path) { //if the path already contains the input for the output we've found we use it if (connect.equals(in)) { inputForOutput = connect; previousPath = path; break; } } } //we use the pre-existing path up to the point we fork to a new output if (inputForOutput != null && completePath.contains(inputForOutput)) { List<ConnectPoint> temp = new ArrayList<>(previousPath); temp = temp.subList(0, previousPath.indexOf(inputForOutput) + 1); if (completePath.containsAll(temp)) { completePath = temp; } } } //let's add the ouput for the input completePath.add(cp); //let's compute the links for the given output Set<Link> links = linkNib.getEgressLinks(cp); log.debug("Egress Links {}", links); //For each link we trace the corresponding device for (Link link : links) { ConnectPoint dst = link.dst(); //change in-port to the dst link in port Builder updatedPacket = DefaultTrafficSelector.builder(); outputPath.getFinalPacket().criteria().forEach(updatedPacket::add); updatedPacket.add(Criteria.matchInPort(dst.port())); log.debug("DST Connect Point {}", dst); //build the elements for that device traceInDevice(trace, updatedPacket.build(), dst, isDualHomed, completePath); //continue the trace along the path getTrace(completePath, dst, trace, isDualHomed); } } else if (edgePortNib.isEdgePoint(outputPath.getOutput()) && trace.getInitialPacket().getCriterion(Criterion.Type.ETH_DST) != null && ((EthCriterion) trace.getInitialPacket().getCriterion(Criterion.Type.ETH_DST)) .mac().isMulticast()) { trace.addResultMessage("Packet is multicast and reached output " + 
outputPath.getOutput() + " which is enabled and is edge port"); trace.setSuccess(true); computePath(completePath, trace, outputPath.getOutput()); if (!hasOtherOutput(in.deviceId(), trace, outputPath.getOutput())) { return trace; } } else if (deviceNib.getPort(cp) != null && deviceNib.getPort(cp).isEnabled()) { EthTypeCriterion ethTypeCriterion = (EthTypeCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.ETH_TYPE); //We treat as correct output only if it's not LLDP or BDDP if (!(ethTypeCriterion.ethType().equals(EtherType.LLDP.ethType()) && !ethTypeCriterion.ethType().equals(EtherType.BDDP.ethType()))) { if (computePath(completePath, trace, outputPath.getOutput())) { if (hostsList.isEmpty()) { trace.addResultMessage("Packet is " + ((EthTypeCriterion) outputPath.getFinalPacket() .getCriterion(Criterion.Type.ETH_TYPE)).ethType() + " and reached " + cp + " with no hosts connected "); } else { IpAddress ipAddress = null; if (trace.getInitialPacket().getCriterion(Criterion.Type.IPV4_DST) != null) { ipAddress = ((IPCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.IPV4_DST)).ip().address(); } else if (trace.getInitialPacket().getCriterion(Criterion.Type.IPV6_DST) != null) { ipAddress = ((IPCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.IPV6_DST)).ip().address(); } if (ipAddress != null) { IpAddress finalIpAddress = ipAddress; if (hostsList.stream().anyMatch(host -> host.ipAddresses().contains(finalIpAddress)) || hostNib.getHostsByIp(finalIpAddress).isEmpty()) { trace.addResultMessage("Packet is " + ((EthTypeCriterion) outputPath.getFinalPacket() .getCriterion(Criterion.Type.ETH_TYPE)).ethType() + " and reached " + cp + " with hosts " + hostsList); } else { trace.addResultMessage("Wrong output " + cp + " for required destination ip " + ipAddress); trace.setSuccess(false); } } else { trace.addResultMessage("Packet is " + ((EthTypeCriterion) outputPath.getFinalPacket() .getCriterion(Criterion.Type.ETH_TYPE)).ethType() + " and 
reached " + cp + " with hosts " + hostsList); } } trace.setSuccess(true); } } } else { computePath(completePath, trace, cp); trace.setSuccess(false); if (deviceNib.getPort(cp) == null) { //Port is not existent on device. log.warn("Port {} is not available on device.", cp); trace.addResultMessage("Port " + cp + "is not available on device. Packet is dropped"); } else { //No links means that the packet gets dropped. log.warn("No links out of {}", cp); trace.addResultMessage("No links depart from " + cp + ". Packet is dropped"); } } } return trace; } /** * If the initial packet comes tagged with a Vlan we output it with that to ONOS. * If ONOS applied a vlan we remove it. * * @param outputPath the output * @param trace the trace we are building */ private void handleVlanToController(GroupsInDevice outputPath, StaticPacketTrace trace) { VlanIdCriterion initialVid = (VlanIdCriterion) trace.getInitialPacket().getCriterion(Criterion.Type.VLAN_VID); VlanIdCriterion finalVid = (VlanIdCriterion) outputPath.getFinalPacket().getCriterion(Criterion.Type.VLAN_VID); if (initialVid != null && !initialVid.equals(finalVid) && initialVid.vlanId().equals(VlanId.NONE)) { Set<Criterion> finalCriteria = new HashSet<>(outputPath.getFinalPacket().criteria()); //removing the final vlanId finalCriteria.remove(finalVid); Builder packetUpdated = DefaultTrafficSelector.builder(); finalCriteria.forEach(packetUpdated::add); //Initial was none so we set it to that packetUpdated.add(Criteria.matchVlanId(VlanId.NONE)); //Update final packet outputPath.setFinalPacket(packetUpdated.build()); } } /** * Checks if the device has other outputs than the given connect point. * * @param inDeviceId the device * @param trace the trace we are building * @param cp an output connect point * @return true if the device has other outputs. 
*/ private boolean hasOtherOutput(DeviceId inDeviceId, StaticPacketTrace trace, ConnectPoint cp) { return trace.getGroupOuputs(inDeviceId).stream().filter(groupsInDevice -> { return !groupsInDevice.getOutput().equals(cp); }).count() > 0; } /** * Checks if the path contains the device. * * @param completePath the path * @param deviceId the device to check * @return true if the path contains the device */ //TODO might prove costly, improvement: a class with both CPs and DeviceIds point. private boolean pathContainsDevice(List<ConnectPoint> completePath, DeviceId deviceId) { for (ConnectPoint cp : completePath) { if (cp.deviceId().equals(deviceId)) { return true; } } return false; } /** * Gets the hosts for the given initial packet. * * @param trace the trace we are building * @return set of the hosts we are trying to reach */ private Set<Host> getHosts(StaticPacketTrace trace) { IPCriterion ipv4Criterion = ((IPCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.IPV4_DST)); IPCriterion ipv6Criterion = ((IPCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.IPV6_DST)); Set<Host> hosts = new HashSet<>(); if (ipv4Criterion != null) { hosts.addAll(hostNib.getHostsByIp(ipv4Criterion.ip().address())); } if (ipv6Criterion != null) { hosts.addAll(hostNib.getHostsByIp(ipv6Criterion.ip().address())); } EthCriterion ethCriterion = ((EthCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.ETH_DST)); if (ethCriterion != null) { hosts.addAll(hostNib.getHostsByMac(ethCriterion.mac())); } return hosts; } /** * Computes the list of traversed connect points. 
*
     * @param completePath the list of devices
     * @param trace        the trace we are building
     * @param output       the final output connect point
     * @return true if a new complete path was added to the trace; false if the
     *         computed path was already present in the trace
     */
    private boolean computePath(List<ConnectPoint> completePath, StaticPacketTrace trace, ConnectPoint output) {
        List<ConnectPoint> traverseList = new ArrayList<>();
        if (!completePath.contains(trace.getInitialConnectPoint())) {
            traverseList.add(trace.getInitialConnectPoint());
        }

        // Special case: input and output on the same device form a two-point path.
        if (output != null && trace.getInitialConnectPoint().deviceId().equals(output.deviceId())) {
            trace.addCompletePath(ImmutableList.of(trace.getInitialConnectPoint(), output));
            return true;
        }

        traverseList.addAll(completePath);
        if (output != null && !completePath.contains(output)) {
            traverseList.add(output);
        }

        // Record the path only if the trace does not already contain it.
        if (!trace.getCompletePaths().contains(traverseList)) {
            trace.addCompletePath(ImmutableList.copyOf(traverseList));
            return true;
        }
        return false;
    }

    /**
     * Traces the packet inside a device starting from an input connect point.
     *
     * @param trace        the trace we are building
     * @param packet       the packet we are tracing
     * @param in           the input connect point.
     * @param isDualHomed  true if the trace we are doing starts or ends in a dual homed host
     * @param completePath the path up until this device
     * @return updated trace
     */
    private StaticPacketTrace traceInDevice(StaticPacketTrace trace, TrafficSelector packet, ConnectPoint in,
                                            boolean isDualHomed, List<ConnectPoint> completePath) {

        boolean multipleRoutes = false;
        if (trace.getGroupOuputs(in.deviceId()) != null) {
            multipleRoutes = multipleRoutes(trace);
        }
        // Skip devices already analyzed, unless dual-homing or multiple resolved
        // routes require re-evaluating them.
        if (trace.getGroupOuputs(in.deviceId()) != null && !isDualHomed && !multipleRoutes) {
            log.debug("Trace already contains device and given outputs");
            return trace;
        }
        log.debug("Packet {} coming in from {}", packet, in);

        //if device is not available exit here.
if (!deviceNib.isAvailable(in.deviceId())) { trace.addResultMessage("Device is offline " + in.deviceId()); computePath(completePath, trace, null); return trace; } //handle when the input is the controller //NOTE, we are using the input port as a convenience to carry the CONTROLLER port number even if // a packet in from the controller will not actually traverse the pipeline and have no such notion // as the input port. if (in.port().equals(PortNumber.CONTROLLER)) { StaticPacketTrace outputTrace = inputFromController(trace, in); if (outputTrace != null) { return trace; } } List<FlowEntry> flows = new ArrayList<>(); List<FlowEntry> outputFlows = new ArrayList<>(); List<Instruction> deferredInstructions = new ArrayList<>(); FlowEntry nextTableIdEntry = findNextTableIdEntry(in.deviceId(), -1); if (nextTableIdEntry == null) { trace.addResultMessage("No flow rules for device " + in.deviceId() + ". Aborting"); computePath(completePath, trace, null); trace.setSuccess(false); return trace; } TableId tableId = nextTableIdEntry.table(); FlowEntry flowEntry; boolean output = false; while (!output) { log.debug("Searching a Flow Entry on table {} for packet {}", tableId, packet); //get the rule that matches the incoming packet flowEntry = matchHighestPriority(packet, in, tableId); log.debug("Found Flow Entry {}", flowEntry); boolean isOfdpaHardware = TroubleshootUtils.hardwareOfdpaMap .getOrDefault(driverNib.getDriverName(in.deviceId()), false); //if the flow entry on a table is null and we are on hardware we treat as table miss, with few exceptions if (flowEntry == null && isOfdpaHardware) { log.debug("Ofdpa Hw setup, no flow rule means table miss"); if (((IndexTableId) tableId).id() == 27) { //Apparently a miss but Table 27 on OFDPA is a fixed table packet = handleOfdpa27FixedTable(trace, packet); } //Finding next table to go In case of miss nextTableIdEntry = findNextTableIdEntry(in.deviceId(), ((IndexTableId) tableId).id()); log.debug("Next table id entry {}", 
nextTableIdEntry); //FIXME find better solution that enable granularity greater than 0 or all rules //(another possibility is max tableId) if (nextTableIdEntry == null && flows.size() == 0) { trace.addResultMessage("No matching flow rules for device " + in.deviceId() + ". Aborting"); computePath(completePath, trace, null); trace.setSuccess(false); return trace; } else if (nextTableIdEntry == null) { //Means that no more flow rules are present output = true; } else if (((IndexTableId) tableId).id() == 20) { //if the table is 20 OFDPA skips to table 50 log.debug("A miss on Table 20 on OFDPA means that we skip directly to table 50"); tableId = IndexTableId.of(50); } else if (((IndexTableId) tableId).id() == 40) { //if the table is 40 OFDPA skips to table 60 log.debug("A miss on Table 40 on OFDPA means that we skip directly to table 60"); tableId = IndexTableId.of(60); } else { tableId = nextTableIdEntry.table(); } } else if (flowEntry == null) { trace.addResultMessage("Packet has no match on table " + tableId + " in device " + in.deviceId() + ". Dropping"); computePath(completePath, trace, null); trace.setSuccess(false); return trace; } else { //IF the table has a transition if (flowEntry.treatment().tableTransition() != null) { //update the next table we transitions to tableId = IndexTableId.of(flowEntry.treatment().tableTransition().tableId()); log.debug("Flow Entry has transition to table Id {}", tableId); flows.add(flowEntry); } else { //table has no transition so it means that it's an output rule if on the last table log.debug("Flow Entry has no transition to table, treating as last rule {}", flowEntry); flows.add(flowEntry); outputFlows.add(flowEntry); output = true; } //update the packet according to the immediate actions of this flow rule. 
packet = updatePacket(packet, flowEntry.treatment().immediate()).build(); //save the deferred rules for later deferredInstructions.addAll(flowEntry.treatment().deferred()); //If the flow requires to clear deferred actions we do so for all the ones we encountered. if (flowEntry.treatment().clearedDeferred()) { deferredInstructions.clear(); } //On table 10 OFDPA needs two rules to apply the vlan if none and then to transition to the next table. if (needsSecondTable10Flow(flowEntry, isOfdpaHardware)) { //Let's get the packet vlanId instruction VlanIdCriterion packetVlanIdCriterion = (VlanIdCriterion) packet.getCriterion(Criterion.Type.VLAN_VID); //Let's get the flow entry vlan mod instructions ModVlanIdInstruction entryModVlanIdInstruction = (ModVlanIdInstruction) flowEntry.treatment() .immediate().stream() .filter(instruction -> instruction instanceof ModVlanIdInstruction) .findFirst().orElse(null); //If the entry modVlan is not null we need to make sure that the packet has been updated and there // is a flow rule that matches on same criteria and with updated vlanId if (entryModVlanIdInstruction != null) { FlowEntry secondVlanFlow = getSecondFlowEntryOnTable10(packet, in, packetVlanIdCriterion, entryModVlanIdInstruction); //We found the flow that we expected if (secondVlanFlow != null) { flows.add(secondVlanFlow); } else { trace.addResultMessage("Missing forwarding rule for tagged packet on " + in); computePath(completePath, trace, null); return trace; } } } } } //Creating a modifiable builder for the output packet Builder builder = DefaultTrafficSelector.builder(); packet.criteria().forEach(builder::add); //Adding all the flows to the trace trace.addFlowsForDevice(in.deviceId(), ImmutableList.copyOf(flows)); List<PortNumber> outputPorts = new ArrayList<>(); List<FlowEntry> outputFlowEntries = handleFlows(trace, packet, in, outputFlows, builder, outputPorts); log.debug("Handling Groups"); //Analyze Groups List<Group> groups = new ArrayList<>(); Collection<FlowEntry> 
nonOutputFlows = flows; nonOutputFlows.removeAll(outputFlowEntries); //Handling groups pointed at by immediate instructions for (FlowEntry entry : flows) { getGroupsFromInstructions(trace, groups, entry.treatment().immediate(), entry.deviceId(), builder, outputPorts, in, completePath); } //If we have deferred instructions at this point we handle them. if (deferredInstructions.size() > 0) { builder = handleDeferredActions(trace, packet, in, deferredInstructions, outputPorts, groups, completePath); } packet = builder.build(); log.debug("Output Packet {}", packet); return trace; } private List<FlowEntry> handleFlows(StaticPacketTrace trace, TrafficSelector packet, ConnectPoint in, List<FlowEntry> outputFlows, Builder builder, List<PortNumber> outputPorts) { //TODO optimization //outputFlows contains also last rule of device, so we need filtering for OUTPUT instructions. List<FlowEntry> outputFlowEntries = outputFlows.stream().filter(flow -> flow.treatment() .allInstructions().stream().filter(instruction -> instruction.type() .equals(Instruction.Type.OUTPUT)).count() > 0).collect(Collectors.toList()); if (outputFlowEntries.size() > 1) { trace.addResultMessage("More than one flow rule with OUTPUT instruction"); log.warn("There cannot be more than one flow entry with OUTPUT instruction for {}", packet); } if (outputFlowEntries.size() == 1) { OutputInstruction outputInstruction = (OutputInstruction) outputFlowEntries.get(0).treatment() .allInstructions().stream() .filter(instruction -> { return instruction.type().equals(Instruction.Type.OUTPUT); }).findFirst().get(); //FIXME using GroupsInDevice for output even if flows. 
buildOutputFromDevice(trace, in, builder, outputPorts, outputInstruction, ImmutableList.of()); } return outputFlowEntries; } private boolean multipleRoutes(StaticPacketTrace trace) { boolean multipleRoutes = false; IPCriterion ipCriterion = ((IPCriterion) trace.getInitialPacket().getCriterion(Criterion.Type.IPV4_DST)); IpAddress ip = null; if (ipCriterion != null) { ip = ipCriterion.ip().address(); } else if (trace.getInitialPacket().getCriterion(Criterion.Type.IPV6_DST) != null) { ip = ((IPCriterion) trace.getInitialPacket().getCriterion(Criterion.Type.IPV6_DST)).ip().address(); } if (ip != null) { Optional<ResolvedRoute> optionalRoute = routeNib.longestPrefixLookup(ip); if (optionalRoute.isPresent()) { ResolvedRoute route = optionalRoute.get(); multipleRoutes = routeNib.getAllResolvedRoutes(route.prefix()).size() > 1; } } return multipleRoutes; } /** * Handles the specific case where the Input is the controller. * Note that the in port is used as a convenience to store the port of the controller even if the packet in * from a controller should not have a physical input port. The in port from the Controller is used to make sure * the flood to all active physical ports of the device. * * @param trace the trace * @param in the controller port * @return the augmented trace. */ private StaticPacketTrace inputFromController(StaticPacketTrace trace, ConnectPoint in) { EthTypeCriterion ethTypeCriterion = (EthTypeCriterion) trace.getInitialPacket() .getCriterion(Criterion.Type.ETH_TYPE); //If the packet is LLDP or BDDP we flood it on all active ports of the switch. 
if (ethTypeCriterion != null && (ethTypeCriterion.ethType().equals(EtherType.LLDP.ethType())
                || ethTypeCriterion.ethType().equals(EtherType.BDDP.ethType()))) {

            //get the active ports
            List<Port> enabledPorts = deviceNib.getPorts(in.deviceId()).stream()
                    .filter(Port::isEnabled)
                    .collect(Collectors.toList());
            //build an output from each one
            enabledPorts.forEach(port -> {
                GroupsInDevice output = new GroupsInDevice(new ConnectPoint(port.element().id(), port.number()),
                        ImmutableList.of(), trace.getInitialPacket());
                trace.addGroupOutputPath(in.deviceId(), output);
            });
            return trace;
        }
        // Not a link-discovery packet: let the caller run the normal pipeline trace.
        return null;
    }

    // True when OFDPA hardware needs the extra table-10 rule: the matched rule sits on
    // table 10 and matches on VLAN NONE (untagged traffic).
    private boolean needsSecondTable10Flow(FlowEntry flowEntry, boolean isOfdpaHardware) {
        return isOfdpaHardware && flowEntry.table().equals(IndexTableId.of(10))
                && flowEntry.selector().getCriterion(Criterion.Type.VLAN_VID) != null
                && ((VlanIdCriterion) flowEntry.selector().getCriterion(Criterion.Type.VLAN_VID))
                .vlanId().equals(VlanId.NONE);
    }

    /**
     * Method that finds a flow rule on table 10 that matches the packet and the VLAN of the already
     * found rule on table 10. This is because OFDPA needs two rules on table 10, first to apply the rule,
     * second to transition to following table
     *
     * @param packet                    the incoming packet
     * @param in                        the input connect point
     * @param packetVlanIdCriterion     the vlan criterion from the packet
     * @param entryModVlanIdInstruction the entry vlan instruction
     * @return the second flow entry that matched
     */
    private FlowEntry getSecondFlowEntryOnTable10(TrafficSelector packet, ConnectPoint in,
                                                  VlanIdCriterion packetVlanIdCriterion,
                                                  ModVlanIdInstruction entryModVlanIdInstruction) {
        FlowEntry secondVlanFlow = null;
        //Check the packet has been updated from the first rule.
if (packetVlanIdCriterion.vlanId().equals(entryModVlanIdInstruction.vlanId())) {

            //find a rule on the same table that matches the vlan and
            // also all the other elements of the flow such as input port
            secondVlanFlow = Lists.newArrayList(flowNib.getFlowEntriesByState(in.deviceId(),
                    FlowEntry.FlowEntryState.ADDED)
                    .iterator()).stream()
                    .filter(entry -> {
                        return entry.table().equals(IndexTableId.of(10));
                    })
                    .filter(entry -> {
                        VlanIdCriterion criterion = (VlanIdCriterion) entry.selector()
                                .getCriterion(Criterion.Type.VLAN_VID);
                        return criterion != null && match(packet, entry)
                                && criterion.vlanId().equals(entryModVlanIdInstruction.vlanId());
                    }).findFirst().orElse(null);

        }
        return secondVlanFlow;
    }

    /**
     * Handles table 27 in OFDPA which is a fixed table not visible to any controller that handles Mpls Labels.
     *
     * @param trace  the trace we are building
     * @param packet the incoming packet
     * @return the updated packet
     */
    private TrafficSelector handleOfdpa27FixedTable(StaticPacketTrace trace, TrafficSelector packet) {
        log.debug("Handling table 27 on OFDPA, removing mpls ETH Type and change mpls label");
        Criterion mplsCriterion = packet.getCriterion(Criterion.Type.ETH_TYPE);
        ImmutableList.Builder<Instruction> builder = ImmutableList.builder();

        //If the packet comes in with the expected elements we update it as per OFDPA spec.
        if (mplsCriterion != null && ((EthTypeCriterion) mplsCriterion).ethType()
                .equals(EtherType.MPLS_UNICAST.ethType())) {
            //TODO update with parsing with eth MPLS pop Instruction for treating label and bos
            Instruction ethInstruction = Instructions.popMpls(((EthTypeCriterion) trace.getInitialPacket()
                    .getCriterion(Criterion.Type.ETH_TYPE)).ethType());
            //FIXME what do we use as L3_Unicast mpls Label ?
            //translateInstruction(builder, ethInstruction);
            builder.add(ethInstruction);
        }
        packet = updatePacket(packet, builder.build()).build();
        return packet;
    }

    /**
     * Finds the flow entry with the minimum next table Id.
     *
     * @param deviceId  the device to search
     * @param currentId the current id.
the search will use this as minimum
     * @return the flow entry with the minimum table Id after the given one.
     */
    private FlowEntry findNextTableIdEntry(DeviceId deviceId, int currentId) {

        final Comparator<FlowEntry> comparator = Comparator.comparing((FlowEntry f) -> ((IndexTableId) f.table()).id());

        // Smallest table id strictly greater than currentId, or null if none remain.
        return Lists.newArrayList(flowNib.getFlowEntriesByState(deviceId, FlowEntry.FlowEntryState.ADDED)
                .iterator()).stream()
                .filter(f -> ((IndexTableId) f.table()).id() > currentId).min(comparator).orElse(null);
    }

    // Applies the deferred instructions accumulated along the pipeline to the packet
    // and resolves any OUTPUT instruction or group reference they contain.
    private Builder handleDeferredActions(StaticPacketTrace trace, TrafficSelector packet, ConnectPoint in,
                                          List<Instruction> deferredInstructions,
                                          List<PortNumber> outputPorts, List<Group> groups,
                                          List<ConnectPoint> completePath) {

        //Update the packet with the deferred instructions
        Builder builder = updatePacket(packet, deferredInstructions);

        //Gather any output instructions from the deferred instruction
        List<Instruction> outputFlowInstruction = deferredInstructions.stream().filter(instruction -> {
            return instruction.type().equals(Instruction.Type.OUTPUT);
        }).collect(Collectors.toList());

        //We are considering deferred instructions from flows, there can only be one output.
        if (outputFlowInstruction.size() > 1) {
            trace.addResultMessage("More than one flow rule with OUTPUT instruction");
            log.warn("There cannot be more than one flow entry with OUTPUT instruction for {}", packet);
        }
        //If there is one output let's go through that
        if (outputFlowInstruction.size() == 1) {
            buildOutputFromDevice(trace, in, builder, outputPorts,
                    (OutputInstruction) outputFlowInstruction.get(0), ImmutableList.of());
        }
        //If there is no output let's see if there any deferred instruction point to groups.
        if (outputFlowInstruction.size() == 0) {
            getGroupsFromInstructions(trace, groups, deferredInstructions,
                    in.deviceId(), builder, outputPorts, in, completePath);
        }
        return builder;
    }

    /**
     * Gets group information from instructions.
* * @param trace the trace we are building * @param groupsForDevice the set of groups for this device * @param instructions the set of instructions we are searching for groups. * @param deviceId the device we are considering * @param builder the builder of the input packet * @param outputPorts the output ports for that packet */ private void getGroupsFromInstructions(StaticPacketTrace trace, List<Group> groupsForDevice, List<Instruction> instructions, DeviceId deviceId, Builder builder, List<PortNumber> outputPorts, ConnectPoint in, List<ConnectPoint> completePath) { List<Instruction> groupInstructionlist = new ArrayList<>(); // sort instructions according to priority (larger Instruction.Type ENUM constant first) // which enables to treat other actions before the OUTPUT action //TODO improve the priority scheme according to the OpenFlow ActionSet spec List<Instruction> instructionsSorted = new ArrayList<>(); instructionsSorted.addAll(instructions); instructionsSorted.sort((instr1, instr2) -> { return Integer.compare(instr2.type().ordinal(), instr1.type().ordinal()); }); for (Instruction instruction : instructionsSorted) { log.debug("Considering Instruction {}", instruction); //if the instruction is not group we need to update the packet or add the output //to the possible outputs for this packet if (!instruction.type().equals(Instruction.Type.GROUP)) { //if the instruction is not group we need to update the packet or add the output //to the possible outputs for this packet if (instruction.type().equals(Instruction.Type.OUTPUT)) { buildOutputFromDevice(trace, in, builder, outputPorts, (OutputInstruction) instruction, ImmutableList.copyOf(groupsForDevice)); //clearing the groups because we start from the top. 
groupsForDevice.clear(); } else { builder = translateInstruction(builder, instruction); } } else { //if the instuction is pointing to a group we need to get the group groupInstructionlist.add(instruction); } } //handle all the internal instructions pointing to a group. for (Instruction instr : groupInstructionlist) { GroupInstruction groupInstruction = (GroupInstruction) instr; Group group = Lists.newArrayList(groupNib.getGroups(deviceId)).stream().filter(groupInternal -> { return groupInternal.id().equals(groupInstruction.groupId()); }).findAny().orElse(null); if (group == null) { trace.addResultMessage("Null group for Instruction " + instr); trace.setSuccess(false); break; } if (group.buckets().buckets().size() == 0) { trace.addResultMessage("Group " + group.id() + " has no buckets"); trace.setSuccess(false); computePath(completePath, trace, null); break; } //Cycle in each of the group's buckets and add them to the groups for this Device. for (GroupBucket bucket : group.buckets().buckets()) { //add the group to the traversed groups if (!groupsForDevice.contains(group)) { groupsForDevice.add(group); } getGroupsFromInstructions(trace, groupsForDevice, bucket.treatment().allInstructions(), deviceId, builder, outputPorts, in, completePath); } } } /** * Check if the output is the input port, if so adds a dop result message, otherwise builds * a possible output from this device. 
* * @param trace the trace * @param in the input connect point * @param builder the packet builder * @param outputPorts the list of output ports for this device * @param outputInstruction the output instruction * @param groupsForDevice the groups we output from */ private void buildOutputFromDevice(StaticPacketTrace trace, ConnectPoint in, Builder builder, List<PortNumber> outputPorts, OutputInstruction outputInstruction, List<Group> groupsForDevice) { ConnectPoint output = new ConnectPoint(in.deviceId(), outputInstruction.port()); outputPorts.add(outputInstruction.port()); GroupsInDevice device = new GroupsInDevice(output, groupsForDevice, builder.build()); if (trace.getGroupOuputs(output.deviceId()) != null && trace.getGroupOuputs(output.deviceId()).contains(device)) { return; } trace.addGroupOutputPath(in.deviceId(), new GroupsInDevice(output, groupsForDevice, builder.build())); } /** * Applies all give instructions to the input packet. * * @param packet the input packet * @param instructions the set of instructions * @return the packet with the applied instructions */ private Builder updatePacket(TrafficSelector packet, List<Instruction> instructions) { Builder newSelector = DefaultTrafficSelector.builder(); packet.criteria().forEach(newSelector::add); //FIXME optimize for (Instruction instruction : instructions) { newSelector = translateInstruction(newSelector, instruction); } return newSelector; } /** * Applies an instruction to the packet in the form of a selector. 
* * @param newSelector the packet selector * @param instruction the instruction to be translated * @return the new selector with the applied instruction */ private Builder translateInstruction(Builder newSelector, Instruction instruction) { log.debug("Translating instruction {}", instruction); log.debug("New Selector {}", newSelector.build()); //TODO add as required Criterion criterion = null; switch (instruction.type()) { case L2MODIFICATION: L2ModificationInstruction l2Instruction = (L2ModificationInstruction) instruction; switch (l2Instruction.subtype()) { case VLAN_ID: ModVlanIdInstruction vlanIdInstruction = (ModVlanIdInstruction) instruction; VlanId id = vlanIdInstruction.vlanId(); criterion = Criteria.matchVlanId(id); break; case VLAN_POP: criterion = Criteria.matchVlanId(VlanId.NONE); break; case MPLS_PUSH: ModMplsHeaderInstruction mplsEthInstruction = (ModMplsHeaderInstruction) instruction; criterion = Criteria.matchEthType(mplsEthInstruction.ethernetType().toShort()); break; case MPLS_POP: ModMplsHeaderInstruction mplsPopInstruction = (ModMplsHeaderInstruction) instruction; criterion = Criteria.matchEthType(mplsPopInstruction.ethernetType().toShort()); //When popping MPLS we remove label and BOS TrafficSelector temporaryPacket = newSelector.build(); if (temporaryPacket.getCriterion(Criterion.Type.MPLS_LABEL) != null) { Builder noMplsSelector = DefaultTrafficSelector.builder(); temporaryPacket.criteria().stream().filter(c -> { return !c.type().equals(Criterion.Type.MPLS_LABEL) && !c.type().equals(Criterion.Type.MPLS_BOS); }).forEach(noMplsSelector::add); newSelector = noMplsSelector; } break; case MPLS_LABEL: ModMplsLabelInstruction mplsLabelInstruction = (ModMplsLabelInstruction) instruction; criterion = Criteria.matchMplsLabel(mplsLabelInstruction.label()); newSelector.matchMplsBos(true); break; case ETH_DST: ModEtherInstruction modEtherDstInstruction = (ModEtherInstruction) instruction; criterion = Criteria.matchEthDst(modEtherDstInstruction.mac()); 
break; case ETH_SRC: ModEtherInstruction modEtherSrcInstruction = (ModEtherInstruction) instruction; criterion = Criteria.matchEthSrc(modEtherSrcInstruction.mac()); break; default: log.debug("Unsupported L2 Instruction"); break; } break; default: log.debug("Unsupported Instruction"); break; } if (criterion != null) { log.debug("Adding criterion {}", criterion); newSelector.add(criterion); } return newSelector; } /** * Finds the rule in the device that mathces the input packet and has the highest priority. * * @param packet the input packet * @param in the connect point the packet comes in from * @param tableId the table to search * @return the flow entry */ private FlowEntry matchHighestPriority(TrafficSelector packet, ConnectPoint in, TableId tableId) { //Computing the possible match rules. final Comparator<FlowEntry> comparator = Comparator.comparing(FlowRule::priority); return Lists.newArrayList(flowNib.getFlowEntriesByState(in.deviceId(), FlowEntry.FlowEntryState.ADDED) .iterator()).stream() .filter(flowEntry -> { return flowEntry.table().equals(tableId); }) .filter(flowEntry -> { return match(packet, flowEntry); }).max(comparator).orElse(null); } /** * Matches the packet with the given flow entry. * * @param packet the packet to match * @param flowEntry the flow entry to match the packet against * @return true if the packet matches the flow. */ private boolean match(TrafficSelector packet, FlowEntry flowEntry) { return flowEntry.selector().criteria().stream().allMatch(criterion -> { Criterion.Type type = criterion.type(); //If the criterion has IP we need to do LPM to establish matching. if (type.equals(Criterion.Type.IPV4_SRC) || type.equals(Criterion.Type.IPV4_DST) || type.equals(Criterion.Type.IPV6_SRC) || type.equals(Criterion.Type.IPV6_DST)) { return matchIp(packet, (IPCriterion) criterion); //we check that the packet contains the criterion provided by the flow rule. 
} else if (type.equals(Criterion.Type.ETH_SRC_MASKED)) { return matchMac(packet, (EthCriterion) criterion, false); } else if (type.equals(Criterion.Type.ETH_DST_MASKED)) { return matchMac(packet, (EthCriterion) criterion, true); } else { return packet.criteria().contains(criterion); } }); } /** * Checks if the packet has an dst or src IP and if that IP matches the subnet of the ip criterion. * * @param packet the incoming packet * @param criterion the criterion to match * @return true if match */ private boolean matchIp(TrafficSelector packet, IPCriterion criterion) { IPCriterion matchCriterion = (IPCriterion) packet.getCriterion(criterion.type()); //if the packet does not have an IPv4 or IPv6 criterion we return true if (matchCriterion == null) { return false; } try { log.debug("Checking if {} is under {}", matchCriterion.ip(), criterion.ip()); Subnet subnet = Subnet.createInstance(criterion.ip().toString()); return subnet.isInSubnet(matchCriterion.ip().address().toInetAddress()); } catch (UnknownHostException e) { return false; } } /** * Checks if the packet has a dst or src MAC and if that Mac matches the mask of the mac criterion. 
* * @param packet the incoming packet * @param hitCriterion the criterion to match * @param dst true if we are checking DST MAC * @return true if match */ private boolean matchMac(TrafficSelector packet, EthCriterion hitCriterion, boolean dst) { //Packet can have only one EthCriterion EthCriterion matchCriterion; if (dst) { matchCriterion = (EthCriterion) packet.criteria().stream().filter(criterion1 -> { return criterion1.type().equals(Criterion.Type.ETH_DST_MASKED) || criterion1.type().equals(Criterion.Type.ETH_DST); }).findFirst().orElse(null); } else { matchCriterion = (EthCriterion) packet.criteria().stream().filter(criterion1 -> { return criterion1.type().equals(Criterion.Type.ETH_SRC_MASKED) || criterion1.type().equals(Criterion.Type.ETH_SRC); }).findFirst().orElse(null); } //if the packet does not have an ETH criterion we return true if (matchCriterion == null) { return true; } log.debug("Checking if {} is under {}/{}", matchCriterion.mac(), hitCriterion.mac(), hitCriterion.mask()); return compareMac(matchCriterion.mac(), hitCriterion.mac(), hitCriterion.mask()); } }
apache-2.0
ndandoulakis/ndTorrent
src/com/ndtorrent/client/RollingTotal.java
754
package com.ndtorrent.client; public final class RollingTotal { private double[] buckets; private double total; public RollingTotal(int nbuckets) { if (nbuckets < 1) throw new IllegalArgumentException( "nbuckets must be greater than zero"); buckets = new double[nbuckets]; } public void add(double amount) { buckets[0] += amount; total += amount; } public void roll() { // Roll after you have queried the total. total -= buckets[buckets.length - 1]; for (int i = buckets.length - 1; i >= 1; i--) { buckets[i] = buckets[i - 1]; } buckets[0] = 0; } public double total() { return total; } public double average() { return total / buckets.length; } public double[] array() { return buckets; } }
apache-2.0
sarl/sarl
main/coreplugins/io.sarl.eclipse/src/io/sarl/eclipse/util/AbstractSarlScriptInteractiveSelector.java
15149
/* * $Id$ * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.eclipse.util; import java.lang.reflect.InvocationTargetException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import javax.inject.Inject; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IFolder; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IStorage; import org.eclipse.core.runtime.CoreException; import org.eclipse.debug.core.DebugPlugin; import org.eclipse.debug.core.ILaunchManager; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.jdt.core.IJavaProject; import org.eclipse.jdt.core.IPackageFragment; import org.eclipse.jdt.core.IPackageFragmentRoot; import org.eclipse.jdt.core.IType; import org.eclipse.jdt.core.JavaModelException; import org.eclipse.jdt.internal.debug.ui.JDIDebugUIPlugin; import org.eclipse.jface.viewers.ILabelProvider; import org.eclipse.jface.viewers.ILabelProviderListener; import org.eclipse.jface.window.Window; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Shell; import 
org.eclipse.ui.PlatformUI; import org.eclipse.ui.dialogs.ElementListSelectionDialog; import org.eclipse.ui.progress.IProgressService; import org.eclipse.xtext.EcoreUtil2; import org.eclipse.xtext.ui.resource.IResourceSetProvider; import org.eclipse.xtext.ui.resource.IStorage2UriMapper; import org.eclipse.xtext.util.Pair; import io.sarl.eclipse.SARLEclipsePlugin; import io.sarl.eclipse.launching.config.ILaunchConfigurationConfigurator; import io.sarl.lang.core.Agent; import io.sarl.lang.sarl.SarlScript; /** Abstract implementation of a utility class that enables to find the elements within a * SARL script and interactively select it. * * @param <ET> the type of the valid objects that are supported by this shortcut. * @param <JT> the type of the valid objects that are supported by this shortcut. * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ * @since 0.7 */ @SuppressWarnings("checkstyle:classfanoutcomplexity") public abstract class AbstractSarlScriptInteractiveSelector<ET extends EObject, JT> { /** Mapping from storage resource to URI. */ @Inject protected IStorage2UriMapper storage2UriMapper; /** Configurator of launch config. */ @Inject protected ILaunchConfigurationConfigurator configurator; /** Provider of resource set. */ @Inject protected IResourceSetProvider resourceSetProvider; /** Converter from JDT objects to Ecore objects. */ @Inject protected Jdt2Ecore jdt; /** Provider of labels and icons. */ @Inject protected ILabelProvider labelProvider; /** Replies if the given resource could be considered for discovering an agent to be launched. * * @param resource the resource. * @return {@code true} if the resource could be explored. */ @SuppressWarnings("static-method") protected boolean isValidResource(IResource resource) { return resource.isAccessible() && !resource.isHidden() && !resource.isPhantom() && !resource.isDerived(); } /** Replies if the given element could be considered as selectable. 
* * @param element the element to test. * @return {@code true} if the element could be selected. */ protected abstract boolean isSelectableElement(ET element); /** Replies the URI of the resource in which the given object is located if it is an eligible object. * * @param object the object. * @return the URI. * @since 0.7 */ protected abstract URI getResourceURIForValidEObject(Object object); /** Replies the URI of the type of the valid types. * * @return the type. * @since 0.7 */ protected abstract Class<ET> getValidEObjectType(); /** * Returns the singleton launch manager. * * @return launch manager */ protected static ILaunchManager getLaunchManager() { return DebugPlugin.getDefault().getLaunchManager(); } /** * Convenient method to return the active workbench window shell. * * @return active workbench window shell */ protected static Shell getShell() { return JDIDebugUIPlugin.getActiveWorkbenchShell(); } /** Replies the qualified name of the given element. * * @param element the element. * @return the qualified name. */ protected abstract String getQualifiedNameFor(ET element); /** Replies the supported elements that are into the given selection. * * @param selection the selected elements. * @param progress the progress monitor. * @return the supported elements within the selection * @throws InvocationTargetException on failure when retrieving an element. * @throws InterruptedException if the search is cancelled. 
*/ @SuppressWarnings({"unchecked", "checkstyle:cyclomaticcomplexity", "checkstyle:nestedifdepth"}) private List<ElementDescription> findElements(Object[] selection, IProgressService progress) throws InvocationTargetException, InterruptedException { final List<ElementDescription> descs = new ArrayList<>(); progress.busyCursorWhile(monitor -> { try { monitor.beginTask( MessageFormat.format(Messages.AbstractSarlScriptInteractiveSelector_0, getElementsLabel()), selection.length); for (final Object element : selection) { final URI fileURI = getResourceURIForValidEObject(element); if (fileURI != null) { for (final Pair<IStorage, IProject> storage: this.storage2UriMapper.getStorages(fileURI)) { descs.add(new ElementDescription( storage.getSecond().getName(), getQualifiedNameFor((ET) element), element)); break; } } else { final LinkedList<Object> stack = new LinkedList<>(); stack.add(element); final Class<ET> validElementType = getValidEObjectType(); while (!stack.isEmpty()) { final Object current = stack.removeFirst(); if (current instanceof IFile) { final IFile file = (IFile) current; if (isValidResource(file)) { final ResourceSet resourceSet = this.resourceSetProvider.get(file.getProject()); final URI resourceURI = URI.createPlatformResourceURI(file.getFullPath().toString(), true); if (resourceURI != null) { try { final Resource resource = resourceSet.getResource(resourceURI, true); if (resource != null) { final String projectName = file.getProject().getName(); for (final EObject content : resource.getContents()) { if (content instanceof SarlScript) { final List<ET> types = EcoreUtil2.getAllContentsOfType(content, validElementType); for (final ET elt : types) { if (isSelectableElement(elt)) { descs.add(new ElementDescription( projectName, getQualifiedNameFor(elt), elt)); } } } } } } catch (Throwable exception) { // The exception is ignore because it is assumed it is caused by a // file from which a Xtext resource cannot be extracted. 
} } } } else if (current instanceof IFolder) { final IFolder folder = (IFolder) current; if (isValidResource(folder)) { try { stack.addAll(Arrays.asList(folder.members(0))); } catch (CoreException exception) { // Ignore the failing resources } } } else if (current instanceof IType) { final IType type = (IType) current; final String qn = type.getFullyQualifiedName(); final IJavaProject project = type.getJavaProject(); if (this.jdt.isSubClassOf(this.jdt.toTypeFinder(project), qn, Agent.class.getName())) { descs.add(new ElementDescription(project.getElementName(), qn, type)); } } else if (current instanceof IPackageFragment) { final IPackageFragment fragment = (IPackageFragment) current; stack.addAll(Arrays.asList(fragment.getNonJavaResources())); for (final Object child : fragment.getChildren()) { stack.add(child); } } else if (current instanceof IPackageFragmentRoot) { final IPackageFragmentRoot fragment = (IPackageFragmentRoot) current; stack.addAll(Arrays.asList(fragment.getNonJavaResources())); for (final Object child : fragment.getChildren()) { stack.add(child); } } else if (current instanceof IJavaProject) { stack.addAll(Arrays.asList(((IJavaProject) current).getNonJavaResources())); } } } monitor.worked(1); } } catch (JavaModelException exception) { throw new InvocationTargetException(exception); } }); return descs; } /** Search the elements based on the given scope, and select one. * If more than one element was found, the user selects interactively one. * * @param showEmptySelectionError indicates if this function shows an error when the selection is empty. * @param scope the elements to consider for an element type that can be launched. * @return the selected element; or {@code null} if there is no selection. */ public ElementDescription searchAndSelect(boolean showEmptySelectionError, Object... 
scope) { try { final List<ElementDescription> elements = findElements(scope, PlatformUI.getWorkbench().getProgressService()); ElementDescription element = null; if (elements == null || elements.isEmpty()) { if (showEmptySelectionError) { SARLEclipsePlugin.getDefault().openError(getShell(), Messages.AbstractSarlScriptInteractiveSelector_1, MessageFormat.format(Messages.AbstractSarlScriptInteractiveSelector_2, getElementLabel()), null, null); } } else if (elements.size() > 1) { element = chooseElement(elements); } else { element = elements.get(0); } return element; } catch (InterruptedException exception) { // } catch (Exception exception) { SARLEclipsePlugin.getDefault().openError(getShell(), Messages.AbstractSarlScriptInteractiveSelector_1, null, null, exception); } return null; } /** Replies the text that describes a single element to select. * * @return the text describing a single element to select. */ protected abstract String getElementLabel(); /** Replies the long text that describes a single element to select. * * @return the long text describing a single element to select. */ protected abstract String getElementLongLabel(); /** Replies the text that describes multiple element to select. * * @return the text describing multiple element to select. */ protected abstract String getElementsLabel(); /** * Prompts the user to select an element from the given element types. * * @param elements the element types to choose from. * @return the selected element or {@code null} if none. 
*/ private ElementDescription chooseElement(List<ElementDescription> elements) { final ElementListSelectionDialog dialog = new ElementListSelectionDialog(getShell(), new LabelProvider()); dialog.setElements(elements.toArray()); dialog.setTitle(MessageFormat.format(Messages.AbstractSarlScriptInteractiveSelector_3, getElementLabel())); dialog.setMessage(MessageFormat.format(Messages.AbstractSarlScriptInteractiveSelector_3, getElementLongLabel())); dialog.setMultipleSelection(false); final int result = dialog.open(); if (result == Window.OK) { return (ElementDescription) dialog.getFirstResult(); } return null; } /** Replies the icon associated to the elements. * * @param element the element for which the icon should be replied, or {@code null} if it is unknown. * @return the icon. */ protected abstract Image getElementImage(Object element); /** Description of an element to launch. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ * @since 0.7 */ public static class ElementDescription { /** Project name. */ @SuppressWarnings("checkstyle:visibilitymodifier") public final String projectName; /** Element fully qualified name. */ @SuppressWarnings("checkstyle:visibilitymodifier") public final String elementName; /** Element fully qualified name. */ @SuppressWarnings("checkstyle:visibilitymodifier") public final Object element; /** Constructor. * * @param project the name of the project. * @param name the name of the element to launch. * @param element the object. */ protected ElementDescription(String project, String name, Object element) { this.projectName = project; this.elementName = name; this.element = element; } @Override public String toString() { return this.elementName; } } /** Label provider. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ */ private class LabelProvider implements ILabelProvider { /** Constructor. 
*/ LabelProvider() { // } @Override public void addListener(ILabelProviderListener listener) { AbstractSarlScriptInteractiveSelector.this.labelProvider.addListener(listener); } @Override public void dispose() { AbstractSarlScriptInteractiveSelector.this.labelProvider.dispose(); } @Override public boolean isLabelProperty(Object element, String property) { return AbstractSarlScriptInteractiveSelector.this.labelProvider.isLabelProperty(element, property); } @Override public void removeListener(ILabelProviderListener listener) { AbstractSarlScriptInteractiveSelector.this.labelProvider.removeListener(listener); } @Override public Image getImage(Object element) { if (element instanceof ElementDescription) { return getElementImage(((ElementDescription) element).element); } return AbstractSarlScriptInteractiveSelector.this.labelProvider.getImage(element); } @Override public String getText(Object element) { if (element instanceof ElementDescription) { return ((ElementDescription) element).elementName; } return AbstractSarlScriptInteractiveSelector.this.labelProvider.getText(element); } } }
apache-2.0
wired-mind/usher
src/test/java/io/cozmic/usher/test/localintegration/LogOutputTests.java
3580
package io.cozmic.usher.test.localintegration; import io.cozmic.usher.Start; import io.vertx.core.DeploymentOptions; import io.vertx.core.Handler; import io.vertx.core.Vertx; import io.vertx.core.VertxOptions; import io.vertx.core.json.JsonObject; import io.vertx.core.net.NetClient; import io.vertx.core.net.NetSocket; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; import org.apache.commons.codec.binary.Hex; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import static org.junit.Assert.fail; /** * File plugin is a work in progress. Putting in just enough now to write debug logs */ @RunWith(VertxUnitRunner.class) public class LogOutputTests { Vertx vertx; private LogHandler fakeLogHandler; @Before public void before(TestContext context) { vertx = Vertx.vertx(); Logger testLogger = Logger.getLogger("test_logger"); fakeLogHandler = new LogHandler(); fakeLogHandler.setLevel(Level.ALL); testLogger.setLevel(Level.ALL); testLogger.setUseParentHandlers(false); testLogger.addHandler(fakeLogHandler); } @After public void after(TestContext context) { vertx.close(context.asyncAssertSuccess()); } /** * Log plugin is rudimentary for now. 
It will return a single byte to indicate the Write is done * @param context */ @Test public void testCanWriteToLog(TestContext context) { final DeploymentOptions options = buildDeploymentOptions(); vertx.deployVerticle(Start.class.getName(), options, context.asyncAssertSuccess(deploymentID -> { final Async async = context.async(); final NetClient netClient = vertx.createNetClient(); netClient.connect(2500, "localhost", context.asyncAssertSuccess(socket -> { socket.handler(buffer -> { context.assertEquals((byte) 0x1, buffer.getByte(0)); context.assertEquals(Hex.encodeHexString("Hello Log".getBytes()), fakeLogHandler.lastRecord.getMessage()); async.complete(); }); socket.write("Hello Log"); })); vertx.setTimer(5000, event -> context.fail("timed out")); })); } public DeploymentOptions buildDeploymentOptions() { JsonObject config = null; try { final URI uri = getClass().getResource("/config_log_output.json").toURI(); final String configString = new String(Files.readAllBytes(Paths.get(uri))); config = new JsonObject(configString); } catch (URISyntaxException | IOException e) { fail(e.getMessage()); } final DeploymentOptions options = new DeploymentOptions(); options.setConfig(config); return options; } class LogHandler extends java.util.logging.Handler { Level lastLevel = Level.FINEST; private LogRecord lastRecord; public Level checkLevel() { return lastLevel; } public void close(){} @Override public void publish(LogRecord record) { lastRecord = record; } public void flush(){} } }
apache-2.0
freeVM/freeVM
enhanced/buildtest/tests/functional/src/test/functional/org/apache/harmony/test/func/api/javax/management/monitor/gaugemonitor/sensors/RandomFunction.java
2405
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.test.func.api.javax.management.monitor.gaugemonitor.sensors; import java.util.Random; import org.apache.harmony.share.Base; public class RandomFunction implements RandomFunctionMBean, Runnable { public static final String SENSOR_NAME_TEMPLATE = "org.apache.harmony.test.func." + "api.javax.management.monitor.gaugemonitor:type=Sensor,id="; private double value; public static boolean isHigh; public static boolean isNotify = false; public static boolean stop = false; private final Object sync; public RandomFunction(Object prpsdSync) { this.sync = prpsdSync; } public double getValue() { return value; } private void note(double val) { if (isNotify) { if (isHigh && (val > 25)) { Base.log.add("RandomFunction: High value reached"); } if (!isHigh && (val < -25)) { Base.log.add("RandomFunction: Low value reached"); } } } public void run() { Random random = new Random(); while (!stop) { try { value = 50 - (100 * random.nextDouble()); // System.out.print(value + " "); note(value); Thread.sleep(100); } catch (InterruptedException e) { Base.log.add("Monotonous Function: Thread Interrupted."); e.printStackTrace(); synchronized (sync) { sync.notify(); } } } } }
apache-2.0
kamalmarhubi/bazel
src/main/java/com/google/devtools/build/lib/bazel/rules/java/BazelJavaSemantics.java
13220
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.bazel.rules.java;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.ComputedSubstitution;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.Substitution;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.Template;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression;
import com.google.devtools.build.lib.rules.java.JavaCommon;
import com.google.devtools.build.lib.rules.java.JavaCompilationArtifacts;
import com.google.devtools.build.lib.rules.java.JavaCompilationHelper;
import com.google.devtools.build.lib.rules.java.JavaConfiguration;
import com.google.devtools.build.lib.rules.java.JavaHelper;
import com.google.devtools.build.lib.rules.java.JavaPrimaryClassProvider;
import com.google.devtools.build.lib.rules.java.JavaSemantics;
import com.google.devtools.build.lib.rules.java.JavaTargetAttributes;
import com.google.devtools.build.lib.rules.java.JavaUtil;
import com.google.devtools.build.lib.rules.java.Jvm;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.ShellEscaper;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.PathFragment;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Semantics for Bazel Java rules.
 *
 * <p>Stateless singleton ({@link #INSTANCE}) implementing the open-source
 * flavor of {@link JavaSemantics}: main-class validation, launcher stub
 * generation, resource collection, and the various hooks the shared Java
 * rule machinery calls. Most hooks are intentional no-ops in Bazel.
 */
public class BazelJavaSemantics implements JavaSemantics {

  // Singleton; the class has no mutable state, so one shared instance suffices.
  public static final BazelJavaSemantics INSTANCE = new BazelJavaSemantics();

  // Shell-script template expanded into the wrapper that launches the JVM.
  private static final Template STUB_SCRIPT =
      Template.forResource(BazelJavaSemantics.class, "java_stub_template.txt");

  // Entry point of the bundled Java compiler driver.
  private static final String JAVABUILDER_CLASS_NAME =
      "com.google.devtools.build.buildjar.BazelJavaBuilder";

  // Private: use INSTANCE.
  private BazelJavaSemantics() {
  }

  /** True for the two executable Java rule classes (java_binary / java_test). */
  private boolean isJavaBinaryOrJavaTest(RuleContext ruleContext) {
    String ruleClass = ruleContext.getRule().getRuleClass();
    return ruleClass.equals("java_binary") || ruleClass.equals("java_test");
  }

  @Override
  public void checkRule(RuleContext ruleContext, JavaCommon javaCommon) {
    // Only executables need a valid main class.
    if (isJavaBinaryOrJavaTest(ruleContext)) {
      checkMainClass(ruleContext, javaCommon);
    }
  }

  /** Raw main_class attribute value, or "" when the rule has no such attribute. */
  private String getMainClassInternal(RuleContext ruleContext) {
    return ruleContext.getRule().isAttrDefined("main_class", Type.STRING)
        ? ruleContext.attributes().get("main_class", Type.STRING) : "";
  }

  /**
   * Validates the main_class / create_executable combination, reporting rule
   * errors for: main_class without an executable, and an executable whose
   * main class can neither be read from the attribute nor inferred from the
   * source files' package layout.
   */
  private void checkMainClass(RuleContext ruleContext, JavaCommon javaCommon) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    String mainClass = getMainClassInternal(ruleContext);

    if (!createExecutable && !mainClass.isEmpty()) {
      ruleContext.ruleError("main class must not be specified when executable is not created");
    }

    if (createExecutable && mainClass.isEmpty()) {
      if (javaCommon.getSrcsArtifacts().isEmpty()) {
        ruleContext.ruleError(
            "need at least one of 'main_class', 'use_testrunner' or Java source files");
      }
      // Fall back to inferring the main class from the sources.
      mainClass = javaCommon.determinePrimaryClass(javaCommon.getSrcsArtifacts());
      if (mainClass == null) {
        ruleContext.ruleError("cannot determine main class for launching "
            + "(found neither a source file '" + ruleContext.getTarget().getName()
            + ".java', nor a main_class attribute, and package name "
            + "doesn't include 'java' or 'javatests')");
      }
    }
  }

  @Override
  public String getMainClass(RuleContext ruleContext, JavaCommon javaCommon) {
    // Re-validates before returning; errors are reported on the rule context.
    checkMainClass(ruleContext, javaCommon);
    return getMainClassInternal(ruleContext);
  }

  @Override
  public ImmutableList<Artifact> collectResources(RuleContext ruleContext) {
    // Rules without a 'resources' attribute contribute no resources.
    if (!ruleContext.getRule().isAttrDefined("resources", BuildType.LABEL_LIST)) {
      return ImmutableList.of();
    }

    return ruleContext.getPrerequisiteArtifacts("resources", Mode.TARGET).list();
  }

  @Override
  public Artifact createInstrumentationMetadataArtifact(
      RuleContext ruleContext, Artifact outputJar) {
    // Bazel does not produce instrumentation metadata here.
    return null;
  }

  @Override
  public Iterable<Artifact> getInstrumentationJars(RuleContext context) {
    return ImmutableList.of();
  }

  @Override
  public void buildJavaCommandLine(Collection<Artifact> outputs, BuildConfiguration configuration,
      CustomCommandLine.Builder result) {
    // No Bazel-specific additions to the Java command line.
  }

  /**
   * Writes the executable wrapper script for a java_binary/java_test by
   * expanding {@link #STUB_SCRIPT} with the workspace prefix, JVM path,
   * lazily-computed runtime classpath, start class and JVM flags.
   */
  @Override
  public void createStubAction(RuleContext ruleContext, final JavaCommon javaCommon,
      List<String> jvmFlags, Artifact executable, String javaStartClass,
      String javaExecutable) {
    Preconditions.checkState(ruleContext.getConfiguration().hasFragment(Jvm.class));
    Preconditions.checkNotNull(jvmFlags);
    Preconditions.checkNotNull(executable);
    Preconditions.checkNotNull(javaStartClass);
    Preconditions.checkNotNull(javaExecutable);

    List<Substitution> arguments = new ArrayList<>();
    String workspacePrefix = ruleContext.getWorkspaceName();
    if (!workspacePrefix.isEmpty()) {
      workspacePrefix += "/";
    }
    arguments.add(Substitution.of("%workspace_prefix%", workspacePrefix));
    arguments.add(Substitution.of("%javabin%", javaExecutable));
    // An absolute java path needs no runfiles tree to locate the JVM.
    arguments.add(Substitution.of("%needs_runfiles%",
        ruleContext.getFragment(Jvm.class).getJavaExecutable().isAbsolute() ? "0" : "1"));
    // Computed lazily so the runtime classpath is only flattened when the
    // template is actually expanded.
    arguments.add(new ComputedSubstitution("%classpath%") {
      @Override
      public String getValue() {
        StringBuilder buffer = new StringBuilder();
        Iterable<Artifact> jars = javaCommon.getRuntimeClasspath();
        appendRunfilesRelativeEntries(buffer, jars, ':');
        return buffer.toString();
      }
    });
    arguments.add(Substitution.of("%java_start_class%",
        ShellEscaper.escapeString(javaStartClass)));
    arguments.add(Substitution.ofSpaceSeparatedList("%jvm_flags%", jvmFlags));

    // Final 'true' marks the expanded output as executable — TODO confirm
    // against TemplateExpansionAction's constructor.
    ruleContext.registerAction(new TemplateExpansionAction(
        ruleContext.getActionOwner(), executable, STUB_SCRIPT, arguments, true));
  }

  /**
   * Builds a class path by concatenating the root relative paths of the artifacts separated by the
   * delimiter. Each relative path entry is prepended with "${RUNPATH}" which will be expanded by
   * the stub script at runtime, to either "${JAVA_RUNFILES}/" or if we are lucky, the empty
   * string.
   *
   * @param buffer the buffer to use for concatenating the entries
   * @param artifacts the entries to concatenate in the buffer
   * @param delimiter the delimiter character to separate the entries
   */
  private static void appendRunfilesRelativeEntries(StringBuilder buffer,
      Iterable<Artifact> artifacts, char delimiter) {
    for (Artifact artifact : artifacts) {
      if (buffer.length() > 0) {
        buffer.append(delimiter);
      }
      buffer.append("${RUNPATH}");
      buffer.append(artifact.getRootRelativePath().getPathString());
    }
  }

  @Override
  public void addRunfilesForBinary(RuleContext ruleContext, Artifact launcher,
      Runfiles.Builder runfilesBuilder) {
    // No extra binary runfiles in Bazel.
  }

  @Override
  public void addRunfilesForLibrary(RuleContext ruleContext, Runfiles.Builder runfilesBuilder) {
    // No extra library runfiles in Bazel.
  }

  @Override
  public void collectTargetsTreatedAsDeps(
      RuleContext ruleContext, ImmutableList.Builder<TransitiveInfoCollection> builder) {
    // No implicit deps added in Bazel.
  }

  @Override
  public Iterable<String> getExtraJavacOpts(RuleContext ruleContext) {
    return ImmutableList.<String>of();
  }

  @Override
  public void addProviders(RuleContext ruleContext,
      JavaCommon javaCommon,
      List<String> jvmFlags,
      Artifact classJar,
      Artifact srcJar,
      Artifact genJar,
      Artifact gensrcJar,
      ImmutableMap<Artifact, Artifact> compilationToRuntimeJarMap,
      JavaCompilationHelper helper,
      NestedSetBuilder<Artifact> filesBuilder,
      RuleConfiguredTargetBuilder ruleBuilder) {
    // Executable rules expose their main class (null when not executable).
    if (isJavaBinaryOrJavaTest(ruleContext)) {
      boolean createExec = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
      ruleBuilder.add(JavaPrimaryClassProvider.class,
          new JavaPrimaryClassProvider(createExec ? getMainClassInternal(ruleContext) : null));
    }
  }

  @Override
  public Iterable<String> getJvmFlags(
      RuleContext ruleContext, JavaCommon javaCommon, List<String> userJvmFlags) {
    // User flags pass through unmodified.
    return userJvmFlags;
  }

  @Override
  public String addCoverageSupport(JavaCompilationHelper helper,
      JavaTargetAttributes.Builder attributes,
      Artifact executable,
      Artifact instrumentationMetadata,
      JavaCompilationArtifacts.Builder javaArtifactsBuilder,
      String mainClass) {
    // Coverage is not wired in; the main class is returned untouched.
    return mainClass;
  }

  @Override
  public CustomCommandLine buildSingleJarCommandLine(BuildConfiguration configuration,
      Artifact output, String mainClass, ImmutableList<String> manifestLines,
      Iterable<Artifact> buildInfoFiles, ImmutableList<Artifact> resources,
      Iterable<Artifact> classpath, boolean includeBuildData,
      Compression compression, Artifact launcher) {
    // Delegates to the default deploy-jar command line.
    return DeployArchiveBuilder.defaultSingleJarCommandLine(output, mainClass, manifestLines,
        buildInfoFiles, resources, classpath, includeBuildData, compression, launcher).build();
  }

  @Override
  public Collection<Artifact> translate(RuleContext ruleContext, JavaConfiguration javaConfig,
      List<Artifact> messages) {
    // Message translation is unsupported in Bazel.
    return ImmutableList.<Artifact>of();
  }

  @Override
  public Artifact getLauncher(RuleContext ruleContext, JavaCommon common,
      DeployArchiveBuilder deployArchiveBuilder, Runfiles.Builder runfilesBuilder,
      List<String> jvmFlags, JavaTargetAttributes.Builder attributesBuilder,
      boolean shouldStrip) {
    return JavaHelper.launcherArtifactForTarget(this, ruleContext);
  }

  @Override
  public void addDependenciesForRunfiles(RuleContext ruleContext, Runfiles.Builder builder) {
    // No-op in Bazel.
  }

  @Override
  public boolean forceUseJavaLauncherTarget(RuleContext ruleContext) {
    return false;
  }

  @Override
  public void addArtifactToJavaTargetAttribute(JavaTargetAttributes.Builder builder,
      Artifact srcArtifact) {
    // No-op in Bazel.
  }

  @Override
  public void commonDependencyProcessing(RuleContext ruleContext,
      JavaTargetAttributes.Builder attributes,
      Collection<? extends TransitiveInfoCollection> deps) {
    // No-op in Bazel.
  }

  @Override
  public PathFragment getDefaultJavaResourcePath(PathFragment path) {
    // Look for src/.../resources to match Maven repository structure.
    for (int i = 0; i < path.segmentCount() - 2; ++i) {
      if (path.getSegment(i).equals("src") && path.getSegment(i + 2).equals("resources")) {
        return path.subFragment(i + 3, path.segmentCount());
      }
    }

    // Otherwise, strip everything up to and including the Java source root.
    PathFragment javaPath = JavaUtil.getJavaPath(path);
    return javaPath == null ? path : javaPath;
  }

  /**
   * For java_test with neither explicit 'args' nor configuration test
   * arguments, defaults the arguments to the fully-qualified class names
   * derived from the test's source files.
   */
  @Override
  public List<String> getExtraArguments(RuleContext ruleContext, JavaCommon javaCommon) {
    if (ruleContext.getRule().getRuleClass().equals("java_test")) {
      if (ruleContext.getConfiguration().getTestArguments().isEmpty()
          && !ruleContext.attributes().isAttributeValueExplicitlySpecified("args")) {
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (Artifact artifact : javaCommon.getSrcsArtifacts()) {
          PathFragment path = artifact.getRootRelativePath();
          String className = JavaUtil.getJavaFullClassname(FileSystemUtils.removeExtension(path));
          if (className != null) {
            builder.add(className);
          }
        }
        return builder.build();
      }
    }
    return ImmutableList.<String>of();
  }

  @Override
  public String getJavaBuilderMainClass() {
    return JAVABUILDER_CLASS_NAME;
  }
}
apache-2.0
alancnet/artifactory
web/common/src/main/java/org/artifactory/common/wicket/component/label/tooltip/TooltipLabel.java
2902
/* * Artifactory is a binaries repository manager. * Copyright (C) 2012 JFrog Ltd. * * Artifactory is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Artifactory is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Artifactory. If not, see <http://www.gnu.org/licenses/>. */ package org.artifactory.common.wicket.component.label.tooltip; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.MarkupStream; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import org.artifactory.common.wicket.WicketProperty; import org.artifactory.common.wicket.behavior.tooltip.TooltipBehavior; /** * @author Yoav Aharoni */ public class TooltipLabel extends Label { /** * Maximum length of the text in the label. 
If the text the column will display characters up to the maxLength * and trailing dots ('...') */ private int maxLength; @WicketProperty private transient String tooltip; private transient String text; public TooltipLabel(String id, String label, int maxLength) { this(id, Model.of(label), maxLength); } public TooltipLabel(String id, IModel model, int maxLength) { super(id, model); this.maxLength = maxLength; add(new TooltipBehavior(new PropertyModel(this, "tooltip"))); } @Override protected void onBeforeRender() { super.onBeforeRender(); Object modelObject = getDefaultModelObject(); if (modelObject == null) { text = null; tooltip = null; return; } String modelObjectString = modelObject.toString(); if (modelObjectString.length() > maxLength) { text = getDefaultModelObjectAsString(modelObjectString.substring(0, maxLength)) + "..."; tooltip = getDefaultModelObjectAsString(modelObjectString); } else { text = getDefaultModelObjectAsString(modelObjectString); tooltip = null; } } protected void setText(String text) { this.text = text; } protected void setTooltip(String tooltip) { this.tooltip = tooltip; } @Override public void onComponentTagBody(MarkupStream markupStream, ComponentTag openTag) { replaceComponentTagBody(markupStream, openTag, text); } }
apache-2.0
downdrown/VaadinHighChartsAPI
src/main/java/at/downdrown/vaadinaddons/highchartsapi/model/data/base/FloatData.java
512
package at.downdrown.vaadinaddons.highchartsapi.model.data.base;

/**
 * {@link HighChartsBaseData} implementation wrapping a single {@code float}
 * value, rendered for HighCharts exactly as Java formats the float.
 */
public class FloatData implements HighChartsBaseData {

    private float value;

    public FloatData(float value) {
        this.value = value;
    }

    /** @return the wrapped float value. */
    public float getValue() {
        return value;
    }

    /** Replaces the wrapped float value. */
    public void setValue(float value) {
        this.value = value;
    }

    @Override
    public String getHighChartValue() {
        // Float.toString(f) is identical to String.valueOf(f).
        return Float.toString(value);
    }

    @Override
    public String toString() {
        return getHighChartValue();
    }
}
apache-2.0
juddgaddie/sshj
src/main/java/net/schmizz/sshj/xfer/scp/ScpCommandLine.java
3278
package net.schmizz.sshj.xfer.scp;

import java.util.LinkedHashMap;

/**
 * Command line to be sent to the remote SSH process to set up an SCP process
 * in the correct mode. Flags are accumulated in insertion order; the path is
 * escaped according to the chosen {@link EscapeMode} and defaults to "." when
 * absent.
 */
public class ScpCommandLine {
    private static final String SCP_COMMAND = "scp";

    private EscapeMode mode;

    /** SCP flags; each renders as "-" plus its single-character option. */
    enum Arg {
        SOURCE('f'),
        SINK('t'),
        RECURSIVE('r'),
        VERBOSE('v'),
        PRESERVE_TIMES('p'),
        QUIET('q'),
        LIMIT('l');

        private final char a;

        private Arg(char a) {
            this.a = a;
        }

        @Override
        public String toString() {
            return "-" + a;
        }
    }

    /** How the remote path is escaped when placed on the command line. */
    public enum EscapeMode {
        NoEscape,
        Space {
            @Override
            String escapedPath(String path) {
                return path.replace(" ", "\\ ");
            }
        },
        DoubleQuote {
            @Override
            String escapedPath(String path) {
                return "\"" + path.replace("\"", "\\\"") + "\"";
            }
        },
        SingleQuote {
            @Override
            String escapedPath(String path) {
                return "'" + path.replace("'", "\\'") + "'";
            }
        };

        // Default: path passes through unchanged.
        String escapedPath(String path) {
            return path;
        }
    }

    // LinkedHashMap keeps flags in the order they were added.
    private LinkedHashMap<Arg, String> arguments = new LinkedHashMap<Arg, String>();
    private String path;

    ScpCommandLine() {
    }

    static ScpCommandLine with(Arg name) {
        return with(name, null, true);
    }

    static ScpCommandLine with(Arg name, String value) {
        return with(name, value, true);
    }

    static ScpCommandLine with(Arg name, boolean accept) {
        return with(name, null, accept);
    }

    static ScpCommandLine with(Arg name, String value, boolean accept) {
        final ScpCommandLine line = new ScpCommandLine();
        line.addArgument(name, value, accept);
        return line;
    }

    // Flags are only recorded when 'accept' is true; value may be null.
    private void addArgument(Arg name, String value, boolean accept) {
        if (!accept) {
            return;
        }
        arguments.put(name, value);
    }

    ScpCommandLine and(Arg name) {
        addArgument(name, null, true);
        return this;
    }

    ScpCommandLine and(Arg name, String value) {
        addArgument(name, value, true);
        return this;
    }

    ScpCommandLine and(Arg name, boolean accept) {
        addArgument(name, null, accept);
        return this;
    }

    ScpCommandLine and(Arg name, String value, boolean accept) {
        addArgument(name, value, accept);
        return this;
    }

    ScpCommandLine withPath(String path, EscapeMode mode) {
        this.path = path;
        this.mode = mode;
        return this;
    }

    /** Renders the accumulated flags and path into the final command string. */
    String toCommandLine() {
        final StringBuilder line = new StringBuilder(SCP_COMMAND);
        for (java.util.Map.Entry<Arg, String> entry : arguments.entrySet()) {
            line.append(" ").append(entry.getKey());
            final String value = entry.getValue();
            // A non-blank value is appended directly after the flag (no space).
            if (value != null && !value.trim().isEmpty()) {
                line.append(value);
            }
        }
        line.append(" ");
        if (path == null || path.trim().isEmpty()) {
            line.append(".");
        } else {
            line.append(mode.escapedPath(path));
        }
        return line.toString();
    }
}
apache-2.0
akarnokd/Reactive4JavaFlow
src/test/java/hu/akarnokd/reactive4javaflow/tck/WindowExactSizeTckTest.java
1125
/*
 * Copyright 2017 David Karnok
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package hu.akarnokd.reactive4javaflow.tck;

import hu.akarnokd.reactive4javaflow.Folyam;

import org.testng.annotations.Test;

import java.util.List;
import java.util.concurrent.Flow;

/**
 * Reactive Streams TCK test exercising {@code Folyam.window(int)} with an
 * exact (non-overlapping, size 2) window, flattening the windows back so the
 * TCK can verify the resulting publisher.
 */
@Test
public class WindowExactSizeTckTest extends BaseTck<List<Long>> {

    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Override
    public Flow.Publisher<List<Long>> createFlowPublisher(long elements) {
        // Raw cast re-flattens the inner windows; the unchecked conversion to
        // Publisher<List<Long>> is deliberate and suppressed above.
        return Folyam.fromIterable(iterate(elements))
                .window(2)
                .flatMap(v -> (Folyam)v)
        ;
    }
}
apache-2.0
JihongMA/incubator-carbondata
processing/src/main/java/org/apache/carbondata/processing/newflow/dictionary/DictionaryServerClientDictionary.java
3228
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.carbondata.processing.newflow.dictionary;

import java.util.Map;

import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.devapi.BiDictionary;
import org.apache.carbondata.core.devapi.DictionaryGenerationException;
import org.apache.carbondata.core.dictionary.client.DictionaryClient;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryKey;

/**
 * Dictionary implementation along with dictionary server client to get new dictionary values.
 *
 * <p>Lookups go first to the pre-loaded {@code dictionary}, then to the local
 * cache of server-generated ids; misses are resolved by asking the dictionary
 * server via {@code client}. Server-generated ids are offset by {@code base}
 * (the size of the pre-existing dictionary) so they do not collide with
 * pre-loaded surrogate keys.
 */
public class DictionaryServerClientDictionary implements BiDictionary<Integer, Object> {

  // Pre-loaded dictionary; may be null when no dictionary exists yet.
  private Dictionary dictionary;

  // Client used to ask the dictionary server for new surrogate keys.
  private DictionaryClient client;

  // value -> server-generated id (WITHOUT the base offset applied).
  private Map<Object, Integer> localCache;

  // Reusable request key; mutated per call — NOTE(review): this makes the
  // instance effectively single-request-at-a-time for server calls.
  private DictionaryKey dictionaryKey;

  // Offset added to server-generated ids; one less than the pre-loaded
  // dictionary chunk size, or 0 when there is no dictionary.
  private int base;

  // Guards the request/response round-trip and localCache update.
  private Object lock = new Object();

  public DictionaryServerClientDictionary(Dictionary dictionary, DictionaryClient client,
      DictionaryKey key, Map<Object, Integer> localCache) {
    this.dictionary = dictionary;
    this.client = client;
    this.dictionaryKey = key;
    this.localCache = localCache;
    this.base = (dictionary == null ? 0 : dictionary.getDictionaryChunks().getSize() - 1);
  }

  /**
   * Returns the surrogate key for {@code value}, asking the dictionary server
   * to generate one when neither the pre-loaded dictionary nor the local
   * cache knows it.
   *
   * <p>NOTE(review): classic check-then-act — {@link #getKey} is called
   * outside the lock and the cache is not re-checked inside it, so two
   * threads racing on the same value may both round-trip to the server.
   * Presumably the server returns the same id for both; confirm.
   */
  @Override public Integer getOrGenerateKey(Object value) throws DictionaryGenerationException {
    Integer key = getKey(value);
    if (key == null) {
      synchronized (lock) {
        dictionaryKey.setData(value);
        dictionaryKey.setThreadNo(Thread.currentThread().getId() + "");
        DictionaryKey dictionaryValue = client.getDictionary(dictionaryKey);
        key = (Integer) dictionaryValue.getData();
        // Cache the raw (un-offset) id; base is applied on read.
        localCache.put(value, key);
      }
      return key + base;
    }
    return key;
  }

  /**
   * Looks up {@code value} in the pre-loaded dictionary, then the local
   * cache. Returns {@code null} when the value is unknown to both (this null
   * is what triggers server-side generation in {@link #getOrGenerateKey}).
   */
  @Override public Integer getKey(Object value) {
    Integer key = -1;
    if (dictionary != null) {
      key = dictionary.getSurrogateKey(value.toString());
    }
    if (key == CarbonCommonConstants.INVALID_SURROGATE_KEY) {
      // Cached ids are stored without the base offset; add it here.
      key = localCache.get(value);
      if (key != null) {
        return key + base;
      }
    }
    return key;
  }

  @Override public Object getValue(Integer key) {
    // Reverse lookup is not needed on the load path.
    throw new UnsupportedOperationException("Not supported here");
  }

  /**
   * Total dictionary size: the server's current count plus the pre-loaded
   * base offset. Mutates the shared request key's type to "SIZE".
   */
  @Override public int size() {
    dictionaryKey.setType("SIZE");
    int size = (int) client.getDictionary(dictionaryKey).getData() + base;
    return size;
  }
}
apache-2.0
yuri0x7c1/ofbiz-explorer
src/test/resources/apache-ofbiz-17.12.04/framework/entityext/src/main/java/org/apache/ofbiz/entityext/eca/EntityEcaCondition.java
6445
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.apache.ofbiz.entityext.eca;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.ObjectType;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.entity.GenericEntity;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.service.DispatchContext;
import org.w3c.dom.Element;

/**
 * EntityEcaCondition
 *
 * <p>One condition of an Entity ECA (event-condition-action) rule, parsed
 * from an XML element. Compares an entity field (the LHS) against either a
 * literal string (when {@code constant} is true) or another field of the same
 * entity, using {@code ObjectType.doRealCompare} with the configured
 * operator, type and format. Immutable after construction.
 */
@SuppressWarnings("serial")
public final class EntityEcaCondition implements java.io.Serializable {

    public static final String module = EntityEcaCondition.class.getName();

    // LHS is always a field name; RHS is a literal value or a field name
    // depending on 'constant'.
    private final String lhsValueName, rhsValueName;
    private final String operator;
    private final String compareType;
    private final String format;
    private final boolean constant;

    /**
     * Parses the condition from its XML element. When {@code constant} is
     * true the RHS comes from the "value" attribute, otherwise from
     * "to-field-name".
     */
    public EntityEcaCondition(Element condition, boolean constant) {
        this.lhsValueName = condition.getAttribute("field-name");
        this.constant = constant;
        if (constant) {
            this.rhsValueName = condition.getAttribute("value");
        } else {
            this.rhsValueName = condition.getAttribute("to-field-name");
        }
        this.operator = condition.getAttribute("operator");
        this.compareType = condition.getAttribute("type");
        this.format = condition.getAttribute("format");
    }

    /**
     * Evaluates this condition against the given entity value.
     *
     * @param dctx dispatch context supplying the class loader for type conversion
     * @param value entity whose fields provide the LHS (and non-constant RHS)
     * @return the comparison result; false when the comparison itself yields null
     * @throws GenericEntityException when dctx, value or the class loader is null
     */
    public boolean eval(DispatchContext dctx, GenericEntity value) throws GenericEntityException {
        if (dctx == null || value == null || dctx.getClassLoader() == null) {
            throw new GenericEntityException("Cannot have null Value or DispatchContext!");
        }

        if (Debug.verboseOn()) Debug.logVerbose(this.toString(), module);

        Object lhsValue = value.get(lhsValueName);

        Object rhsValue;
        if (constant) {
            // RHS is the literal string from the rule definition.
            rhsValue = rhsValueName;
        } else {
            rhsValue = value.get(rhsValueName);
        }

        if (Debug.verboseOn()) Debug.logVerbose("Comparing : " + lhsValue + " " + operator + " " + rhsValue, module);

        // evaluate the condition & invoke the action(s)
        List<Object> messages = new LinkedList<Object>();
        Boolean cond = ObjectType.doRealCompare(lhsValue, rhsValue, operator, compareType, format, messages, null, dctx.getClassLoader(), constant);

        // if any messages were returned send them out
        if (messages.size() > 0) {
            for (Object message: messages) {
                Debug.logWarning((String) message, module);
            }
        }
        if (cond != null) {
            return cond.booleanValue();
        } else {
            // Null comparison result is treated as "condition not met".
            return false;
        }
    }

    public String getLValue() {
        return this.lhsValueName;
    }

    /** RHS as written in the rule; constant values are quoted for display. */
    public String getRValue() {
        if (constant && !rhsValueName.isEmpty()) {
            return "\"".concat(this.rhsValueName).concat("\"");
        }
        return this.rhsValueName;
    }

    public String getOperator() {
        return this.operator;
    }

    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder();
        if (UtilValidate.isNotEmpty(lhsValueName)) buf.append("[").append(lhsValueName).append("]");
        if (UtilValidate.isNotEmpty(operator)) buf.append("[").append(operator).append("]");
        if (UtilValidate.isNotEmpty(rhsValueName)) buf.append("[").append(rhsValueName).append("]");
        // NOTE(review): 'constant' is a boolean primitive, so isNotEmpty(...)
        // receives a boxed Boolean and is presumably always true here — the
        // flag is always printed; confirm against UtilValidate's semantics.
        if (UtilValidate.isNotEmpty(constant)) buf.append("[").append(constant).append("]");
        if (UtilValidate.isNotEmpty(compareType)) buf.append("[").append(compareType).append("]");
        if (UtilValidate.isNotEmpty(format)) buf.append("[").append(format).append("]");
        return buf.toString();
    }

    // hashCode/equals consider all six components, consistent with each other.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((lhsValueName == null) ? 0 : lhsValueName.hashCode());
        result = prime * result + ((operator == null) ? 0 : operator.hashCode());
        result = prime * result + ((rhsValueName == null) ? 0 : rhsValueName.hashCode());
        result = prime * result + (constant ? 1231 : 1237);
        result = prime * result + ((compareType == null) ? 0 : compareType.hashCode());
        result = prime * result + ((format == null) ? 0 : format.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof EntityEcaCondition) {
            EntityEcaCondition other = (EntityEcaCondition) obj;

            if (!UtilValidate.areEqual(this.lhsValueName, other.lhsValueName)) return false;
            if (!UtilValidate.areEqual(this.rhsValueName, other.rhsValueName)) return false;
            if (!UtilValidate.areEqual(this.operator, other.operator)) return false;
            if (!UtilValidate.areEqual(this.compareType, other.compareType)) return false;
            if (!UtilValidate.areEqual(this.format, other.format)) return false;

            if (this.constant != other.constant) return false;

            return true;
        } else {
            return false;
        }
    }

    /** Field names this condition reads (LHS, plus RHS when it is a field). */
    protected List<String> getFieldNames() {
        List<String> fieldNameList = new ArrayList<String>();
        if( UtilValidate.isNotEmpty(lhsValueName) ) {
            fieldNameList.add(lhsValueName);
        }
        if(!constant && UtilValidate.isNotEmpty(rhsValueName)) {
            fieldNameList.add(rhsValueName);
        }
        return fieldNameList;
    }
}
apache-2.0
ctc-g/sinavi-jfw
jdbc/jfw-mybatis-core/src/test/java/jp/co/ctc_g/jfw/core/jdbc/JxSqlSessionFactoryTest.java
1693
/* * Copyright (c) 2013 ITOCHU Techno-Solutions Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jp.co.ctc_g.jfw.core.jdbc; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.defaults.DefaultSqlSession; import org.apache.ibatis.session.defaults.DefaultSqlSessionFactory; import org.junit.Before; import org.junit.Test; public class JxSqlSessionFactoryTest { protected JxSqlSessionFactory factory; @Before public void instantiate() { factory = new JxSqlSessionFactory(new DefaultSqlSessionFactory(null)); } @Test public void SqlSessionをJxSqlSessionに加工して返却する() { Configuration config = new Configuration(); SqlSession session = new DefaultSqlSession(config, null); SqlSession wrapped = factory.wrap(session); assertThat(wrapped, is(instanceOf(JxSqlSession.class))); assertThat(wrapped.getConfiguration(), is(config)); // delegate 確認 } }
apache-2.0
petegerhat/dictionaryserver
src/IO/ConnectionIO.java
1144
package IO;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.Socket;

/**
 * Line-oriented, UTF-8 text I/O wrapper around a connected client {@link Socket}.
 *
 * <p>Messages are written one per line via {@link #send(String)} and read one
 * per line via {@link #receive()}. The socket's streams are opened once in the
 * constructor; closing the socket itself remains the caller's responsibility.
 */
public class ConnectionIO {

    // Both streams are opened in the constructor and never reassigned.
    private final PrintWriter output;
    private final BufferedReader input;

    /**
     * Opens UTF-8 reader/writer streams on the given connected socket.
     *
     * @param client a connected socket to wrap
     * @throws RuntimeException if the socket's streams cannot be opened.
     *         (Previously the IOException was swallowed with printStackTrace(),
     *         leaving both streams null and guaranteeing a NullPointerException
     *         on first use — fail fast instead, preserving the cause.)
     */
    public ConnectionIO(Socket client) {
        try {
            output = new PrintWriter(new OutputStreamWriter(client.getOutputStream(), "UTF-8"));
            input = new BufferedReader(new InputStreamReader(client.getInputStream(), "UTF-8"));
        } catch (IOException e) {
            throw new RuntimeException("Failed to open I/O streams for socket", e);
        }
    }

    /**
     * Writes {@code message} followed by a line terminator and flushes,
     * so the peer receives it immediately.
     *
     * @param message the line to send (must not contain embedded newlines,
     *        or the peer will read it as multiple messages)
     */
    public void send(String message) {
        System.out.println("Sending message: " + message);
        output.println(message);
        output.flush();
    }

    /**
     * Reads one line from the peer, blocking until it arrives.
     *
     * @return the next line without its terminator, or {@code null} at
     *         end of stream (peer closed the connection)
     * @throws RuntimeException if an I/O error occurs while reading.
     *         (Previously errors were swallowed and reported as null,
     *         indistinguishable from a clean end of stream.)
     */
    public String receive() {
        try {
            return input.readLine();
        } catch (IOException e) {
            throw new RuntimeException("Failed to read from socket", e);
        }
    }
}
apache-2.0
iweiss/activemq-artemis
artemis-protocols/artemis-mqtt-protocol/src/main/java/org/apache/activemq/artemis/core/protocol/mqtt/MQTTConnection.java
7280
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.protocol.mqtt;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.remoting.CloseListener;
import org.apache.activemq.artemis.core.remoting.FailureListener;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.Connection;
import org.apache.activemq.artemis.spi.core.remoting.ReadyListener;

import javax.security.auth.Subject;

/**
 * MQTT implementation of the broker's {@link RemotingConnection} SPI.
 *
 * <p>Transport concerns (writability, addresses, buffers, closing) are
 * delegated to the wrapped {@link Connection}. This class additionally tracks
 * MQTT-level state visible in this file: the client ID, a connected flag, a
 * destroyed flag, and a data-received flag used for activity checking.
 *
 * <p>Failure and close listeners are kept in {@link CopyOnWriteArrayList}s so
 * registration is thread-safe; bulk set/remove operations are not atomic
 * (clear followed by addAll).
 */
public class MQTTConnection implements RemotingConnection {

   // Underlying transport; all network-level operations delegate to it.
   private final Connection transportConnection;

   // Wall-clock time at which this connection object was created.
   private final long creationTime;

   // Set when data arrives, cleared by checkDataReceived() — a test-and-reset
   // activity flag (used for keep-alive/TTL style checks by the caller).
   private AtomicBoolean dataReceived;

   private boolean destroyed;

   private boolean connected;

   // MQTT client identifier, assigned via setClientID().
   private String clientID;

   private final List<FailureListener> failureListeners = new CopyOnWriteArrayList<>();

   private final List<CloseListener> closeListeners = new CopyOnWriteArrayList<>();

   public MQTTConnection(Connection transportConnection) throws Exception {
      this.transportConnection = transportConnection;
      this.creationTime = System.currentTimeMillis();
      this.dataReceived = new AtomicBoolean();
      this.destroyed = false;
   }

   @Override
   public void scheduledFlush() {
      flush();
   }

   /** Writable only while the transport reports both writable and open. */
   @Override
   public boolean isWritable(ReadyListener callback) {
      return transportConnection.isWritable(callback) && transportConnection.isOpen();
   }

   @Override
   public Object getID() {
      return transportConnection.getID();
   }

   @Override
   public long getCreationTime() {
      return creationTime;
   }

   @Override
   public String getRemoteAddress() {
      return transportConnection.getRemoteAddress();
   }

   @Override
   public void addFailureListener(FailureListener listener) {
      failureListeners.add(listener);
   }

   @Override
   public boolean removeFailureListener(FailureListener listener) {
      return failureListeners.remove(listener);
   }

   @Override
   public void addCloseListener(CloseListener listener) {
      closeListeners.add(listener);
   }

   @Override
   public boolean removeCloseListener(CloseListener listener) {
      return closeListeners.remove(listener);
   }

   /** Removes all close listeners and returns a snapshot of what was removed. */
   @Override
   public List<CloseListener> removeCloseListeners() {
      List<CloseListener> deletedCloseListeners = copyCloseListeners();
      closeListeners.clear();
      return deletedCloseListeners;
   }

   /** Replaces all close listeners. NOTE(review): clear+addAll is not atomic. */
   @Override
   public void setCloseListeners(List<CloseListener> listeners) {
      closeListeners.clear();
      closeListeners.addAll(listeners);
   }

   @Override
   public List<FailureListener> getFailureListeners() {
      return failureListeners;
   }

   /** Removes all failure listeners and returns a snapshot of what was removed. */
   @Override
   public List<FailureListener> removeFailureListeners() {
      List<FailureListener> deletedFailureListeners = copyFailureListeners();
      failureListeners.clear();
      return deletedFailureListeners;
   }

   /** Replaces all failure listeners. NOTE(review): clear+addAll is not atomic. */
   @Override
   public void setFailureListeners(List<FailureListener> listeners) {
      failureListeners.clear();
      failureListeners.addAll(listeners);
   }

   @Override
   public ActiveMQBuffer createTransportBuffer(int size) {
      return transportConnection.createTransportBuffer(size);
   }

   /**
    * Notifies every registered failure listener of {@code me}. Iterates over a
    * snapshot so listeners may deregister themselves during the callback.
    * The {@code failedOver} argument is always {@code false} here.
    */
   @Override
   public void fail(ActiveMQException me) {
      List<FailureListener> copy = copyFailureListeners();
      for (FailureListener listener : copy) {
         listener.connectionFailed(me, false);
      }
   }

   private List<FailureListener> copyFailureListeners() {
      return new ArrayList<>(failureListeners);
   }

   private List<CloseListener> copyCloseListeners() {
      return new ArrayList<>(closeListeners);
   }

   /**
    * Scale-down variant of {@link #fail(ActiveMQException)}. The
    * {@code scaleDownTargetNodeID} is currently ignored.
    * NOTE(review): unlike fail(me) this synchronizes on the (copy-on-write)
    * list instead of iterating a snapshot — the styles are inconsistent.
    */
   @Override
   public void fail(ActiveMQException me, String scaleDownTargetNodeID) {
      synchronized (failureListeners) {
         for (FailureListener listener : failureListeners) {
            //FIXME(mtaylor) How do we check if the node has failed over?
            listener.connectionFailed(me, false);
         }
      }
   }

   /** Marks this connection destroyed and force-closes the transport. */
   @Override
   public void destroy() {
      //TODO(mtaylor) ensure this properly destroys this connection.
      destroyed = true;
      disconnect(false);
   }

   @Override
   public Connection getTransportConnection() {
      return transportConnection;
   }

   /** Always {@code false}: this object represents the server side. */
   @Override
   public boolean isClient() {
      return false;
   }

   @Override
   public boolean isDestroyed() {
      return destroyed;
   }

   /** Force-closes the transport; {@code criticalError} is not consulted. */
   @Override
   public void disconnect(boolean criticalError) {
      transportConnection.forceClose();
   }

   /** Force-closes the transport; both parameters are not consulted. */
   @Override
   public void disconnect(String scaleDownNodeID, boolean criticalError) {
      transportConnection.forceClose();
   }

   /** Records that data has arrived since the last checkDataReceived() call. */
   protected void dataReceived() {
      dataReceived.set(true);
   }

   /**
    * Atomically tests-and-resets the activity flag: returns {@code true}
    * exactly once per burst of received data.
    */
   @Override
   public boolean checkDataReceived() {
      return dataReceived.compareAndSet(true, false);
   }

   @Override
   public void flush() {
      transportConnection.checkFlushBatchBuffer();
   }

   /** No-op: MQTT decoding is handled elsewhere, not via this callback. */
   @Override
   public void bufferReceived(Object connectionID, ActiveMQBuffer buffer) {
   }

   public void setConnected(boolean connected) {
      this.connected = connected;
   }

   public boolean getConnected() {
      return connected;
   }

   @Override
   public void killMessage(SimpleString nodeID) {
      //unsupported
   }

   @Override
   public boolean isSupportReconnect() {
      return false;
   }

   @Override
   public boolean isSupportsFlowControl() {
      return false;
   }

   /** No authenticated subject is tracked at this level. */
   @Override
   public Subject getSubject() {
      return null;
   }

   /**
    * Returns the name of the protocol for this Remoting Connection.
    *
    * @return the MQTT protocol name constant
    */
   @Override
   public String getProtocolName() {
      return MQTTProtocolManagerFactory.MQTT_PROTOCOL_NAME;
   }

   /**
    * Sets the client ID associated with this connection.
    *
    * @param cID the MQTT client identifier
    */
   @Override
   public void setClientID(String cID) {
      this.clientID = cID;
   }

   /**
    * Returns the Client ID associated with this connection.
    *
    * @return the MQTT client identifier, or {@code null} if not yet set
    */
   @Override
   public String getClientID() {
      return clientID;
   }

   @Override
   public String getTransportLocalAddress() {
      return getTransportConnection().getLocalAddress();
   }
}
apache-2.0
joshholl/intellij-csharp
gen/com/github/joshholl/intellij/csharp/lang/psi/impl/CSharpEnumTypeImpl.java
994
// This is a generated file. Not intended for manual editing. package com.github.joshholl.intellij.csharp.lang.psi.impl; import java.util.List; import org.jetbrains.annotations.*; import com.intellij.lang.ASTNode; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.util.PsiTreeUtil; import static com.github.joshholl.intellij.csharp.lang.lexer.CSharpTokenTypes.*; import com.intellij.extapi.psi.ASTWrapperPsiElement; import com.github.joshholl.intellij.csharp.lang.psi.*; public class CSharpEnumTypeImpl extends ASTWrapperPsiElement implements CSharpEnumType { public CSharpEnumTypeImpl(ASTNode node) { super(node); } public void accept(@NotNull PsiElementVisitor visitor) { if (visitor instanceof CSharpVisitor) ((CSharpVisitor)visitor).visitEnumType(this); else super.accept(visitor); } @Override @NotNull public CSharpTypeName getTypeName() { return findNotNullChildByClass(CSharpTypeName.class); } }
apache-2.0
hurzl/dmix
MPDroid/src/main/java/com/namelessdev/mpdroid/views/BaseDataBinder.java
4980
/*
 * Copyright (C) 2010-2016 The MPDroid Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.namelessdev.mpdroid.views;

import com.anpmech.mpd.item.Item;
import com.namelessdev.mpdroid.MPDApplication;
import com.namelessdev.mpdroid.R;
import com.namelessdev.mpdroid.adapters.ArrayDataBinder;
import com.namelessdev.mpdroid.cover.AlbumCoverDownloadListener;
import com.namelessdev.mpdroid.cover.CoverAsyncHelper;
import com.namelessdev.mpdroid.cover.CoverDownloadListener;
import com.namelessdev.mpdroid.cover.CoverManager;
import com.namelessdev.mpdroid.helpers.AlbumInfo;
import com.namelessdev.mpdroid.views.holders.AbstractViewHolder;
import com.namelessdev.mpdroid.views.holders.AlbumCoverHolder;

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import androidx.annotation.IdRes;
import androidx.annotation.LayoutRes;
import android.view.View;

import java.util.List;

/**
 * Common base for list-row data binders: provides shared album-cover loading
 * plumbing (helper setup, listener wiring, artwork download) on top of the
 * {@link ArrayDataBinder} contract that subclasses implement.
 *
 * @param <T> the item type bound into each row
 */
abstract class BaseDataBinder<T extends Item<T>> implements ArrayDataBinder<T> {

    // Separator used between text fragments in row labels (e.g. artist - title).
    static final CharSequence SEPARATOR = " - ";

    // Whether the cover cache preference is enabled; read once at construction.
    final boolean mEnableCache;

    protected BaseDataBinder() {
        super();
        final MPDApplication app = MPDApplication.getInstance();
        final SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(app);
        mEnableCache = settings.getBoolean(CoverManager.PREFERENCE_CACHE, true);
    }

    /**
     * Builds a {@link CoverAsyncHelper} sized to the holder's cover view.
     *
     * @param holder      row holder whose cover ImageView supplies the target size
     * @param defaultSize fallback max cover size, used while the view is unlaid-out
     * @return a reset helper ready to download artwork
     */
    protected static CoverAsyncHelper getCoverHelper(final AlbumCoverHolder holder,
            final int defaultSize) {
        final CoverAsyncHelper coverHelper = new CoverAsyncHelper();
        final int height = holder.mAlbumCover.getHeight();

        // If the list is not displayed yet, the height is 0. This is a
        // problem, so set a fallback one.
        if (height == 0) {
            coverHelper.setCoverMaxSize(defaultSize);
        } else {
            coverHelper.setCoverMaxSize(height);
        }

        coverHelper.resetCover();

        return coverHelper;
    }

    /**
     * Starts an asynchronous artwork download for the given album via the helper.
     *
     * @param coverHelper helper previously prepared by {@link #getCoverHelper}
     * @param albumInfo   the album whose cover should be fetched
     */
    protected static void loadArtwork(final CoverAsyncHelper coverHelper,
            final AlbumInfo albumInfo) {
        coverHelper.downloadCover(albumInfo);
    }

    /**
     * Wires a fresh cover-download listener to the holder's cover view.
     *
     * <p>Any listener previously stored in the view's tag is detached first
     * (rows are recycled, so a stale listener could paint an old album's art
     * into the reused view). The new listener and the helper are then stored
     * as view tags for the next recycle pass, and the listener is registered
     * with the helper. Statement order here is significant.
     *
     * @param holder      row holder whose cover view receives the listener
     * @param coverHelper helper the new listener is registered with
     * @return the newly attached listener
     */
    protected static CoverDownloadListener setCoverListener(final AlbumCoverHolder holder,
            final CoverAsyncHelper coverHelper) {
        // listen for new artwork to be loaded
        final CoverDownloadListener acd = new AlbumCoverDownloadListener(holder.mAlbumCover,
                holder.mCoverArtProgress, false);
        final AlbumCoverDownloadListener oldAcd = (AlbumCoverDownloadListener) holder.mAlbumCover
                .getTag(R.id.AlbumCoverDownloadListener);

        if (oldAcd != null) {
            oldAcd.detach();
        }

        holder.mAlbumCover.setTag(R.id.AlbumCoverDownloadListener, acd);
        holder.mAlbumCover.setTag(R.id.CoverAsyncHelper, coverHelper);
        coverHelper.addCoverDownloadListener(acd);

        return acd;
    }

    /**
     * This is a helper function for onLayoutInflation.
     *
     * @param targetView The view given by onLayoutInflation, from which the view will be found by
     *                   the {@code resource} given.
     * @param resource   The resource id view to find.
     * @param isVisible  If true, the visibility of the resource view will be set to {@code
     *                   View.VISIBLE}, otherwise the visibility of the resource view will be set
     *                   to {@code View.GONE}.
     * @return The unmodified targetView.
     */
    static View setViewVisible(final View targetView, @IdRes final int resource,
            final boolean isVisible) {
        final View view = targetView.findViewById(resource);

        if (isVisible) {
            view.setVisibility(View.VISIBLE);
        } else {
            view.setVisibility(View.GONE);
        }

        return targetView;
    }

    // The ArrayDataBinder contract, re-declared abstract so every concrete
    // binder must provide its own view lookup, layout, enablement and binding.

    @Override
    public abstract AbstractViewHolder findInnerViews(View targetView);

    @Override
    @LayoutRes
    public abstract int getLayoutId();

    @Override
    public abstract boolean isEnabled(int position, List<T> items, Object item);

    @Override
    public abstract void onDataBind(Context context, View targetView,
            AbstractViewHolder viewHolder, List<T> items, Object item, int position);

    @Override
    public abstract View onLayoutInflation(Context context, View targetView, List<T> items);
}
apache-2.0