repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
nvoron23/opensearchserver
src/main/java/com/jaeksoft/searchlib/renderer/RendererException.java
1350
/** * License Agreement for OpenSearchServer * * Copyright (C) 2014 Emmanuel Keller / Jaeksoft * * http://www.open-search-server.com * * This file is part of OpenSearchServer. * * OpenSearchServer is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * OpenSearchServer is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenSearchServer. * If not, see <http://www.gnu.org/licenses/>. **/ package com.jaeksoft.searchlib.renderer; import java.io.IOException; public class RendererException { public static class NoUserException extends IOException { private static final long serialVersionUID = -4666151734739611156L; public NoUserException(String msg) { super(msg); } } public static class AuthException extends IOException { private static final long serialVersionUID = -6740821564309267321L; public AuthException(String msg) { super(msg); } } }
gpl-3.0
chiwanpark/flamingo2
flamingo2-web/src/main/java/org/opencloudengine/flamingo2/engine/hawq/externaltable/Custom.java
1730
/** * Copyright (C) 2011 Flamingo Project (http://www.cloudine.io). * <p/> * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * <p/> * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * <p/> * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.opencloudengine.flamingo2.engine.hawq.externaltable; import org.opencloudengine.flamingo2.util.StringUtils; /** * HAWQ External Table Format : CUSTOM. * * @author Ha Neul, Kim * @since 2.0 */ public class Custom extends Format { private String formatter; public String getFormatter() { return formatter; } public void setFormatter(String formatter) { this.formatter = formatter; } @Override public String toString() { return "Custom{" + "type=" + this.getType() + ", formatter=" + formatter + '}'; } @Override public boolean isEmptyOptions() { return StringUtils.isEmpty(this.formatter); } @Override public String getFormatString() { String formatString = ""; if (!StringUtils.isEmpty(this.formatter)) { formatString += "FORMATTER = '" + this.formatter + "'"; } return formatString; } }
gpl-3.0
ThirdProject/android_external_whispersystems_TextSecure
src/org/thoughtcrime/securesms/notifications/MessageNotifier.java
15217
/** * Copyright (C) 2011 Whisper Systems * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.thoughtcrime.securesms.notifications; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.graphics.BitmapFactory; import android.graphics.Color; import android.media.AudioManager; import android.media.MediaPlayer; import android.net.Uri; import android.support.v4.app.NotificationCompat; import android.support.v4.app.NotificationCompat.BigTextStyle; import android.support.v4.app.NotificationCompat.InboxStyle; import android.text.Spannable; import android.text.SpannableString; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.style.StyleSpan; import android.util.Log; import org.thoughtcrime.securesms.R; import org.thoughtcrime.securesms.RoutingActivity; import org.thoughtcrime.securesms.contacts.ContactPhotoFactory; import org.thoughtcrime.securesms.database.PushDatabase; import org.thoughtcrime.securesms.recipients.RecipientFactory; import org.thoughtcrime.securesms.recipients.RecipientFormattingException; import org.whispersystems.textsecure.crypto.MasterSecret; import org.thoughtcrime.securesms.database.DatabaseFactory; import org.thoughtcrime.securesms.database.MmsSmsDatabase; 
import org.thoughtcrime.securesms.database.model.MessageRecord; import org.thoughtcrime.securesms.recipients.Recipient; import org.thoughtcrime.securesms.recipients.Recipients; import org.thoughtcrime.securesms.util.TextSecurePreferences; import org.whispersystems.textsecure.push.IncomingPushMessage; import java.io.IOException; import java.util.List; /** * Handles posting system notifications for new messages. * * * @author Moxie Marlinspike */ public class MessageNotifier { public static final int NOTIFICATION_ID = 1338; private volatile static long visibleThread = -1; public static void setVisibleThread(long threadId) { visibleThread = threadId; } public static void notifyMessageDeliveryFailed(Context context, Recipients recipients, long threadId) { if (visibleThread == threadId) { sendInThreadNotification(context); } else { Intent intent = new Intent(context, RoutingActivity.class); intent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP); intent.putExtra("recipients", recipients); intent.putExtra("thread_id", threadId); intent.setData((Uri.parse("custom://"+System.currentTimeMillis()))); NotificationCompat.Builder builder = new NotificationCompat.Builder(context); builder.setSmallIcon(R.drawable.icon_notification); builder.setLargeIcon(BitmapFactory.decodeResource(context.getResources(), R.drawable.ic_list_alert_sms_failed)); builder.setContentTitle(context.getString(R.string.MessageNotifier_message_delivery_failed)); builder.setContentText(context.getString(R.string.MessageNotifier_failed_to_deliver_message)); builder.setTicker(context.getString(R.string.MessageNotifier_error_delivering_message)); builder.setContentIntent(PendingIntent.getActivity(context, 0, intent, 0)); builder.setAutoCancel(true); setNotificationAlarms(context, builder, true); ((NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE)) .notify((int)threadId, builder.build()); } } public static void updateNotification(Context context, MasterSecret masterSecret) { if 
(!TextSecurePreferences.isNotificationsEnabled(context)) { return; } updateNotification(context, masterSecret, false); } public static void updateNotification(Context context, MasterSecret masterSecret, long threadId) { if (!TextSecurePreferences.isNotificationsEnabled(context)) { return; } if (visibleThread == threadId) { DatabaseFactory.getThreadDatabase(context).setRead(threadId); sendInThreadNotification(context); } else { updateNotification(context, masterSecret, true); } } private static void updateNotification(Context context, MasterSecret masterSecret, boolean signal) { Cursor telcoCursor = null; Cursor pushCursor = null; try { telcoCursor = DatabaseFactory.getMmsSmsDatabase(context).getUnread(); pushCursor = DatabaseFactory.getPushDatabase(context).getPending(); if ((telcoCursor == null || telcoCursor.isAfterLast()) && (pushCursor == null || pushCursor.isAfterLast())) { ((NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE)) .cancel(NOTIFICATION_ID); return; } NotificationState notificationState = constructNotificationState(context, masterSecret, telcoCursor); appendPushNotificationState(context, masterSecret, notificationState, pushCursor); if (notificationState.hasMultipleThreads()) { sendMultipleThreadNotification(context, masterSecret, notificationState, signal); } else { sendSingleThreadNotification(context, masterSecret, notificationState, signal); } } finally { if (telcoCursor != null) telcoCursor.close(); if (pushCursor != null) pushCursor.close(); } } private static void sendSingleThreadNotification(Context context, MasterSecret masterSecret, NotificationState notificationState, boolean signal) { if (notificationState.getNotifications().isEmpty()) { ((NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE)) .cancel(NOTIFICATION_ID); return; } List<NotificationItem>notifications = notificationState.getNotifications(); NotificationCompat.Builder builder = new NotificationCompat.Builder(context); Recipient 
recipient = notifications.get(0).getIndividualRecipient(); builder.setSmallIcon(R.drawable.icon_notification); builder.setLargeIcon(recipient.getContactPhoto()); builder.setContentTitle(recipient.toShortString()); builder.setContentText(notifications.get(0).getText()); builder.setContentIntent(notifications.get(0).getPendingIntent(context)); if (masterSecret != null) { builder.addAction(R.drawable.check, context.getString(R.string.MessageNotifier_mark_as_read), notificationState.getMarkAsReadIntent(context, masterSecret)); } SpannableStringBuilder content = new SpannableStringBuilder(); for (NotificationItem item : notifications) { content.append(item.getBigStyleSummary()); content.append('\n'); } builder.setStyle(new BigTextStyle().bigText(content)); setNotificationAlarms(context, builder, signal); if (signal) { builder.setTicker(notifications.get(0).getTickerText()); } ((NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE)) .notify(NOTIFICATION_ID, builder.build()); } private static void sendMultipleThreadNotification(Context context, MasterSecret masterSecret, NotificationState notificationState, boolean signal) { List<NotificationItem> notifications = notificationState.getNotifications(); NotificationCompat.Builder builder = new NotificationCompat.Builder(context); builder.setSmallIcon(R.drawable.icon_notification); builder.setLargeIcon(BitmapFactory.decodeResource(context.getResources(), R.drawable.icon_notification)); builder.setContentTitle(String.format(context.getString(R.string.MessageNotifier_d_new_messages), notificationState.getMessageCount())); builder.setContentText(String.format(context.getString(R.string.MessageNotifier_most_recent_from_s), notifications.get(0).getIndividualRecipientName())); builder.setContentIntent(PendingIntent.getActivity(context, 0, new Intent(context, RoutingActivity.class), 0)); if (masterSecret != null) { builder.addAction(R.drawable.check, context.getString(R.string.MessageNotifier_mark_all_as_read), 
notificationState.getMarkAsReadIntent(context, masterSecret)); } InboxStyle style = new InboxStyle(); for (NotificationItem item : notifications) { style.addLine(item.getTickerText()); } builder.setStyle(style); setNotificationAlarms(context, builder, signal); if (signal) { builder.setTicker(notifications.get(0).getTickerText()); } ((NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE)) .notify(NOTIFICATION_ID, builder.build()); } private static void sendInThreadNotification(Context context) { try { if (!TextSecurePreferences.isInThreadNotifications(context)) { return; } String ringtone = TextSecurePreferences.getNotificationRingtone(context); if (ringtone == null) return; Uri uri = Uri.parse(ringtone); MediaPlayer player = new MediaPlayer(); player.setAudioStreamType(AudioManager.STREAM_NOTIFICATION); player.setDataSource(context, uri); player.setLooping(false); player.setVolume(0.25f, 0.25f); player.prepare(); final AudioManager audioManager = ((AudioManager)context.getSystemService(Context.AUDIO_SERVICE)); audioManager.requestAudioFocus(null, AudioManager.STREAM_NOTIFICATION, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK); player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { audioManager.abandonAudioFocus(null); } }); player.start(); } catch (IOException ioe) { Log.w("MessageNotifier", ioe); } } private static void appendPushNotificationState(Context context, MasterSecret masterSecret, NotificationState notificationState, Cursor cursor) { if (masterSecret != null) return; PushDatabase.Reader reader = null; IncomingPushMessage message; try { reader = DatabaseFactory.getPushDatabase(context).readerFor(cursor); while ((message = reader.getNext()) != null) { Recipient recipient; try { recipient = RecipientFactory.getRecipientsFromString(context, message.getSource(), false).getPrimaryRecipient(); } catch (RecipientFormattingException e) { Log.w("MessageNotifier", e); 
recipient = new Recipient("Unknown", "Unknown", null, ContactPhotoFactory.getDefaultContactPhoto(context)); } Recipients recipients = RecipientFactory.getRecipientsFromMessage(context, message, false); long threadId = DatabaseFactory.getThreadDatabase(context).getThreadIdFor(recipients); SpannableString body = new SpannableString(context.getString(R.string.MessageNotifier_encrypted_message)); body.setSpan(new StyleSpan(android.graphics.Typeface.ITALIC), 0, body.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); notificationState.addNotification(new NotificationItem(recipient, recipients, threadId, body, null)); } } finally { if (reader != null) reader.close(); } } private static NotificationState constructNotificationState(Context context, MasterSecret masterSecret, Cursor cursor) { NotificationState notificationState = new NotificationState(); MessageRecord record; MmsSmsDatabase.Reader reader; if (masterSecret == null) reader = DatabaseFactory.getMmsSmsDatabase(context).readerFor(cursor); else reader = DatabaseFactory.getMmsSmsDatabase(context).readerFor(cursor, masterSecret); while ((record = reader.getNext()) != null) { Recipient recipient = record.getIndividualRecipient(); Recipients recipients = record.getRecipients(); long threadId = record.getThreadId(); SpannableString body = record.getDisplayBody(); Uri image = null; // XXXX This is so fucked up. FIX ME! 
if (body.toString().equals(context.getString(R.string.MessageDisplayHelper_decrypting_please_wait))) { body = new SpannableString(context.getString(R.string.MessageNotifier_encrypted_message)); body.setSpan(new StyleSpan(android.graphics.Typeface.ITALIC), 0, body.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); } notificationState.addNotification(new NotificationItem(recipient, recipients, threadId, body, image)); } reader.close(); return notificationState; } private static void setNotificationAlarms(Context context, NotificationCompat.Builder builder, boolean signal) { String ringtone = TextSecurePreferences.getNotificationRingtone(context); boolean vibrate = TextSecurePreferences.isNotificationVibrateEnabled(context); String ledColor = TextSecurePreferences.getNotificationLedColor(context); String ledBlinkPattern = TextSecurePreferences.getNotificationLedPattern(context); String ledBlinkPatternCustom = TextSecurePreferences.getNotificationLedPatternCustom(context); String[] blinkPatternArray = parseBlinkPattern(ledBlinkPattern, ledBlinkPatternCustom); builder.setSound(TextUtils.isEmpty(ringtone) || !signal ? null : Uri.parse(ringtone)); if (signal && vibrate) builder.setDefaults(Notification.DEFAULT_VIBRATE); builder.setLights(Color.parseColor(ledColor), Integer.parseInt(blinkPatternArray[0]), Integer.parseInt(blinkPatternArray[1])); } private static String[] parseBlinkPattern(String blinkPattern, String blinkPatternCustom) { if (blinkPattern.equals("custom")) blinkPattern = blinkPatternCustom; return blinkPattern.split(","); } }
gpl-3.0
raisercostin/mucommander-2
src/main/com/mucommander/core/LocalLocationHistory.java
6600
/* * This file is part of muCommander, http://www.mucommander.com * Copyright (C) 2002-2012 Maxence Bernard * * muCommander is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * muCommander is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.mucommander.core; import java.util.List; import java.util.Vector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.mucommander.commons.file.AbstractFile; import com.mucommander.commons.file.FileFactory; import com.mucommander.commons.file.FileProtocols; import com.mucommander.commons.file.FileURL; import com.mucommander.commons.file.impl.local.LocalFile; import com.mucommander.ui.main.FolderPanel; /** * This class maintains a history of visited locations for a given tab, and provides methods to go back and go forward * in the history. 
* * <p>There is a limit to the number of locations the history can contain, defined by {@link #HISTORY_CAPACITY}.</p> * * @author Maxence Bernard, Arik Hadas */ public class LocalLocationHistory { private static final Logger LOGGER = LoggerFactory.getLogger(LocalLocationHistory.class); /** Maximum number of elements the folder history can contain */ private final static int HISTORY_CAPACITY = 100; /** List of visited locations, ordered by last visit date */ private List<FileURL> history = new Vector<>(HISTORY_CAPACITY+1); /** Index of current folder in history */ private int historyIndex = -1; /** FolderPanel which is being monitored */ private FolderPanel folderPanel; /** Last folder which can be recalled on next startup */ private String lastRecallableFolder; /** * Creates a new FolderHistory instance which will keep track of visited folders in the given FolderPanel. */ public LocalLocationHistory(FolderPanel folderPanel) { this.folderPanel = folderPanel; } /** * Adds the specified folder to history. The folder won't be added if the previous folder is the same. * * <p>This method is called by FolderPanel each time a folder is changed. */ public void tryToAddToHistory(FileURL folderURL) { // Do not add folder to history if new current folder is the same as previous folder if (historyIndex<0 || !folderURL.equals(history.get(historyIndex), false, false)) addToHistory(folderURL); // Save last recallable folder on startup, only if : // - it is a directory on a local filesytem // - it doesn't look like a removable media drive (cd/dvd/floppy), especially in order to prevent // Java from triggering that dreaded 'Drive not ready' popup. 
LOGGER.trace("folder="+folderURL); if(folderURL.getScheme().equals(FileProtocols.FILE)) { AbstractFile folder = FileFactory.getFile(folderURL); if (folder.isDirectory() && (folder instanceof LocalFile) && !((LocalFile)folder.getRoot()).guessRemovableDrive()) { this.lastRecallableFolder = folder.getAbsolutePath(); LOGGER.trace("lastRecallableFolder= "+lastRecallableFolder); } } } private void addToHistory(FileURL folderURL) { int historySize = history.size(); historyIndex++; // Delete 'forward' history items if any for(int i=historyIndex; i<historySize; i++) { history.remove(historyIndex); } // If capacity is reached, remove first folder if(history.size()>=HISTORY_CAPACITY) { history.remove(0); historyIndex--; } // Add previous folder to history history.add(folderURL); } /** * Changes current folder to be the previous one in folder history. * Does nothing if there is no previous folder in history. */ public synchronized void goBack() { if (historyIndex==0) return; folderPanel.tryChangeCurrentFolder(history.get(--historyIndex)); } /** * Changes current folder to be the next one in folder history. * Does nothing if there is no next folder in history. */ public synchronized void goForward() { if (historyIndex==history.size()-1) return; folderPanel.tryChangeCurrentFolder(history.get(++historyIndex)); } /** * Returns <code>true</code> if there is at least one folder 'back' in the history. */ public boolean hasBackFolder() { return historyIndex>0; } /** * Returns <code>true</code> if there is at least one folder 'forward' in the history. */ public boolean hasForwardFolder() { return historyIndex!=history.size()-1; } /** * Returns a list of 'back' folders, most recently visited folder first. The returned array may be empty if there * currently isn't any 'back' folder in history, but may never be null. 
*/ public FileURL[] getBackFolders() { if(!hasBackFolder()) return new FileURL[0]; int backLen = historyIndex; FileURL urls[] = new FileURL[backLen]; int cur = 0; for(int i=historyIndex-1; i>=0; i--) urls[cur++] = history.get(i); return urls; } /** * Returns a list of 'forward' folders, most recently visited folder first. The returned array may be empty if there * currently isn't any 'forward' folder in history, but may never be null. */ public FileURL[] getForwardFolders() { if(!hasForwardFolder()) return new FileURL[0]; int historySize = history.size(); FileURL urls[] = new FileURL[historySize-historyIndex-1]; int cur = 0; for(int i=historyIndex+1; i<historySize; i++) urls[cur++] = history.get(i); return urls; } /** * Returns true if the folder history contains the given FileURL, either as a back or forward folder, or as the * current folder. */ public boolean historyContains(FileURL folderURL) { return history.contains(folderURL); } /** * Returns the last visited folder that can be saved when the application terminates, and recalled next time * the application is started. * * <p>The returned folder will NOT be a folder on a remote filesystem * which would be likely not to be reachable next time the app is started, or a removable media drive * (cd/dvd/floppy) under Windows, which would trigger a nasty 'drive not ready' popup dialog if the drive * is not available or the media has changed. */ public String getLastRecallableFolder() { return this.lastRecallableFolder; } }
gpl-3.0
aborg0/RapidMiner-Unuk
src_generated/com/rapidminer/deployment/client/wsimport/GetTopDownloads.java
753
package com.rapidminer.deployment.client.wsimport; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for getTopDownloads complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="getTopDownloads"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "getTopDownloads") public class GetTopDownloads { }
agpl-3.0
ShatalovYaroslav/scheduling-portal
scheduler-portal/src/main/java/org/ow2/proactive_grid_cloud_portal/scheduler/server/jaxb/MapRecord.java
4483
/* * ProActive Parallel Suite(TM): * The Open Source library for parallel and distributed * Workflows & Scheduling, Orchestration, Cloud Automation * and Big Data Analysis on Enterprise Grids & Clouds. * * Copyright (c) 2007 - 2017 ActiveEon * Contact: contact@activeeon.com * * This library is free software: you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation: version 3 of * the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. */ package org.ow2.proactive_grid_cloud_portal.scheduler.server.jaxb; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for mapRecord complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="mapRecord"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="map" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="task" type="{}taskRecord" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "mapRecord", propOrder = { "map" }) public class MapRecord { protected MapRecord.Map map; /** * Gets the value of the map property. * * @return * possible object is * {@link MapRecord.Map } * */ public MapRecord.Map getMap() { return map; } /** * Sets the value of the map property. * * @param value * allowed object is * {@link MapRecord.Map } * */ public void setMap(MapRecord.Map value) { this.map = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="task" type="{}taskRecord" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "task" }) public static class Map { protected List<TaskRecord> task; /** * Gets the value of the task property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the task property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getTask().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link TaskRecord } * * */ public List<TaskRecord> getTask() { if (task == null) { task = new ArrayList<TaskRecord>(); } return this.task; } } }
agpl-3.0
4ment/beast-mcmc
src/dr/oldevomodel/treelikelihood/NativeAminoAcidLikelihoodCore.java
5461
/* * NativeAminoAcidLikelihoodCore.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.oldevomodel.treelikelihood; @Deprecated // Switching to BEAGLE public class NativeAminoAcidLikelihoodCore extends AbstractLikelihoodCore{ public NativeAminoAcidLikelihoodCore() { super(20); } protected void calculateIntegratePartials(double[] inPartials, double[] proportions, double[] outPartials) { nativeIntegratePartials(inPartials, proportions, patternCount, matrixCount, outPartials); } protected void calculatePartialsPartialsPruning(double[] partials1, double[] matrices1, double[] partials2, double[] matrices2, double[] partials3) { nativePartialsPartialsPruning(partials1, matrices1, partials2, matrices2, patternCount, matrixCount, partials3); } protected void calculateStatesPartialsPruning(int[] states1, double[] matrices1, double[] partials2, double[] matrices2, double[] partials3) { nativeStatesPartialsPruning(states1, matrices1, partials2, matrices2, patternCount, matrixCount, partials3); } protected void calculateStatesStatesPruning(int[] states1, double[] matrices1, int[] states2, 
double[] matrices2, double[] partials3) { nativeStatesStatesPruning(states1, matrices1, states2, matrices2, patternCount, matrixCount, partials3); } protected void calculatePartialsPartialsPruning(double[] partials1, double[] matrices1, double[] partials2, double[] matrices2, double[] partials3, int[] matrixMap) { throw new RuntimeException("not implemented using matrixMap"); } protected void calculateStatesStatesPruning(int[] states1, double[] matrices1, int[] states2, double[] matrices2, double[] partials3, int[] matrixMap) { throw new RuntimeException("not implemented using matrixMap"); } protected void calculateStatesPartialsPruning(int[] states1, double[] matrices1, double[] partials2, double[] matrices2, double[] partials3, int[] matrixMap) { throw new RuntimeException("not implemented using matrixMap"); } public void calculateLogLikelihoods(double[] partials, double[] frequencies, double[] outLogLikelihoods) { int v = 0; for (int k = 0; k < patternCount; k++) { double sum = frequencies[0] * partials[v]; v++; sum += frequencies[1] * partials[v]; v++; sum += frequencies[2] * partials[v]; v++; sum += frequencies[3] * partials[v]; v++; sum += frequencies[4] * partials[v]; v++; sum += frequencies[5] * partials[v]; v++; sum += frequencies[6] * partials[v]; v++; sum += frequencies[7] * partials[v]; v++; sum += frequencies[8] * partials[v]; v++; sum += frequencies[9] * partials[v]; v++; sum += frequencies[10] * partials[v]; v++; sum += frequencies[11] * partials[v]; v++; sum += frequencies[12] * partials[v]; v++; sum += frequencies[13] * partials[v]; v++; sum += frequencies[14] * partials[v]; v++; sum += frequencies[15] * partials[v]; v++; sum += frequencies[16] * partials[v]; v++; sum += frequencies[17] * partials[v]; v++; sum += frequencies[18] * partials[v]; v++; sum += frequencies[19] * partials[v]; v++; outLogLikelihoods[k] = Math.log(sum) + getLogScalingFactor(k); } } public native void nativeIntegratePartials(double[] partials, double[] proportions, int 
patternCount, int matrixCount, double[] outPartials); protected native void nativePartialsPartialsPruning(double[] partials1, double[] matrices1, double[] partials2, double[] matrices2, int patternCount, int matrixCount, double[] partials3); protected native void nativeStatesPartialsPruning(int[] states1, double[] matrices1, double[] partials2, double[] matrices2, int patternCount, int matrixCount, double[] partials3); protected native void nativeStatesStatesPruning(int[] states1, double[] matrices1, int[] states2, double[] matrices2, int patternCount, int matrixCount, double[] partials3); public static boolean isAvailable(){ return isNativeAvailable; } private static boolean isNativeAvailable = false; static { try { System.loadLibrary("AminoAcidLikelihoodCore"); isNativeAvailable = true; } catch (UnsatisfiedLinkError e) { System.err.println("Using Java AminoAcid likelihood core " + e.toString()); System.err.println("Looking for AminoAcidLikelihoodCore in " + System.getProperty("java.library.path")); } } }
lgpl-2.1
liujed/polyglot-eclipse
src/polyglot/visit/InnerTranslator.java
52498
/******************************************************************************* * This file is part of the Polyglot extensible compiler framework. * * Copyright (c) 2000-2012 Polyglot project group, Cornell University * Copyright (c) 2006-2012 IBM Corporation * All rights reserved. * * This program and the accompanying materials are made available under * the terms of the Eclipse Public License v1.0 which accompanies this * distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * This program and the accompanying materials are made available under * the terms of the Lesser GNU Public License v2.0 which accompanies this * distribution. * * The development of the Polyglot project has been supported by a * number of funding sources, including DARPA Contract F30602-99-1-0533, * monitored by USAF Rome Laboratory, ONR Grants N00014-01-1-0968 and * N00014-09-1-0652, NSF Grants CNS-0208642, CNS-0430161, CCF-0133302, * and CCF-1054172, AFRL Contract FA8650-10-C-7022, an Alfred P. Sloan * Research Fellowship, and an Intel Research Ph.D. Fellowship. * * See README for contributors. 
******************************************************************************/ package polyglot.visit; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Stack; import polyglot.ast.Assign; import polyglot.ast.Block; import polyglot.ast.Call; import polyglot.ast.ClassBody; import polyglot.ast.ClassDecl; import polyglot.ast.ClassMember; import polyglot.ast.CodeDecl; import polyglot.ast.ConstructorCall; import polyglot.ast.ConstructorDecl; import polyglot.ast.Expr; import polyglot.ast.Field; import polyglot.ast.FieldAssign; import polyglot.ast.FieldDecl; import polyglot.ast.Formal; import polyglot.ast.Id; import polyglot.ast.Local; import polyglot.ast.LocalClassDecl; import polyglot.ast.LocalDecl; import polyglot.ast.MethodDecl; import polyglot.ast.New; import polyglot.ast.Node; import polyglot.ast.NodeFactory; import polyglot.ast.ProcedureDecl; import polyglot.ast.Special; import polyglot.ast.Stmt; import polyglot.ast.TypeNode; import polyglot.types.ClassType; import polyglot.types.CodeInstance; import polyglot.types.ConstructorInstance; import polyglot.types.FieldInstance; import polyglot.types.Flags; import polyglot.types.LocalInstance; import polyglot.types.MethodInstance; import polyglot.types.ParsedClassType; import polyglot.types.ReferenceType; import polyglot.types.SemanticException; import polyglot.types.Type; import polyglot.types.TypeSystem; import polyglot.util.InternalCompilerError; import polyglot.util.Position; /** * @author xinqi * * This class translates inner classes to static nested classes with a field referring to the enclosing * instance. It will also add "new" methods to the enclosing class corresponding to constructors of inner * classes. 
* */ public class InnerTranslator extends NodeVisitor { protected TypeSystem ts; protected NodeFactory nf; protected class ClassInfo { ParsedClassType ct; Map<String, Integer> localNameCount; // Count how many local/anonymous classes with a particular name have appeared. List<ClassDecl> newMemberClasses; // New member class declarations converted from local/anonymous classes. List<MethodDecl> newMemberMethods; // New member methods added. List<Formal> newConsFormals; // The list of added formals to constructors. // The first one should be the reference to the outer class instance, // and the remaining ones are the final locals. List<ClassInfo> innerClassInfo; // List of inner class info. boolean hasOuterField; CodeInfo insideCode; // For local/anonymous classes, this is the code where the declaration is. public ClassInfo(ParsedClassType ct) { this.ct = ct; localNameCount = new HashMap<>(); newMemberClasses = new LinkedList<>(); newMemberMethods = new LinkedList<>(); newConsFormals = new LinkedList<>(); innerClassInfo = new LinkedList<>(); hasOuterField = false; insideCode = null; } @Override public String toString() { return ct.toString(); } // For anonymous classes, name would be "". public int addLocalClassName(String name) { if (localNameCount.containsKey(name)) { int i = localNameCount.get(name); localNameCount.put(name, i + 1); return i; } else { localNameCount.put(name, 1); return 0; } } // Generate new names for local/anonymous classes. 
public String localClassName(String name, int nameCount) { String thisName = ct.fullName(); return namePrefix() + thisName + "$" + nameCount + name; } public ParsedClassType classType() { return ct; } public void addConsFormal(Formal f) { newConsFormals.add(f); FieldInstance fi = ts.fieldInstance(Position.compilerGenerated(), ct, Flags.PROTECTED, f.type().type(), newFieldName(f.name())); ct.addField(fi); } public List<Formal> newConsFormals() { return newConsFormals; } public List<ClassDecl> newMemberClasses() { return newMemberClasses; } public void addMemberClass(ClassDecl cd) { newMemberClasses.add(cd); ct.addMemberClass(cd.type()); } public List<MethodDecl> newMemberMethods() { return newMemberMethods; } public void addMemberMethods(MethodDecl md) { newMemberMethods.add(md); ct.addMethod(md.methodInstance()); } // public ClassType memberClassNamed(String name) { // return cd.type().memberClassNamed(name); // } public void addInnerClassInfo(ClassInfo cinfo) { innerClassInfo.add(cinfo); } public ClassInfo findInnerClassInfo(ClassType ct) { for (ClassInfo cinfo : innerClassInfo) { if (cinfo.classType().equals(ct)) { return cinfo; } } return null; } public void hasOuterField(boolean b) { hasOuterField = b; } public boolean hasOuterField() { return hasOuterField; } public CodeInfo insideCode() { return insideCode; } public void insideCode(CodeInfo ci) { insideCode = ci; } } // Information about methods, constructors, and initializers. protected class CodeInfo { CodeInstance ci; List<LocalInstance> finalArgs; // the list of final arguments if any. List<ClassInfo> localClassInfo; // List of local/anonymous class info. Stack<LinkedList<LocalInstance>> blockFinals; // stack of lists of final variables defined in a block. 
public CodeInfo(CodeInstance ci) { this.ci = ci; finalArgs = new LinkedList<>(); localClassInfo = new LinkedList<>(); blockFinals = new Stack<>(); } @Override public String toString() { return ci.toString(); } public void addFinalArg(LocalInstance li) { finalArgs.add(li); } public void pushBlock() { blockFinals.push(new LinkedList<LocalInstance>()); } public void popBlock() { blockFinals.pop(); } public void addFinalLocal(LocalInstance li) { List<LocalInstance> current = blockFinals.peek(); current.add(li); } public List<LocalInstance> finalList() { List<LocalInstance> result = new LinkedList<>(); result.addAll(finalArgs); for (List<LocalInstance> l : blockFinals) { result.addAll(l); } return result; } public ClassInfo findLocalClassInfo(ClassType ct) { for (ClassInfo cinfo : localClassInfo) { if (cinfo.classType().equals(ct)) { return cinfo; } } return null; } /* * Add the name of a local/anonymous class, where anonymous classes have name "". */ public void addLocalClassInfo(ClassInfo cinfo) { localClassInfo.add(cinfo); } /* * Return whether the code is static. * Local/anonymous classes inside static code should not have outer field. */ public boolean isStatic() { return ci.flags().isStatic(); } /* * Check whether a final local variable with the specified name exists. */ public boolean existFinal(String name) { for (int i = blockFinals.size() - 1; i >= 0; i--) { List<LocalInstance> l = blockFinals.get(i); for (LocalInstance li : l) { if (li.name().equals(name)) return true; } } for (LocalInstance li : finalArgs) { if (li.name().equals(name)) return true; } return false; } } protected Stack<ClassInfo> classContext; // The context stack of all the enclosing classes. // It is a stack of ClassInfo. protected Stack<CodeInfo> codeContext; // The context stack of all the enclosing code. // It is a stack of CodeInfo. protected HashMap<String, ClassInfo> innerClassInfoMap; // The map from full names to class infos of inner classes. 
protected Stack<Boolean> insideCode; // Boolean stack that indicates whether it is inside a piece of code now. protected Stack<Boolean> staticFieldDecl; // Boolean stack that indicates whether it is inside // the initialization of a static field. protected String namePrefix() { return "jl$"; } /* * Generate the new name for a field that comes from a final local variable. */ protected String newFieldName(String name) { return namePrefix() + name; } public InnerTranslator(TypeSystem ts, NodeFactory nf) { super(nf.lang()); this.ts = ts; this.nf = nf; classContext = new Stack<>(); codeContext = new Stack<>(); innerClassInfoMap = new HashMap<>(); insideCode = new Stack<>(); staticFieldDecl = new Stack<>(); } @Override public NodeVisitor enter(Node n) { if (n instanceof ClassDecl) { ClassDecl cd = (ClassDecl) n; enterClassDecl(cd); } else if (n instanceof New) { New newExpr = (New) n; enterNew(newExpr); } else if (n instanceof CodeDecl) { CodeDecl cd = (CodeDecl) n; enterCodeDecl(cd); } else if (n instanceof Block) { CodeInfo cinfo = codeContext.peek(); cinfo.pushBlock(); } else if (n instanceof LocalDecl) { LocalDecl ld = (LocalDecl) n; enterLocalDecl(ld); } else if (n instanceof ClassBody) { insideCode.push(false); } else if (n instanceof FieldDecl) { FieldDecl fd = (FieldDecl) n; enterFieldDecl(fd); } return this; } protected void enterFieldDecl(FieldDecl fd) { if (fd.flags().isStatic()) { staticFieldDecl.push(true); } else { staticFieldDecl.push(false); } } protected void enterClassDecl(ClassDecl cd) { ParsedClassType ct = cd.type(); ClassInfo cinfo = new ClassInfo(ct); if (ct.isInnerClass() && ct.isMember()) { ClassInfo classInfo = classContext.peek(); cinfo.addConsFormal(produceOuterFormal(ct, classInfo.classType())); cinfo.hasOuterField(true); classInfo.addInnerClassInfo(cinfo); innerClassInfoMap.put(ct.fullName(), cinfo); } if (ct.isLocal()) { CodeInfo codeInfo = codeContext.peek(); ClassInfo classInfo = classContext.peek(); if (!codeInfo.isStatic()) { // If this 
local/anonymous class is inside a static method, // then it shouldn't have an outer field. cinfo.addConsFormal(produceOuterFormal(cd.type(), classInfo.classType())); cinfo.hasOuterField(true); } codeInfo.addLocalClassInfo(cinfo); cinfo.insideCode(codeInfo); ct.kind(ClassType.MEMBER); ct.outer(classInfo.classType()); ct.needSerialization(false); // local classes don't need serialization. String className = classInfo.localClassName(cd.name(), classInfo.addLocalClassName(cd.name())); ct.name(className); for (LocalInstance li : codeInfo.finalList()) { Id name = nf.Id(Position.compilerGenerated(), li.name()); Formal f = nf.Formal(Position.compilerGenerated(), Flags.NONE, nf.CanonicalTypeNode(Position.compilerGenerated(), li.type()), name); f = f.localInstance(ts.localInstance(Position.compilerGenerated(), f.flags(), f.type().type(), f.name())); cinfo.addConsFormal(f); } innerClassInfoMap.put(ct.fullName(), cinfo); } classContext.push(cinfo); } protected void enterNew(New newExpr) { if (newExpr.body() != null) { // If this is an anonymous class declaration. ParsedClassType ct = newExpr.anonType(); ct.flags(Flags.NONE); ClassInfo cinfo = new ClassInfo(ct); // Check whether the anonymous class is defined outside a code (as the initialization of a field) boolean inCode = insideCode.peek().booleanValue(); CodeInfo codeInfo = null; if (inCode) { codeInfo = codeContext.peek(); } ClassInfo classInfo = classContext.peek(); if (inCode && !codeInfo.isStatic() || !inCode && !staticFieldDecl.peek().booleanValue()) { // If this local/anonymous class is inside a static method, // then it shouldn't have an outer field. cinfo.addConsFormal(produceOuterFormal(ct, classInfo.classType())); cinfo.hasOuterField(true); } if (inCode) { codeInfo.addLocalClassInfo(cinfo); } else { classInfo.addInnerClassInfo(cinfo); } cinfo.insideCode(codeInfo); ct.kind(ClassType.MEMBER); ct.outer(classInfo.classType()); ct.needSerialization(false); // anonymous classes don't need serialization. 
String className = classInfo.localClassName("", classInfo.addLocalClassName("")); ct.name(className); if (inCode) { for (LocalInstance li : codeInfo.finalList()) { Id name = nf.Id(Position.compilerGenerated(), li.name()); Formal f = nf.Formal(Position.compilerGenerated(), Flags.NONE, nf.CanonicalTypeNode(Position.compilerGenerated(), li.type()), name); f = f.localInstance(ts.localInstance(Position.compilerGenerated(), f.flags(), f.type().type(), f.name())); cinfo.addConsFormal(f); } } innerClassInfoMap.put(ct.fullName(), cinfo); classContext.push(cinfo); } } protected void enterCodeDecl(CodeDecl cd) { CodeInfo cinfo = new CodeInfo(cd.codeInstance()); // If it is a constructor or method, find all the final arguments and add them to the finalVar map. if (cd instanceof ProcedureDecl) { ProcedureDecl pd = (ProcedureDecl) cd; for (Formal f : pd.formals()) { if (f.flags().isFinal()) { cinfo.addFinalArg(f.localInstance()); } } } codeContext.push(cinfo); insideCode.push(true); } protected void enterLocalDecl(LocalDecl ld) { if (ld.flags().isFinal()) { codeContext.peek().addFinalLocal(ld.localInstance()); } } @Override public Node leave(Node old, Node n, NodeVisitor v) { if (n instanceof ClassDecl) { ClassDecl cd = (ClassDecl) n; return leaveClassDecl(old, cd, v); } else if (n instanceof New) { New newExpr = (New) n; return leaveNew(old, newExpr, v); } else if (n instanceof ConstructorCall) { ConstructorCall cc = (ConstructorCall) n; return leaveConstructorCall(old, cc, v); } else if (n instanceof Special) { Special s = (Special) n; return leaveSpecial(old, s, v); } else if (n instanceof Field) { Field field = (Field) n; return leaveField(old, field, v); } else if (n instanceof Call) { // Like Field accesses, we might also have method calls that have no explicit "A.this." // qualifiers, while it means to. Call c = (Call) n; return leaveCall(old, c, v); } else if (n instanceof LocalClassDecl) { // Need to remove local class declarations. 
return nf.Empty(Position.compilerGenerated()); } else if (n instanceof CodeDecl) { codeContext.pop(); insideCode.pop(); } else if (n instanceof Block) { CodeInfo cinfo = codeContext.peek(); cinfo.popBlock(); } else if (n instanceof Local) { Local local = (Local) n; return leaveLocal(old, local, v); } else if (n instanceof ClassBody) { insideCode.pop(); } else if (n instanceof FieldDecl) { staticFieldDecl.pop(); } return n; } protected Node leaveClassDecl(Node old, ClassDecl cd, NodeVisitor v) { ParsedClassType ct = cd.type(); ClassInfo selfInfo = classContext.pop(); // Do nothing if it is already a static class, or it is a toplevel class, // but need to add those classes converted from local/anonymous classes. if (ct.flags().isStatic() || ct.isTopLevel()) { if (selfInfo.newMemberClasses().size() > 0 || selfInfo.newMemberMethods().size() > 0) { cd = addNewMembers(cd, selfInfo); } return cd; } // Deal with ordinary inner classes. if (selfInfo.insideCode() == null) { cd = updateClassDecl(cd, ct, selfInfo); } else { ClassInfo cinfo = classContext.peek(); cd = cd.name(ct.name()); cd = updateClassDecl(cd, ct, selfInfo); cinfo.addMemberClass(cd); } return cd; } protected Node leaveNew(Node old, New newExpr, NodeVisitor v) { if (newExpr.body() != null) { // Anonymous class declaration. // Need to create a class declaration, and add it to the enclosing class. 
ParsedClassType ct = newExpr.anonType(); Id name = nf.Id(Position.compilerGenerated(), ct.name()); ClassDecl cd = nf.ClassDecl(Position.compilerGenerated(), ct.flags(), name, nf.CanonicalTypeNode(Position.compilerGenerated(), ct.superType()), Collections.<TypeNode> emptyList(), newExpr.body()); cd = cd.type(ct); ClassInfo selfInfo = classContext.pop(); ClassInfo cinfo = classContext.peek(); cd = cd.name(ct.name()); cd = addAnonymousConstructor(cd, ct, selfInfo, newExpr); cd = updateClassDecl(cd, ct, selfInfo); cinfo.addMemberClass(cd); newExpr = (New) newExpr.type(ct); } return updateNewExpr(newExpr); } protected Node leaveConstructorCall(Node old, ConstructorCall cc, NodeVisitor v) { ClassInfo cinfo = classContext.peek(); return updateConstructorCall(cc, cinfo); } protected Node leaveSpecial(Node old, Special s, NodeVisitor v) { if (s.kind() == Special.THIS && s.qualifier() != null) { ClassType tOuter = (ClassType) s.qualifier().type(); ClassType tThis = classContext.peek().classType(); Expr t = s.qualifier(null); while (!ts.equals(tOuter, tThis)) { // t = nf.Field(Position.compilerGenerated(), t, newFieldName(outerThisName(tThis))); t = produceOuterField(tThis, t); tThis = tThis.outer(); } return t; } return s; } protected Node leaveField(Node old, Field field, NodeVisitor v) { // Check whether the field access is a disguised form of "A.this.f". // Note: we only need to check non-static fields! 
if (!field.flags().isStatic() && field.isTargetImplicit()) { ClassType tThis = classContext.peek().classType(); ClassType tOuter = findField(field.name(), tThis); Expr t = produceThis(tThis); while (!ts.equals(tOuter, tThis)) { // t = nf.Field(Position.compilerGenerated(), t, newFieldName(outerThisName(tThis))); t = produceOuterField(tThis, t); tThis = tThis.outer(); } Id name = nf.Id(Position.compilerGenerated(), field.name()); Field f = nf.Field(field.position(), t, name); f = f.fieldInstance(field.fieldInstance()); return f; } return field; } protected Node leaveCall(Node old, Call c, NodeVisitor v) { MethodInstance mi = c.methodInstance(); if (!mi.flags().isStatic() && c.isTargetImplicit()) { ClassType tThis = classContext.peek().classType(); ClassType tOuter = findMethod(mi, tThis); Expr t = produceThis(tThis); while (!ts.equals(tOuter, tThis)) { // t = nf.Field(Position.compilerGenerated(), t, newFieldName(outerThisName(tThis))); t = produceOuterField(tThis, t); tThis = tThis.outer(); } Call nc = c.target(t).targetImplicit(false); return nc; } return c; } /** * Translate final local variables that should become field accesses of local/anonymous classes. 
* @param old * @param local * @param v */ protected Node leaveLocal(Node old, Local local, NodeVisitor v) { if (local.flags().isFinal()) { CodeInfo codeInfo = codeContext.peek(); if (!codeInfo.existFinal(local.name())) { String newName = newFieldName(local.name()); ClassType tThis = classContext.peek().classType(); ClassType tOuter = findField(newName, tThis); Expr t = produceThis(tThis); while (!ts.equals(tOuter, tThis)) { // t = nf.Field(Position.compilerGenerated(), t, newFieldName(outerThisName(tThis))); t = produceOuterField(tThis, t); tThis = tThis.outer(); } Id id = nf.Id(Position.compilerGenerated(), newName); Field f = nf.Field(Position.compilerGenerated(), t, id); f = f.fieldInstance(ts.fieldInstance(Position.compilerGenerated(), (ReferenceType) t.type(), Flags.PROTECTED, local.type(), f.name())); return f; } } return local; } /** * Generate a special node "this" with the correct type. * @param ct */ protected Special produceThis(ClassType ct) { Special s = nf.Special(Position.compilerGenerated(), Special.THIS); s = (Special) s.type(ct); return s; } /* * Add the constructor for an anonymous class. 
*/ protected ClassDecl addAnonymousConstructor(ClassDecl cd, ParsedClassType ct, ClassInfo cinfo, New newExpr) { List<Formal> formals = new ArrayList<>(newExpr.arguments().size() + 1); List<Expr> args = new ArrayList<>(newExpr.arguments().size() + 1); List<Type> ftypes = new ArrayList<>(newExpr.arguments().size() + 1); int i = 0; for (Expr arg : newExpr.arguments()) { Id id = nf.Id(Position.compilerGenerated(), "arg" + i); Formal f = nf.Formal(Position.compilerGenerated(), Flags.NONE, nf.CanonicalTypeNode(Position.compilerGenerated(), arg.type()), id); LocalInstance li = ts.localInstance(Position.compilerGenerated(), Flags.NONE, arg.type(), "arg" + i); f = f.localInstance(li); formals.add(f); Local l = nf.Local(Position.compilerGenerated(), id); l = l.localInstance(li); args.add(l); ftypes.add(arg.type()); } ConstructorCall cc = nf.SuperCall(Position.compilerGenerated(), args); cc = cc.constructorInstance(newExpr.constructorInstance()); cc = updateConstructorCall(cc, cinfo); Id cid = nf.Id(Position.compilerGenerated(), ct.name()); ConstructorDecl cons = nf.ConstructorDecl(Position.compilerGenerated(), Flags.NONE, cid, formals, Collections.<TypeNode> emptyList(), nf.Block(Position.compilerGenerated(), cc)); ConstructorInstance consInst = ts.constructorInstance(Position.compilerGenerated(), ct, Flags.NONE, ftypes, Collections.<Type> emptyList()); cons = cons.constructorInstance(consInst); List<ClassMember> members = cd.body().members(); List<ClassMember> newMembers = new ArrayList<>(members.size() + 1); newMembers.add(cons); newMembers.addAll(members); cd = cd.body(nf.ClassBody(cd.body().position(), newMembers)); return cd; } /** * Find the class type inside which a field with specified name is declared. 
*/ protected ParsedClassType findField(String name, ClassType current) { for (int i = classContext.size() - 1; i >= 0; i--) { ClassInfo cinfo = classContext.get(i); ParsedClassType ct = cinfo.classType(); try { ts.findField(ct, name, current, true); } catch (SemanticException se) { continue; } return ct; } throw new InternalCompilerError("Unable to find field " + name + "."); } /** * Find the class type inside which a field with specified name is declared. */ protected ParsedClassType findMethod(MethodInstance mi, ClassType current) { for (int i = classContext.size() - 1; i >= 0; i--) { ClassInfo cinfo = classContext.get(i); ParsedClassType ct = cinfo.classType(); try { ts.findMethod(ct, mi.name(), mi.formalTypes(), current, true); } catch (SemanticException se) { continue; } return ct; } throw new InternalCompilerError("Unable to find " + mi + "."); } protected ConstructorCall updateConstructorCall(ConstructorCall cc, ClassInfo selfInfo) { ConstructorInstance ci = cc.constructorInstance(); ClassType ct = (ClassType) ci.container(); if (cc.kind().equals(ConstructorCall.THIS)) { // If calling a constructor of the same class, just need to pass all the new formals. ClassInfo cinfo = classContext.peek(); ci = updateConstructorInst(ct, ci, cinfo); List<Formal> formals = cinfo.newConsFormals(); List<Expr> args = new ArrayList<>(cc.arguments().size() + formals.size()); args.addAll(cc.arguments()); for (Formal f : formals) { Id id = nf.Id(Position.compilerGenerated(), f.name()); Local l = nf.Local(Position.compilerGenerated(), id); l = l.localInstance(f.localInstance()); l = (Local) l.type(f.type().type()); args.add(l); } cc = (ConstructorCall) cc.arguments(args); cc = cc.constructorInstance(ci); } else { ClassInfo cinfo = innerClassInfoMap.get(ct.fullName()); if (cinfo != null) { // it is an inner class. if (cinfo.insideCode() == null) { // For member inner classes, only need to add one formal that refers to the outer instance. 
ci = updateConstructorInst(ct, ci, cinfo); List<Expr> args = new ArrayList<>(cc.arguments().size() + 1); Formal f = selfInfo.newConsFormals().get(0); Id id = nf.Id(Position.compilerGenerated(), outerThisName(ct)); Local l = nf.Local(Position.compilerGenerated(), id); l = l.localInstance(f.localInstance()); l = (Local) l.type(f.type().type()); args.addAll(cc.arguments()); args.add(l); cc = (ConstructorCall) cc.arguments(args); cc = cc.constructorInstance(ci); } else if (selfInfo.insideCode() == cinfo.insideCode()) { // The super class is a local class, and they are within the same code. ci = updateConstructorInst(ct, ci, cinfo); List<Formal> formals = cinfo.newConsFormals(); List<Expr> args = new ArrayList<>(cc.arguments().size() + formals.size()); args.addAll(cc.arguments()); for (Formal f : formals) { Id id = nf.Id(Position.compilerGenerated(), f.name()); args.add(nf.Local(Position.compilerGenerated(), id)); } cc = (ConstructorCall) cc.arguments(args); cc = cc.constructorInstance(ci); } else { // The super class is a local class, and they are not within the same code. // Need to first find an enclosing class that is inside the same code as the super class, // and use its fields as arguments. 
Id id = nf.Id(Position.compilerGenerated(), outerThisName(ct)); Local outerLocal = nf.Local(Position.compilerGenerated(), id); outerLocal = outerLocal.localInstance(selfInfo.newConsFormals() .get(0) .localInstance()); Expr outer = outerLocal; ClassType outerCt = selfInfo.classType().outer(); ClassType tThis = ct; ClassInfo outerCInfo = innerClassInfoMap.get(outerCt.fullName()); while (outerCInfo.insideCode() != cinfo.insideCode()) { // outer = nf.Field(Position.compilerGenerated(), outer, newFieldName(outerThisName(tThis))); outer = produceOuterField(tThis, outer); tThis = outerCt; outerCt = outerCt.outer(); outerCInfo = innerClassInfoMap.get(outerCt.fullName()); } ci = updateConstructorInst(ct, ci, cinfo); List<Formal> formals = cinfo.newConsFormals(); List<Expr> args = new ArrayList<>(cc.arguments().size() + formals.size()); args.addAll(cc.arguments()); for (Formal f : formals) { Id fid = nf.Id(Position.compilerGenerated(), newFieldName(f.name())); args.add(nf.Field(Position.compilerGenerated(), outer, fid)); } cc = (ConstructorCall) cc.arguments(args); cc = cc.constructorInstance(ci); } } } return cc; } /* * Add new member classes/methods. */ protected ClassDecl addNewMembers(ClassDecl cd, ClassInfo cinfo) { List<ClassMember> members = new ArrayList<>(cd.body().members().size() + cinfo.newMemberClasses().size() + cinfo.newMemberMethods().size()); members.addAll(cd.body().members()); members.addAll(cinfo.newMemberClasses()); members.addAll(cinfo.newMemberMethods()); ClassBody b = nf.ClassBody(cd.body().position(), members); b = (ClassBody) b.exceptions(cd.body().exceptions()); cd = cd.body(b); return cd; } /** * Find ClassInfo for ClassType ct, from innerClassInfoMap. * @param ct */ protected ClassInfo findClassInfo(ClassType ct) { ClassInfo cinfo = innerClassInfoMap.get(ct.fullName()); return cinfo; } /** * Check whether ct is a type in source language. 
* @param ct */ protected boolean isSourceType(ClassType ct) { return true; } /** * Update new expressions to include necessary arguments (for example, * enclosing instances) and eliminate qualifers. * @param newExpr */ protected Expr updateNewExpr(New newExpr) { ClassType ct = (ClassType) newExpr.type(); ClassInfo classInfo = classContext.peek(); ClassInfo cinfo = findClassInfo(ct); // boolean inCode = ((Boolean)insideCode.peek()).booleanValue(); // if (inCode) { // CodeInfo codeInfo = (CodeInfo)codeContext.peek(); // cinfo = codeInfo.findLocalClassInfo(ct); // } // if (cinfo == null) { // cinfo = classInfo.findInnerClassInfo(ct); // } if (cinfo != null) { ConstructorInstance ci = newExpr.constructorInstance(); List<Formal> formals = cinfo.newConsFormals(); List<Expr> args = new ArrayList<>(newExpr.arguments().size() + formals.size()); List<Type> ftypes = new ArrayList<>(newExpr.arguments().size() + formals.size()); args.addAll(newExpr.arguments()); ftypes.addAll(ci.formalTypes()); Iterator<Formal> it = formals.iterator(); if (cinfo.hasOuterField()) { if (newExpr.qualifier() != null) { args.add(newExpr.qualifier()); ftypes.add(newExpr.qualifier().type()); } else { args.add(nf.This(Position.compilerGenerated())); ftypes.add(classInfo.classType()); } it.next(); // Only if there is an outer field, the first argument needs to be skipped. 
} for (; it.hasNext();) { Formal f = it.next(); Id id = nf.Id(Position.compilerGenerated(), f.name()); args.add(nf.Local(Position.compilerGenerated(), id)); ftypes.add(f.type().type()); } New nExpr = newExpr.arguments(args); ci.setFormalTypes(ftypes); if (newExpr.anonType() != null) { ci.setContainer(newExpr.anonType()); nExpr = nExpr.objectType(nf.CanonicalTypeNode(Position.compilerGenerated(), newExpr.anonType())); } nExpr = nExpr.qualifier(null); nExpr = nExpr.anonType(null); nExpr = nExpr.body(null); nExpr = nExpr.constructorInstance(ci); return nExpr; } else if (ct.isInnerClass() && isSourceType(ct)) { // Maybe we have encountered the new expression of an inner class before it is translated. // But we have to make sure that ct is a type in the source language. ConstructorInstance ci = newExpr.constructorInstance(); List<Expr> args = new ArrayList<>(newExpr.arguments().size() + 1); List<Type> ftypes = new ArrayList<>(newExpr.arguments().size() + 1); args.addAll(newExpr.arguments()); ftypes.addAll(ci.formalTypes()); if (newExpr.qualifier() != null) { args.add(newExpr.qualifier()); ftypes.add(newExpr.qualifier().type()); } else { args.add(nf.This(Position.compilerGenerated())); ftypes.add(classContext.peek().classType()); } ci.setFormalTypes(ftypes); New nExpr = newExpr.arguments(args); nExpr = nExpr.qualifier(null); nExpr = nExpr.constructorInstance(ci); return nExpr; } return newExpr; } protected ConstructorDecl produceDefaultConstructor(ParsedClassType ct, ClassInfo cinfo) { ConstructorCall cc = nf.ConstructorCall(Position.compilerGenerated(), ConstructorCall.SUPER, Collections.<Expr> emptyList()); ConstructorInstance cci = ts.constructorInstance(Position.compilerGenerated(), (ClassType) ct.superType(), Flags.PUBLIC, // XXX: how to find the real flags? 
Collections.<Type> emptyList(), Collections.<Type> emptyList()); cc = cc.constructorInstance(cci); cc = updateConstructorCall(cc, cinfo); Id id = nf.Id(Position.compilerGenerated(), ct.name()); ConstructorDecl cd = nf.ConstructorDecl(Position.compilerGenerated(), Flags.PUBLIC, id, Collections.<Formal> emptyList(), Collections.<TypeNode> emptyList(), nf.Block(Position.compilerGenerated(), cc)); ConstructorInstance cdi = ts.constructorInstance(Position.compilerGenerated(), ct, Flags.PUBLIC, Collections.<Type> emptyList(), Collections.<Type> emptyList()); cd = cd.constructorInstance(cdi); return cd; } protected ClassDecl updateClassDecl(ClassDecl cd, ParsedClassType ct, ClassInfo cinfo) { Flags f = ct.flags().Static(); ct.flags(f); List<ClassMember> members = new LinkedList<>(); List<FieldDecl> fields = produceFieldDecls(ct, cinfo); members.addAll(fields); // for (Iterator it = fields.iterator(); it.hasNext(); ) { // FieldDecl fd = (FieldDecl)it.next(); // ct.addField(fd.fieldInstance()); // } ct.setConstructors(Collections.<ConstructorInstance> emptyList()); for (ClassMember m : cd.body().members()) { if (m instanceof ConstructorDecl) { ConstructorDecl cons = (ConstructorDecl) m; ConstructorDecl newCons = updateConstructor(cd, ct, cons, cinfo); members.add(newCons); ct.addConstructor(newCons.constructorInstance()); } else { members.add(m); } } if (ct.constructors().size() == 0) { // Add a default constructor for inner classes, if there is none, // in case that it is inherited from another inner class, and // the default constructor is not having "default" behavior. 
ConstructorDecl cons = updateConstructor(cd, ct, produceDefaultConstructor(ct, cinfo), cinfo); members.add(cons); ct.addConstructor(cons.constructorInstance()); } List<ClassDecl> newMemClasses = cinfo.newMemberClasses(); members.addAll(newMemClasses); List<MethodDecl> newMethods = cinfo.newMemberMethods(); members.addAll(newMethods); // for (Iterator it = newMemClasses.iterator(); it.hasNext(); ) { // ClassDecl memCd = (ClassDecl)it.next(); // ct.addMemberClass(memCd.type()); // } ClassBody cb = cd.body(); cb = cb.members(members); cd = cd.body(cb); cd = cd.type(ct); cd = cd.flags(f); return cd; } protected List<FieldDecl> produceFieldDecls(ClassType ct, ClassInfo cinfo) { List<Formal> newFormals = cinfo.newConsFormals(); List<FieldDecl> fields = new ArrayList<>(newFormals.size()); for (Formal formal : newFormals) { Id id = nf.Id(Position.compilerGenerated(), newFieldName(formal.name())); FieldDecl fd = nf.FieldDecl(Position.compilerGenerated(), Flags.PROTECTED, formal.type(), id); FieldInstance fi = ts.fieldInstance(Position.compilerGenerated(), ct, Flags.PROTECTED, formal.type().type(), newFieldName(formal.name())); fd = fd.fieldInstance(fi); fields.add(fd); } return fields; } // Return the name of the "outer this" field and formal in ct. protected String outerThisName(ClassType ct) { return "outer$this"; } /* * ct - ParsedClassType of the inner class that we are dealing with. * oct - ParsedClassType of the outer class. 
*/ protected Formal produceOuterFormal(ParsedClassType ct, ParsedClassType oct) { Id fn = nf.Id(Position.compilerGenerated(), outerThisName(ct)); Formal formal = nf.Formal(Position.compilerGenerated(), Flags.NONE, nf.CanonicalTypeNode(Position.compilerGenerated(), oct), fn); formal = formal.localInstance(ts.localInstance(Position.compilerGenerated(), formal.flags(), formal.type().type(), formal.name())); return formal; } protected Field produceOuterField(ClassType ct, Expr rec) { Id id = nf.Id(Position.compilerGenerated(), newFieldName(outerThisName(ct))); Field f = nf.Field(Position.compilerGenerated(), rec, id); f = f.fieldInstance(ts.fieldInstance(Position.compilerGenerated(), ct, Flags.PROTECTED, ct.container(), // FIXME: use the type of outer formal stored in cinfo? f.name())); return f; } protected ConstructorInstance updateConstructorInst(ClassType ct, ConstructorInstance ci, ClassInfo cinfo) { List<Formal> newFormals = cinfo.newConsFormals(); List<Type> ftypes = new ArrayList<>(ci.formalTypes().size() + newFormals.size()); ftypes.addAll(ci.formalTypes()); for (Formal f : newFormals) { ftypes.add(f.type().type()); } ci.setFormalTypes(ftypes); ci.setContainer(ct); return ci; } protected ConstructorCall produceDefaultSuperConstructorCall(ClassType ct) { ConstructorCall superCc = nf.ConstructorCall(Position.compilerGenerated(), ConstructorCall.SUPER, Collections.<Expr> emptyList()); ConstructorInstance superCi = ts.constructorInstance(Position.compilerGenerated(), (ClassType) ct.superType(), Flags.PUBLIC, Collections.<Type> emptyList(), Collections.<Type> emptyList()); superCc = superCc.constructorInstance(superCi); superCc = updateConstructorCall(superCc, classContext.peek()); return superCc; } // Add new argument(s) to a constructor protected ConstructorDecl updateConstructor(ClassDecl cd, ClassType ct, ConstructorDecl cons, ClassInfo cinfo) { List<Formal> newFormals = cinfo.newConsFormals(); List<Formal> formals = new ArrayList<>(cons.formals().size() + 
newFormals.size()); formals.addAll(cons.formals()); formals.addAll(newFormals); List<Stmt> oldStmts = cons.body().statements(); List<Stmt> stmts = new ArrayList<>(oldStmts.size() + newFormals.size()); Iterator<Stmt> it = oldStmts.iterator(); // Check whether the first statement is a constructor call. if (it.hasNext()) { Stmt s = it.next(); if (s instanceof ConstructorCall) { stmts.add(s); // If it calls another constructor in the same class, we don't need to initialize the field. if (((ConstructorCall) s).kind() != ConstructorCall.THIS) { stmts.addAll(produceFieldInits(cinfo)); } } else { // If there is no explicit constructor call, we need to add the default // constructor call, and update it, in case it is from another inner class, // and therefore needs adding new formals. stmts.add(produceDefaultSuperConstructorCall(ct)); stmts.addAll(produceFieldInits(cinfo)); stmts.add(s); } } else { stmts.add(produceDefaultSuperConstructorCall(ct)); stmts.addAll(produceFieldInits(cinfo)); } while (it.hasNext()) { stmts.add(it.next()); } Block b = nf.Block(Position.compilerGenerated(), stmts); Id id = nf.Id(Position.compilerGenerated(), ct.name()); ConstructorDecl newCons = nf.ConstructorDecl(Position.compilerGenerated(), cons.flags(), id, formals, cons.throwTypes(), b); newCons = newCons.constructorInstance(updateConstructorInst(ct, cons.constructorInstance(), cinfo)); return newCons; } // Generate a list that contains all the field assignments for initializing newly added fields. 
protected List<Stmt> produceFieldInits(ClassInfo cinfo) { List<Formal> newFormals = cinfo.newConsFormals(); List<Stmt> fInits = new ArrayList<>(newFormals.size()); for (Formal formal : newFormals) { Id formalId = nf.Id(Position.compilerGenerated(), formal.name()); Local local = nf.Local(Position.compilerGenerated(), formalId); local = local.localInstance(formal.localInstance()); Special thisExpr = nf.This(Position.compilerGenerated()); thisExpr = (Special) thisExpr.type(cinfo.classType()); Id fieldId = nf.Id(Position.compilerGenerated(), newFieldName(formal.name())); Field field = nf.Field(Position.compilerGenerated(), thisExpr, fieldId); field = field.fieldInstance(ts.fieldInstance(Position.compilerGenerated(), cinfo.classType(), Flags.PROTECTED, formal.type().type(), field.name())); FieldAssign fAssign = nf.FieldAssign(Position.compilerGenerated(), field, Assign.ASSIGN, local); Stmt stmt = nf.Eval(Position.compilerGenerated(), fAssign); fInits.add(stmt); } return fInits; } }
lgpl-2.1
rbraeunlich/ReliableADGSASimulation
target/classes/lib/peersim-1.0.5/src/peersim/core/IdleProtocol.java
4282
/* * Copyright (c) 2003-2005 The BISON Project * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License version 2 as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. * */ package peersim.core; import peersim.config.Configuration; /** * A protocol that stores links. It does nothing apart from that. * It is useful to model a static link-structure * (topology). The only function of this protocol is to serve as a source of * neighborhood information for other protocols. */ public class IdleProtocol implements Protocol, Linkable { // -------------------------------------------------------------------------- // Parameters // -------------------------------------------------------------------------- /** * Default init capacity */ private static final int DEFAULT_INITIAL_CAPACITY = 10; /** * Initial capacity. Defaults to {@value #DEFAULT_INITIAL_CAPACITY}. * @config */ private static final String PAR_INITCAP = "capacity"; // -------------------------------------------------------------------------- // Fields // -------------------------------------------------------------------------- /** Neighbors */ protected Node[] neighbors; /** Actual number of neighbors in the array */ protected int len; // -------------------------------------------------------------------------- // Initialization // -------------------------------------------------------------------------- public IdleProtocol(String s) { neighbors = new Node[Configuration.getInt(s + "." 
+ PAR_INITCAP, DEFAULT_INITIAL_CAPACITY)]; len = 0; } //-------------------------------------------------------------------------- public Object clone() { IdleProtocol ip = null; try { ip = (IdleProtocol) super.clone(); } catch( CloneNotSupportedException e ) {} // never happens ip.neighbors = new Node[neighbors.length]; System.arraycopy(neighbors, 0, ip.neighbors, 0, len); ip.len = len; return ip; } // -------------------------------------------------------------------------- // Methods // -------------------------------------------------------------------------- public boolean contains(Node n) { for (int i = 0; i < len; i++) { if (neighbors[i] == n) return true; } return false; } // -------------------------------------------------------------------------- /** Adds given node if it is not already in the network. There is no limit * to the number of nodes that can be added. */ public boolean addNeighbor(Node n) { for (int i = 0; i < len; i++) { if (neighbors[i] == n) return false; } if (len == neighbors.length) { Node[] temp = new Node[3 * neighbors.length / 2]; System.arraycopy(neighbors, 0, temp, 0, neighbors.length); neighbors = temp; } neighbors[len] = n; len++; return true; } // -------------------------------------------------------------------------- public Node getNeighbor(int i) { return neighbors[i]; } // -------------------------------------------------------------------------- public int degree() { return len; } // -------------------------------------------------------------------------- public void pack() { if (len == neighbors.length) return; Node[] temp = new Node[len]; System.arraycopy(neighbors, 0, temp, 0, len); neighbors = temp; } // -------------------------------------------------------------------------- public String toString() { if( neighbors == null ) return "DEAD!"; StringBuffer buffer = new StringBuffer(); buffer.append("len=" + len + " maxlen=" + neighbors.length + " ["); for (int i = 0; i < len; ++i) { 
buffer.append(neighbors[i].getIndex() + " "); } return buffer.append("]").toString(); } // -------------------------------------------------------------------------- public void onKill() { neighbors = null; len = 0; } }
lgpl-2.1
cfallin/soot
src/soot/jimple/spark/pag/MethodPAG.java
11382
/* Soot - a J*va Optimization Framework * Copyright (C) 2003 Ondrej Lhotak * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ package soot.jimple.spark.pag; import java.util.Collections; import java.util.HashSet; import java.util.Set; import soot.ArrayType; import soot.Body; import soot.Context; import soot.EntryPoints; import soot.G; import soot.RefLikeType; import soot.RefType; import soot.Scene; import soot.SootClass; import soot.SootMethod; import soot.Type; import soot.Unit; import soot.VoidType; import soot.jimple.Stmt; import soot.jimple.spark.builder.MethodNodeFactory; import soot.jimple.spark.internal.SparkLibraryHelper; import soot.options.CGOptions; import soot.util.NumberedString; import soot.util.queue.ChunkedQueue; import soot.util.queue.QueueReader; /** Part of a pointer assignment graph for a single method. * @author Ondrej Lhotak */ public final class MethodPAG { private PAG pag; public PAG pag() { return pag; } protected MethodPAG( PAG pag, SootMethod m ) { this.pag = pag; this.method = m; this.nodeFactory = new MethodNodeFactory( pag, this ); } private Set<Context> addedContexts; /** Adds this method to the main PAG, with all VarNodes parameterized by * varNodeParameter. 
*/ public void addToPAG( Context varNodeParameter ) { if( !hasBeenBuilt ) throw new RuntimeException(); if( varNodeParameter == null ) { if( hasBeenAdded ) return; hasBeenAdded = true; } else { if( addedContexts == null ) addedContexts = new HashSet<Context>(); if( !addedContexts.add( varNodeParameter ) ) return; } QueueReader<Node> reader = internalReader.clone(); while(reader.hasNext()) { Node src = (Node) reader.next(); src = parameterize( src, varNodeParameter ); Node dst = (Node) reader.next(); dst = parameterize( dst, varNodeParameter ); pag.addEdge( src, dst ); } reader = inReader.clone(); while(reader.hasNext()) { Node src = (Node) reader.next(); Node dst = (Node) reader.next(); dst = parameterize( dst, varNodeParameter ); pag.addEdge( src, dst ); } reader = outReader.clone(); while(reader.hasNext()) { Node src = (Node) reader.next(); src = parameterize( src, varNodeParameter ); Node dst = (Node) reader.next(); pag.addEdge( src, dst ); } } public void addInternalEdge( Node src, Node dst ) { if( src == null ) return; internalEdges.add( src ); internalEdges.add( dst ); if (hasBeenAdded) { pag.addEdge(src, dst); } } public void addInEdge( Node src, Node dst ) { if( src == null ) return; inEdges.add( src ); inEdges.add( dst ); if (hasBeenAdded) { pag.addEdge(src, dst); } } public void addOutEdge( Node src, Node dst ) { if( src == null ) return; outEdges.add( src ); outEdges.add( dst ); if (hasBeenAdded) { pag.addEdge(src, dst); } } private final ChunkedQueue<Node> internalEdges = new ChunkedQueue<Node>(); private final ChunkedQueue<Node> inEdges = new ChunkedQueue<Node>(); private final ChunkedQueue<Node> outEdges = new ChunkedQueue<Node>(); private final QueueReader<Node> internalReader = internalEdges.reader(); private final QueueReader<Node> inReader = inEdges.reader(); private final QueueReader<Node> outReader = outEdges.reader(); SootMethod method; public SootMethod getMethod() { return method; } protected MethodNodeFactory nodeFactory; public 
MethodNodeFactory nodeFactory() { return nodeFactory; } public static MethodPAG v( PAG pag, SootMethod m ) { MethodPAG ret = G.v().MethodPAG_methodToPag.get( m ); if( ret == null ) { ret = new MethodPAG( pag, m ); G.v().MethodPAG_methodToPag.put( m, ret ); } return ret; } public void build() { if( hasBeenBuilt ) return; hasBeenBuilt = true; if( method.isNative() ) { if( pag().getOpts().simulate_natives() ) { buildNative(); } } else { if( method.isConcrete() && !method.isPhantom() ) { buildNormal(); } } addMiscEdges(); } protected VarNode parameterize( LocalVarNode vn, Context varNodeParameter ) { SootMethod m = vn.getMethod(); if( m != method && m != null ) throw new RuntimeException( "VarNode "+vn+" with method "+m+" parameterized in method "+method ); //System.out.println( "parameterizing "+vn+" with "+varNodeParameter ); return pag().makeContextVarNode( vn, varNodeParameter ); } protected FieldRefNode parameterize( FieldRefNode frn, Context varNodeParameter ) { return pag().makeFieldRefNode( (VarNode) parameterize( frn.getBase(), varNodeParameter ), frn.getField() ); } public Node parameterize( Node n, Context varNodeParameter ) { if( varNodeParameter == null ) return n; if( n instanceof LocalVarNode ) return parameterize( (LocalVarNode) n, varNodeParameter); if( n instanceof FieldRefNode ) return parameterize( (FieldRefNode) n, varNodeParameter); return n; } protected boolean hasBeenAdded = false; protected boolean hasBeenBuilt = false; protected void buildNormal() { Body b = method.retrieveActiveBody(); for (Unit u : b.getUnits()) nodeFactory.handleStmt( (Stmt) u ); } protected void buildNative() { ValNode thisNode = null; ValNode retNode = null; if( !method.isStatic() ) { thisNode = (ValNode) nodeFactory.caseThis(); } if(method.getReturnType() instanceof RefLikeType ) { retNode = (ValNode) nodeFactory.caseRet(); // on library analysis we assume that the return type of an native method can // be anything matching to the declared type. 
if (pag.getCGOpts().library() != CGOptions.library_disabled) { Type retType = method.getReturnType(); retType.apply(new SparkLibraryHelper(pag, retNode, method)); } } ValNode[] args = new ValNode[ method.getParameterCount() ]; for( int i = 0; i < method.getParameterCount(); i++ ) { if( !( method.getParameterType(i) instanceof RefLikeType ) ) continue; args[i] = (ValNode) nodeFactory.caseParm(i); } pag.nativeMethodDriver.process( method, thisNode, retNode, args ); } private final static String mainSubSignature = SootMethod.getSubSignature( "main", Collections.<Type>singletonList( ArrayType.v(RefType.v("java.lang.String"), 1) ), VoidType.v() ); protected void addMiscEdges() { // Add node for parameter (String[]) in main method final String signature = method.getSignature(); if( method.getSubSignature().equals( mainSubSignature )) { addInEdge( pag().nodeFactory().caseArgv(), nodeFactory.caseParm(0) ); } else if(signature.equals( "<java.lang.Thread: void <init>(java.lang.ThreadGroup,java.lang.String)>" ) ) { addInEdge( pag().nodeFactory().caseMainThread(), nodeFactory.caseThis() ); addInEdge( pag().nodeFactory().caseMainThreadGroup(), nodeFactory.caseParm( 0 ) ); } else if (signature.equals( "<java.lang.ref.Finalizer: void <init>(java.lang.Object)>")) { addInEdge( nodeFactory.caseThis(), pag().nodeFactory().caseFinalizeQueue()); } else if (signature.equals( "<java.lang.ref.Finalizer: void runFinalizer()>")) { addInEdge(pag.nodeFactory().caseFinalizeQueue(), nodeFactory.caseThis()); } else if (signature.equals( "<java.lang.ref.Finalizer: void access$100(java.lang.Object)>")) { addInEdge(pag.nodeFactory().caseFinalizeQueue(), nodeFactory.caseParm(0)); } else if (signature.equals( "<java.lang.ClassLoader: void <init>()>")) { addInEdge(pag.nodeFactory().caseDefaultClassLoader(), nodeFactory.caseThis()); } else if (signature.equals("<java.lang.Thread: void exit()>")) { addInEdge(pag.nodeFactory().caseMainThread(), nodeFactory.caseThis()); } else if (signature.equals( 
"<java.security.PrivilegedActionException: void <init>(java.lang.Exception)>")) { addInEdge(pag.nodeFactory().caseThrow(), nodeFactory.caseParm(0)); addInEdge(pag.nodeFactory().casePrivilegedActionException(), nodeFactory.caseThis()); } if (method.getNumberedSubSignature().equals(sigCanonicalize)) { SootClass cl = method.getDeclaringClass(); while (true) { if (cl.equals(Scene.v().getSootClass("java.io.FileSystem"))) { addInEdge(pag.nodeFactory().caseCanonicalPath(), nodeFactory.caseRet()); } if (!cl.hasSuperclass()) break; cl = cl.getSuperclass(); } } boolean isImplicit = false; for (SootMethod implicitMethod : EntryPoints.v().implicit()) { if (implicitMethod.getNumberedSubSignature().equals( method.getNumberedSubSignature())) { isImplicit = true; break; } } if (isImplicit) { SootClass c = method.getDeclaringClass(); outer: do { while (!c.getName().equals("java.lang.ClassLoader")) { if (!c.hasSuperclass()) { break outer; } c = c.getSuperclass(); } if (method.getName().equals("<init>")) continue; addInEdge(pag().nodeFactory().caseDefaultClassLoader(), nodeFactory.caseThis()); addInEdge(pag().nodeFactory().caseMainClassNameString(), nodeFactory.caseParm(0)); } while (false); } } protected final NumberedString sigCanonicalize = Scene.v().getSubSigNumberer(). findOrAdd("java.lang.String canonicalize(java.lang.String)"); }
lgpl-2.1
druid-io/druid
processing/src/main/java/org/apache/druid/segment/filter/Filters.java
22573
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.segment.filter; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import it.unimi.dsi.fastutil.ints.IntIterable; import it.unimi.dsi.fastutil.ints.IntIterator; import it.unimi.dsi.fastutil.ints.IntList; import org.apache.druid.collections.bitmap.ImmutableBitmap; import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.BitmapResultFactory; import org.apache.druid.query.Query; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.filter.BitmapIndexSelector; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.query.filter.DruidPredicateFactory; import org.apache.druid.query.filter.Filter; import org.apache.druid.query.filter.FilterTuning; import org.apache.druid.query.filter.ValueMatcher; import org.apache.druid.segment.ColumnProcessors; import org.apache.druid.segment.ColumnSelector; import org.apache.druid.segment.ColumnSelectorFactory; import org.apache.druid.segment.IntIteratorUtils; import org.apache.druid.segment.column.BitmapIndex; import 
org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.data.CloseableIndexed; import org.apache.druid.segment.data.Indexed; import org.apache.druid.segment.filter.cnf.CalciteCnfHelper; import org.apache.druid.segment.filter.cnf.HiveCnfHelper; import org.apache.druid.segment.join.filter.AllNullColumnSelectorFactory; import javax.annotation.Nullable; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; /** * */ public class Filters { private static final ColumnSelectorFactory ALL_NULL_COLUMN_SELECTOR_FACTORY = new AllNullColumnSelectorFactory(); /** * Convert a list of DimFilters to a list of Filters. * * @param dimFilters list of DimFilters, should all be non-null * * @return list of Filters */ public static List<Filter> toFilters(List<DimFilter> dimFilters) { return dimFilters.stream().map(Filters::toFilter).collect(Collectors.toList()); } /** * Convert a DimFilter to a Filter. * * @param dimFilter dimFilter * * @return converted filter, or null if input was null */ @Nullable public static Filter toFilter(@Nullable DimFilter dimFilter) { return dimFilter == null ? null : dimFilter.toOptimizedFilter(); } /** * Create a ValueMatcher that compares row values to the provided string. * <p> * An implementation of this method should be able to handle dimensions of various types. * * @param columnSelectorFactory Selector for columns. * @param columnName The column to filter. * @param value The value to match against, represented as a String. * * @return An object that matches row values on the provided value. 
*/ public static ValueMatcher makeValueMatcher( final ColumnSelectorFactory columnSelectorFactory, final String columnName, final String value ) { return ColumnProcessors.makeProcessor( columnName, new ConstantValueMatcherFactory(value), columnSelectorFactory ); } /** * Create a ValueMatcher that applies a predicate to row values. * <p> * The caller provides a predicate factory that can create a predicate for each value type supported by Druid. * See {@link DruidPredicateFactory} for more information. * <p> * When creating the ValueMatcher, the ValueMatcherFactory implementation should decide what type of predicate * to create from the predicate factory based on the ValueType of the specified dimension. * * @param columnSelectorFactory Selector for columns. * @param columnName The column to filter. * @param predicateFactory Predicate factory * * @return An object that applies a predicate to row values */ public static ValueMatcher makeValueMatcher( final ColumnSelectorFactory columnSelectorFactory, final String columnName, final DruidPredicateFactory predicateFactory ) { return ColumnProcessors.makeProcessor( columnName, new PredicateValueMatcherFactory(predicateFactory), columnSelectorFactory ); } public static ImmutableBitmap allFalse(final BitmapIndexSelector selector) { return selector.getBitmapFactory().makeEmptyImmutableBitmap(); } public static ImmutableBitmap allTrue(final BitmapIndexSelector selector) { return selector.getBitmapFactory() .complement(selector.getBitmapFactory().makeEmptyImmutableBitmap(), selector.getNumRows()); } /** * Transform an iterable of indexes of bitmaps to an iterable of bitmaps * * @param indexes indexes of bitmaps * @param bitmapIndex an object to retrieve bitmaps using indexes * * @return an iterable of bitmaps */ public static Iterable<ImmutableBitmap> bitmapsFromIndexes(final IntIterable indexes, final BitmapIndex bitmapIndex) { // Do not use Iterables.transform() to avoid boxing/unboxing integers. 
return new Iterable<ImmutableBitmap>() { @Override public Iterator<ImmutableBitmap> iterator() { final IntIterator iterator = indexes.iterator(); return new Iterator<ImmutableBitmap>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public ImmutableBitmap next() { return bitmapIndex.getBitmap(iterator.nextInt()); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } /** * Return the union of bitmaps for all values matching a particular predicate. * * @param dimension dimension to look at * @param selector bitmap selector * @param bitmapResultFactory * @param predicate predicate to use * * @return bitmap of matching rows * * @see #estimateSelectivity(String, BitmapIndexSelector, Predicate) */ public static <T> T matchPredicate( final String dimension, final BitmapIndexSelector selector, BitmapResultFactory<T> bitmapResultFactory, final Predicate<String> predicate ) { return bitmapResultFactory.unionDimensionValueBitmaps(matchPredicateNoUnion(dimension, selector, predicate)); } /** * Return an iterable of bitmaps for all values matching a particular predicate. Unioning these bitmaps * yields the same result that {@link #matchPredicate(String, BitmapIndexSelector, BitmapResultFactory, Predicate)} * would have returned. 
* * @param dimension dimension to look at * @param selector bitmap selector * @param predicate predicate to use * * @return iterable of bitmaps of matching rows */ public static Iterable<ImmutableBitmap> matchPredicateNoUnion( final String dimension, final BitmapIndexSelector selector, final Predicate<String> predicate ) { Preconditions.checkNotNull(dimension, "dimension"); Preconditions.checkNotNull(selector, "selector"); Preconditions.checkNotNull(predicate, "predicate"); // Missing dimension -> match all rows if the predicate matches null; match no rows otherwise try (final CloseableIndexed<String> dimValues = selector.getDimensionValues(dimension)) { if (dimValues == null || dimValues.size() == 0) { return ImmutableList.of(predicate.apply(null) ? allTrue(selector) : allFalse(selector)); } // Apply predicate to all dimension values and union the matching bitmaps final BitmapIndex bitmapIndex = selector.getBitmapIndex(dimension); return makePredicateQualifyingBitmapIterable(bitmapIndex, predicate, dimValues); } catch (IOException e) { throw new UncheckedIOException(e); } } /** * Return an estimated selectivity for bitmaps of all values matching the given predicate. * * @param dimension dimension to look at * @param indexSelector bitmap selector * @param predicate predicate to use * * @return estimated selectivity * * @see #matchPredicate(String, BitmapIndexSelector, BitmapResultFactory, Predicate) */ public static double estimateSelectivity( final String dimension, final BitmapIndexSelector indexSelector, final Predicate<String> predicate ) { Preconditions.checkNotNull(dimension, "dimension"); Preconditions.checkNotNull(indexSelector, "selector"); Preconditions.checkNotNull(predicate, "predicate"); // Missing dimension -> match all rows if the predicate matches null; match no rows otherwise try (final CloseableIndexed<String> dimValues = indexSelector.getDimensionValues(dimension)) { if (dimValues == null || dimValues.size() == 0) { return predicate.apply(null) ? 
1. : 0.; } // Apply predicate to all dimension values and union the matching bitmaps final BitmapIndex bitmapIndex = indexSelector.getBitmapIndex(dimension); return estimateSelectivity( bitmapIndex, IntIteratorUtils.toIntList( makePredicateQualifyingIndexIterable(bitmapIndex, predicate, dimValues).iterator() ), indexSelector.getNumRows() ); } catch (IOException e) { throw new UncheckedIOException(e); } } /** * Return an estimated selectivity for bitmaps for the dimension values given by dimValueIndexes. * * @param bitmapIndex bitmap index * @param bitmaps bitmaps to extract, by index * @param totalNumRows number of rows in the column associated with this bitmap index * * @return estimated selectivity */ public static double estimateSelectivity( final BitmapIndex bitmapIndex, final IntList bitmaps, final long totalNumRows ) { long numMatchedRows = 0; for (int i = 0; i < bitmaps.size(); i++) { final ImmutableBitmap bitmap = bitmapIndex.getBitmap(bitmaps.getInt(i)); numMatchedRows += bitmap.size(); } return Math.min(1., (double) numMatchedRows / totalNumRows); } /** * Return an estimated selectivity for bitmaps given by an iterator. 
* * @param bitmaps iterator of bitmaps * @param totalNumRows number of rows in the column associated with this bitmap index * * @return estimated selectivity */ public static double estimateSelectivity( final Iterator<ImmutableBitmap> bitmaps, final long totalNumRows ) { long numMatchedRows = 0; while (bitmaps.hasNext()) { final ImmutableBitmap bitmap = bitmaps.next(); numMatchedRows += bitmap.size(); } return Math.min(1., (double) numMatchedRows / totalNumRows); } private static Iterable<ImmutableBitmap> makePredicateQualifyingBitmapIterable( final BitmapIndex bitmapIndex, final Predicate<String> predicate, final Indexed<String> dimValues ) { return bitmapsFromIndexes(makePredicateQualifyingIndexIterable(bitmapIndex, predicate, dimValues), bitmapIndex); } private static IntIterable makePredicateQualifyingIndexIterable( final BitmapIndex bitmapIndex, final Predicate<String> predicate, final Indexed<String> dimValues ) { return new IntIterable() { @Override public IntIterator iterator() { return new IntIterator() { private final int bitmapIndexCardinality = bitmapIndex.getCardinality(); private int nextIndex = 0; private int found; { found = findNextIndex(); } private int findNextIndex() { while (nextIndex < bitmapIndexCardinality && !predicate.apply(dimValues.get(nextIndex))) { nextIndex++; } if (nextIndex < bitmapIndexCardinality) { return nextIndex++; } else { return -1; } } @Override public boolean hasNext() { return found != -1; } @Override public int nextInt() { int foundIndex = this.found; if (foundIndex == -1) { throw new NoSuchElementException(); } this.found = findNextIndex(); return foundIndex; } }; } }; } public static boolean supportsSelectivityEstimation( Filter filter, String dimension, ColumnSelector columnSelector, BitmapIndexSelector indexSelector ) { if (filter.supportsBitmapIndex(indexSelector)) { final ColumnHolder columnHolder = columnSelector.getColumnHolder(dimension); if (columnHolder != null) { return 
columnHolder.getCapabilities().hasMultipleValues().isFalse(); } } return false; } @Nullable public static Filter convertToCNFFromQueryContext(Query query, @Nullable Filter filter) { if (filter == null) { return null; } boolean useCNF = query.getContextBoolean(QueryContexts.USE_FILTER_CNF_KEY, QueryContexts.DEFAULT_USE_FILTER_CNF); return useCNF ? Filters.toCnf(filter) : filter; } public static Filter toCnf(Filter current) { // Push down NOT filters to leaves if possible to remove NOT on NOT filters and reduce hierarchy. // ex) ~(a OR ~b) => ~a AND b current = HiveCnfHelper.pushDownNot(current); // Flatten nested AND and OR filters if possible. // ex) a AND (b AND c) => a AND b AND c current = HiveCnfHelper.flatten(current); // Pull up AND filters first to convert the filter into a conjunctive form. // It is important to pull before CNF conversion to not create a huge CNF. // ex) (a AND b) OR (a AND c AND d) => a AND (b OR (c AND d)) current = CalciteCnfHelper.pull(current); // Convert filter to CNF. // a AND (b OR (c AND d)) => a AND (b OR c) AND (b OR d) current = HiveCnfHelper.convertToCnf(current); // Flatten again to remove any flattenable nested AND or OR filters created during CNF conversion. current = HiveCnfHelper.flatten(current); return current; } /** * This method provides a "standard" implementation of {@link Filter#shouldUseBitmapIndex(BitmapIndexSelector)} which takes * a {@link Filter}, a {@link BitmapIndexSelector}, and {@link FilterTuning} to determine if: * a) the filter supports bitmap indexes for all required columns * b) the filter tuning specifies that it should use the index * c) the cardinality of the column is above the minimum threshold and below the maximum threshold to use the index * * If all these things are true, {@link org.apache.druid.segment.QueryableIndexStorageAdapter} will utilize the * indexes. 
*/
  public static boolean shouldUseBitmapIndex(
      Filter filter,
      BitmapIndexSelector indexSelector,
      @Nullable FilterTuning filterTuning
  )
  {
    // No per-query tuning supplied: fall back to the default tuning for this filter.
    final FilterTuning tuning = filterTuning != null ? filterTuning : FilterTuning.createDefault(filter, indexSelector);
    if (filter.supportsBitmapIndex(indexSelector) && tuning.getUseBitmapIndex()) {
      // Use the bitmap index only when every required column's cardinality lies within the tuned [min, max] bounds.
      return filter.getRequiredColumns().stream().allMatch(column -> {
        final BitmapIndex index = indexSelector.getBitmapIndex(column);
        Preconditions.checkNotNull(index, "Column does not have a bitmap index");
        final int cardinality = index.getCardinality();
        return cardinality >= tuning.getMinCardinalityToUseBitmapIndex()
               && cardinality <= tuning.getMaxCardinalityToUseBitmapIndex();
      });
    }
    return false;
  }

  /**
   * Create a filter representing an AND relationship across a list of filters. Deduplicates filters, flattens stacks,
   * and removes null filters and literally-true filters. (An AND containing a literally-false filter collapses to
   * FALSE.)
   *
   * @param filters List of filters
   *
   * @return If "filters" has more than one filter remaining after processing, returns {@link AndFilter}.
   *         If "filters" has a single element remaining after processing, return that filter alone.
   *
   * @throws IllegalArgumentException if "filters" is empty or only contains nulls
   */
  public static Filter and(final List<Filter> filters)
  {
    return maybeAnd(filters).orElseThrow(() -> new IAE("Expected nonempty filters list"));
  }

  /**
   * Like {@link #and}, but returns an empty Optional instead of throwing an exception if "filters" is empty
   * or only contains nulls.
   */
  public static Optional<Filter> maybeAnd(List<Filter> filters)
  {
    final List<Filter> nonNullFilters = nonNull(filters);

    if (nonNullFilters.isEmpty()) {
      return Optional.empty();
    }

    // Flattening drops nested ANDs, duplicates, and literally-true children.
    final LinkedHashSet<Filter> filtersToUse = flattenAndChildren(nonNullFilters);

    if (filtersToUse.isEmpty()) {
      assert !filters.isEmpty();
      // Original "filters" list must have been 100% literally-true filters.
      return Optional.of(TrueFilter.instance());
    } else if (filtersToUse.stream().anyMatch(filter -> filter instanceof FalseFilter)) {
      // AND of FALSE with anything is FALSE.
      return Optional.of(FalseFilter.instance());
    } else if (filtersToUse.size() == 1) {
      return Optional.of(Iterables.getOnlyElement(filtersToUse));
    } else {
      return Optional.of(new AndFilter(filtersToUse));
    }
  }

  /**
   * Create a filter representing an OR relationship across a list of filters. Deduplicates filters, flattens stacks,
   * and removes null filters and literally-false filters. (An OR containing a literally-true filter collapses to
   * TRUE.)
   *
   * @param filters List of filters
   *
   * @return If "filters" has more than one filter remaining after processing, returns {@link OrFilter}.
   *         If "filters" has a single element remaining after processing, return that filter alone.
   *
   * @throws IllegalArgumentException if "filters" is empty
   */
  public static Filter or(final List<Filter> filters)
  {
    return maybeOr(filters).orElseThrow(() -> new IAE("Expected nonempty filters list"));
  }

  /**
   * Like {@link #or}, but returns an empty Optional instead of throwing an exception if "filters" is empty
   * or only contains nulls.
   */
  public static Optional<Filter> maybeOr(final List<Filter> filters)
  {
    final List<Filter> nonNullFilters = nonNull(filters);

    if (nonNullFilters.isEmpty()) {
      return Optional.empty();
    }

    // Flattening drops nested ORs, duplicates, and literally-false children.
    final LinkedHashSet<Filter> filtersToUse = flattenOrChildren(nonNullFilters);

    if (filtersToUse.isEmpty()) {
      assert !nonNullFilters.isEmpty();
      // Original "filters" list must have been 100% literally-false filters.
      return Optional.of(FalseFilter.instance());
    } else if (filtersToUse.stream().anyMatch(filter -> filter instanceof TrueFilter)) {
      // OR of TRUE with anything is TRUE.
      return Optional.of(TrueFilter.instance());
    } else if (filtersToUse.size() == 1) {
      return Optional.of(Iterables.getOnlyElement(filtersToUse));
    } else {
      return Optional.of(new OrFilter(filtersToUse));
    }
  }

  /**
   * Converts the given filter to conjunctive normal form and returns its top-level conjuncts.
   *
   * @param filter the filter.
   *
   * @return The normalized or clauses for the provided filter.
   */
  public static List<Filter> toNormalizedOrClauses(Filter filter)
  {
    Filter normalizedFilter = Filters.toCnf(filter);

    // List of candidates for pushdown
    // CNF normalization will generate either
    // - an AND filter with multiple subfilters
    // - or a single non-AND subfilter which cannot be split further
    List<Filter> normalizedOrClauses;
    if (normalizedFilter instanceof AndFilter) {
      normalizedOrClauses = new ArrayList<>(((AndFilter) normalizedFilter).getFilters());
    } else {
      normalizedOrClauses = Collections.singletonList(normalizedFilter);
    }
    return normalizedOrClauses;
  }

  /**
   * Returns whether the given filter matches a row in which every column yields null, as evaluated against
   * {@code ALL_NULL_COLUMN_SELECTOR_FACTORY}.
   */
  public static boolean filterMatchesNull(Filter filter)
  {
    ValueMatcher valueMatcher = filter.makeMatcher(ALL_NULL_COLUMN_SELECTOR_FACTORY);
    return valueMatcher.matches();
  }

  /**
   * Returns a list equivalent to the input list, but with nulls removed. If the original list has no nulls,
   * it is returned directly.
   */
  private static List<Filter> nonNull(final List<Filter> filters)
  {
    if (filters.stream().anyMatch(Objects::isNull)) {
      return filters.stream().filter(Objects::nonNull).collect(Collectors.toList());
    } else {
      return filters;
    }
  }

  /**
   * Flattens children of an AND (recursively), removes duplicates, and removes literally-true filters.
   */
  private static LinkedHashSet<Filter> flattenAndChildren(final Collection<Filter> filters)
  {
    final LinkedHashSet<Filter> retVal = new LinkedHashSet<>();

    for (Filter child : filters) {
      if (child instanceof AndFilter) {
        retVal.addAll(flattenAndChildren(((AndFilter) child).getFilters()));
      } else if (!(child instanceof TrueFilter)) {
        retVal.add(child);
      }
    }

    return retVal;
  }

  /**
   * Flattens children of an OR (recursively), removes duplicates, and removes literally-false filters.
   */
  private static LinkedHashSet<Filter> flattenOrChildren(final Collection<Filter> filters)
  {
    final LinkedHashSet<Filter> retVal = new LinkedHashSet<>();

    for (Filter child : filters) {
      if (child instanceof OrFilter) {
        retVal.addAll(flattenOrChildren(((OrFilter) child).getFilters()));
      } else if (!(child instanceof FalseFilter)) {
        retVal.add(child);
      }
    }

    return retVal;
  }
}
apache-2.0
bclozel/spring-boot
spring-boot-project/spring-boot-tools/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/bundling/LaunchScriptConfiguration.java
3223
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.gradle.tasks.bundling;

import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.springframework.boot.loader.tools.FileUtils;

/**
 * Encapsulates the configuration of the launch script for an executable jar or war.
 *
 * @author Andy Wilkinson
 * @since 2.0.0
 */
@SuppressWarnings("serial")
public class LaunchScriptConfiguration implements Serializable {

	private final Map<String, String> properties = new HashMap<>();

	private File script;

	/**
	 * Returns the properties applied to the launch script as it is embedded in the
	 * executable archive.
	 * @return the properties
	 */
	public Map<String, String> getProperties() {
		return this.properties;
	}

	/**
	 * Adds the given properties to those applied to the launch script as it is embedded
	 * in the executable archive.
	 * @param properties the properties
	 */
	public void properties(Map<String, String> properties) {
		this.properties.putAll(properties);
	}

	/**
	 * Returns the custom script {@link File} embedded in the executable archive, or
	 * {@code null} if the default launch script will be used.
	 * @return the script file
	 */
	public File getScript() {
		return this.script;
	}

	/**
	 * Sets the custom script {@link File} to embed in the executable archive. A
	 * {@code null} value selects the default launch script.
	 * @param script the script file
	 */
	public void setScript(File script) {
		this.script = script;
	}

	@Override
	public int hashCode() {
		// Same 31-based accumulation as the conventional IDE-generated form.
		int result = 31 + ((this.properties != null) ? this.properties.hashCode() : 0);
		return 31 * result + ((this.script != null) ? this.script.hashCode() : 0);
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		LaunchScriptConfiguration other = (LaunchScriptConfiguration) obj;
		if (!this.properties.equals(other.properties)) {
			return false;
		}
		if (this.script == null) {
			return other.script == null;
		}
		// Scripts are equal only when both the File and its content (SHA-1) match.
		return this.script.equals(other.script) && equalContents(this.script, other.script);
	}

	/**
	 * Compares two files by SHA-1 hash of their content; an I/O failure is treated as
	 * "not equal".
	 */
	private boolean equalContents(File left, File right) {
		try {
			return FileUtils.sha1Hash(left).equals(FileUtils.sha1Hash(right));
		}
		catch (IOException ex) {
			return false;
		}
	}

}
apache-2.0
dotty-staging/dotty
tests/pos/i10567/SchemaBuilder_1.java
482
// NOTE(review): this file lives under tests/pos/i10567 in the dotty repository, so it appears to be a
// minimized Java fixture for a Scala compiler type-inference test (F-bounded generics via
// NamespacedBuilder<R, S extends NamespacedBuilder<R, S>>). Method bodies intentionally throw; only the
// signatures matter. Do not "clean up" the shape of these declarations — it is the point of the test.
public class SchemaBuilder_1 {

  // Marker result type produced by the builder chain.
  public static class Schema {}

  // Entry point of the builder chain; never meant to be executed.
  public static TypeBuilder<Schema> builder() {
    throw new UnsupportedOperationException();
  }

  // Self-referential (F-bounded) type parameter S.
  public static class NamespacedBuilder<R, S extends NamespacedBuilder<R, S>> {}

  // Closes the F-bound by instantiating S with the subclass itself.
  public static class FixedBuilder<R> extends NamespacedBuilder<R, FixedBuilder<R>> {}

  public static class TypeBuilder<R> {
    public FixedBuilder<R> fixed(String name) {
      throw new UnsupportedOperationException();
    }
  }
}
apache-2.0
intel-hadoop/incubator-sentry
sentry-tests/sentry-tests-solr/src/test/java/org/apache/sentry/tests/e2e/solr/db/integration/TestSolrUpdateOperations.java
4358
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sentry.tests.e2e.solr.db.integration;

import java.io.File;

import org.apache.sentry.core.model.search.SearchConstants;
import org.apache.solr.common.SolrInputDocument;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * End-to-end test verifying that Sentry collection-level privileges (ALL, UPDATE, QUERY)
 * gate Solr update and delete-docs operations. Each scenario grants or revokes a privilege
 * for a user's role and then asserts that updates/deletes pass or fail accordingly.
 */
public class TestSolrUpdateOperations extends AbstractSolrSentryTestWithDbProvider {
  private static final Logger LOG = LoggerFactory.getLogger(TestSolrUpdateOperations.class);
  private static final String TEST_COLLECTION_NAME1 = "collection1";
  // Solr config directory uploaded to ZooKeeper before the collection is created.
  private static final String COLLECTION_CONFIG_DIR = RESOURCES_DIR + File.separator + "collection1" + File.separator + "conf";

  @Test
  public void testUpdateOperations() throws Exception {
    /**
     * Upload configs to ZK for create collection
     */
    uploadConfigDirToZk(COLLECTION_CONFIG_DIR);

    /**
     * create collection collection1 as admin user
     * and clean all document in the collection1
     */
    setupCollection(TEST_COLLECTION_NAME1);
    cleanSolrCollection(TEST_COLLECTION_NAME1);

    SolrInputDocument solrInputDoc = createSolrTestDoc();

    /**
     * user0->group0->role0
     * grant ALL privilege on collection collection1 to role0
     */
    String grantor = "user0";
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role0", SearchConstants.ALL);
    cleanSolrCollection(TEST_COLLECTION_NAME1);
    // ALL privilege: both update and delete must succeed.
    verifyUpdatePass(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsPass(grantor, TEST_COLLECTION_NAME1, false);

    //drop privilege
    dropCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER);
    // With the privilege dropped, update fails; re-seed the doc as admin so the delete attempt
    // has something to (fail to) delete.
    verifyUpdateFail(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    uploadSolrDoc(TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsFail(grantor, TEST_COLLECTION_NAME1, false);

    /**
     * user1->group1->role1
     * grant UPDATE privilege on collection collection1 to role1
     */
    grantor = "user1";
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role1", SearchConstants.UPDATE);
    cleanSolrCollection(TEST_COLLECTION_NAME1);
    verifyUpdatePass(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsPass(grantor, TEST_COLLECTION_NAME1, false);

    //revoke privilege
    revokeCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role1", SearchConstants.ALL);
    verifyUpdateFail(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    uploadSolrDoc(TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsFail(grantor, TEST_COLLECTION_NAME1, false);

    /**
     * user2->group2->role2
     * grant QUERY privilege on collection collection1 to role2
     */
    grantor = "user2";
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role2", SearchConstants.QUERY);
    cleanSolrCollection(TEST_COLLECTION_NAME1);
    // QUERY alone must not permit writes.
    verifyUpdateFail(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    uploadSolrDoc(TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsFail(grantor, TEST_COLLECTION_NAME1, false);

    // Upgrading role2 to ALL restores write access.
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role2", SearchConstants.ALL);
    cleanSolrCollection(TEST_COLLECTION_NAME1);
    verifyUpdatePass(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsPass(grantor, TEST_COLLECTION_NAME1, false);

    // user3 has no granted role at all: every write must fail.
    grantor = "user3";
    cleanSolrCollection(TEST_COLLECTION_NAME1);
    verifyUpdateFail(grantor, TEST_COLLECTION_NAME1, solrInputDoc);
    uploadSolrDoc(TEST_COLLECTION_NAME1, solrInputDoc);
    verifyDeletedocsFail(grantor, TEST_COLLECTION_NAME1, false);

    deleteCollection(TEST_COLLECTION_NAME1);
  }
}
apache-2.0
tabish121/activemq-artemis
artemis-server/src/test/java/org/apache/activemq/artemis/spi/core/security/jaas/LDAPLoginModuleTest.java
19624
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.spi.core.security.jaas;

import javax.naming.Context;
import javax.naming.NameClassPair;
import javax.naming.NamingEnumeration;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.directory.server.annotations.CreateLdapServer;
import org.apache.directory.server.annotations.CreateTransport;
import org.apache.directory.server.core.annotations.ApplyLdifFiles;
import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
import org.apache.directory.server.core.integ.FrameworkRunner;
import org.jboss.logging.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@code LDAPLoginModule} against an embedded ApacheDS LDAP server. The
 * {@link FrameworkRunner} starts a server on port 1024 (anonymous access allowed) seeded
 * from {@code test.ldif}; JAAS configurations are loaded from
 * {@code src/test/resources/login.config} via the {@code java.security.auth.login.config}
 * system property.
 */
@RunWith(FrameworkRunner.class)
@CreateLdapServer(transports = {@CreateTransport(protocol = "LDAP", port = 1024)}, allowAnonymousAccess = true)
@ApplyLdifFiles("test.ldif")
public class LDAPLoginModuleTest extends AbstractLdapTestUnit {

   private static final Logger logger = Logger.getLogger(LDAPLoginModuleTest.class);

   private static final String PRINCIPAL = "uid=admin,ou=system";
   private static final String CREDENTIALS = "secret";

   private final String loginConfigSysPropName = "java.security.auth.login.config";
   private String oldLoginConfig;

   // Point JAAS at the test login.config, remembering any previous value.
   @Before
   public void setLoginConfigSysProperty() {
      oldLoginConfig = System.getProperty(loginConfigSysPropName, null);
      System.setProperty(loginConfigSysPropName, "src/test/resources/login.config");
   }

   // Restore the original system property (only if one was set before).
   @After
   public void resetLoginConfigSysProperty() {
      if (oldLoginConfig != null) {
         System.setProperty(loginConfigSysPropName, oldLoginConfig);
      }
   }

   /**
    * Smoke test: bind directly via JNDI and confirm the expected entries exist under ou=system.
    */
   @Test
   public void testRunning() throws Exception {

      Hashtable<String, String> env = new Hashtable<>();
      env.put(Context.PROVIDER_URL, "ldap://localhost:1024");
      env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
      env.put(Context.SECURITY_AUTHENTICATION, "simple");
      env.put(Context.SECURITY_PRINCIPAL, PRINCIPAL);
      env.put(Context.SECURITY_CREDENTIALS, CREDENTIALS);
      DirContext ctx = new InitialDirContext(env);

      HashSet<String> set = new HashSet<>();

      NamingEnumeration<NameClassPair> list = ctx.list("ou=system");

      while (list.hasMore()) {
         NameClassPair ncp = list.next();
         set.add(ncp.getName());
      }

      assertTrue(set.contains("uid=admin"));
      assertTrue(set.contains("ou=users"));
      assertTrue(set.contains("ou=groups"));
      assertTrue(set.contains("ou=configuration"));
      assertTrue(set.contains("prefNodeName=sysPrefRoot"));
   }

   /**
    * Successful login/logout; afterwards no LDAP session should remain open.
    */
   @Test
   public void testLogin() throws Exception {
      logger.info("num session: " + ldapServer.getLdapSessionManager().getSessions().length);
      LoginContext context = new LoginContext("LDAPLogin", new CallbackHandler() {
         @Override
         public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
               if (callbacks[i] instanceof NameCallback) {
                  ((NameCallback) callbacks[i]).setName("first");
               } else if (callbacks[i] instanceof PasswordCallback) {
                  ((PasswordCallback) callbacks[i]).setPassword("secret".toCharArray());
               } else {
                  throw new UnsupportedCallbackException(callbacks[i]);
               }
            }
         }
      });
      context.login();
      context.logout();

      assertTrue("sessions still active after logout", waitFor(() -> ldapServer.getLdapSessionManager().getSessions().length == 0));
   }

   /**
    * With the pooled JAAS configuration, LDAP connections should remain open (pooled)
    * even after the LoginContext objects are logged out.
    */
   @Test
   public void testLoginPooled() throws Exception {
      CallbackHandler callbackHandler = callbacks -> {
         for (int i = 0; i < callbacks.length; i++) {
            if (callbacks[i] instanceof NameCallback) {
               ((NameCallback) callbacks[i]).setName("first");
            } else if (callbacks[i] instanceof PasswordCallback) {
               ((PasswordCallback) callbacks[i]).setPassword("secret".toCharArray());
            } else {
               throw new UnsupportedCallbackException(callbacks[i]);
            }
         }
      };

      LoginContext context = new LoginContext("LDAPLoginPooled", callbackHandler);
      context.login();
      context.logout();

      // again
      context.login();
      context.logout();

      // new context
      context = new LoginContext("LDAPLoginPooled", callbackHandler);
      context.login();
      context.logout();

      // Hammer the pooled configuration from several threads; failures are deliberately
      // ignored since only the session count at the end matters.
      Executor pool = Executors.newCachedThreadPool();
      for (int i = 0; i < 20; i++) {
         pool.execute(() -> {
            try {
               LoginContext context1 = new LoginContext("LDAPLoginPooled", callbackHandler);
               context1.login();
               context1.logout();
            } catch (Exception ignored) {
            }
         });
      }

      /*
       * The number of sessions here is variable due to the pool used to create the LoginContext objects and the pooling
       * for the LDAP connections (which are managed by the JVM implementation). We really just need to confirm that
       * there are still connections to the LDAP server open even after all the LoginContext objects are closed as that
       * will indicate the LDAP connection pooling is working.
       */
      assertTrue("not enough active sessions after logout", waitFor(() -> ldapServer.getLdapSessionManager().getSessions().length >= 5));

      ((ExecutorService) pool).shutdown();
      ((ExecutorService) pool).awaitTermination(2, TimeUnit.SECONDS);
   }

   // Simple predicate that may throw; polled by waitFor below.
   public interface Condition {

      boolean isSatisfied() throws Exception;
   }

   /**
    * Polls the condition every 100 ms for up to 5 seconds; returns its last observed value.
    */
   private boolean waitFor(final Condition condition) throws Exception {
      final long expiry = System.currentTimeMillis() + 5000;
      boolean conditionSatisified = condition.isSatisfied();
      while (!conditionSatisified && System.currentTimeMillis() < expiry) {
         TimeUnit.MILLISECONDS.sleep(100);
         conditionSatisified = condition.isSatisfied();
      }
      return conditionSatisified;
   }

   /**
    * The unauthenticated configuration must reject an empty password.
    */
   @Test
   public void testUnauthenticated() throws Exception {
      LoginContext context = new LoginContext("UnAuthenticatedLDAPLogin", new CallbackHandler() {
         @Override
         public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
               if (callbacks[i] instanceof NameCallback) {
                  ((NameCallback) callbacks[i]).setName("first");
               } else if (callbacks[i] instanceof PasswordCallback) {
                  ((PasswordCallback) callbacks[i]).setPassword("secret".toCharArray());
               } else {
                  throw new UnsupportedCallbackException(callbacks[i]);
               }
            }
         }
      });
      try {
         context.login();
      } catch (LoginException le) {
         assertEquals(le.getCause().getMessage(), "Empty password is not allowed");
         return;
      }
      fail("Should have failed authenticating");
      // NOTE(review): this assertion is unreachable — the catch block returns and fail() throws.
      // Left as-is to preserve behavior; confirm whether the session check was intended to run.
      assertTrue("sessions still active after logout", waitFor(() -> ldapServer.getLdapSessionManager().getSessions().length == 0));
   }

   /**
    * Wrong password over an anonymous connection must fail, and leave no session behind.
    */
   @Test
   public void testAuthenticatedViaBindOnAnonConnection() throws Exception {
      LoginContext context = new LoginContext("AnonBindCheckUserLDAPLogin", new CallbackHandler() {
         @Override
         public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
               if (callbacks[i] instanceof NameCallback) {
                  ((NameCallback) callbacks[i]).setName("first");
               } else if (callbacks[i] instanceof PasswordCallback) {
                  ((PasswordCallback) callbacks[i]).setPassword("wrongSecret".toCharArray());
               } else {
                  throw new UnsupportedCallbackException(callbacks[i]);
               }
            }
         }
      });
      try {
         context.login();
         fail("Should have failed authenticating");
      } catch (FailedLoginException expected) {
      }
      assertTrue("sessions still active after logout", waitFor(() -> ldapServer.getLdapSessionManager().getSessions().length == 0));
   }

   /**
    * Correct password over an anonymous connection must succeed, and leave no session behind.
    */
   @Test
   public void testAuthenticatedOkViaBindOnAnonConnection() throws Exception {
      LoginContext context = new LoginContext("AnonBindCheckUserLDAPLogin", new CallbackHandler() {
         @Override
         public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
               if (callbacks[i] instanceof NameCallback) {
                  ((NameCallback) callbacks[i]).setName("first");
               } else if (callbacks[i] instanceof PasswordCallback) {
                  ((PasswordCallback) callbacks[i]).setPassword("secret".toCharArray());
               } else {
                  throw new UnsupportedCallbackException(callbacks[i]);
               }
            }
         }
      });
      context.login();
      context.logout();
      assertTrue("sessions still active after logout", waitFor(() -> ldapServer.getLdapSessionManager().getSessions().length == 0));
   }

   @Test
   public void testCommitOnFailedLogin() throws LoginException {
      LoginModule loginModule = new LDAPLoginModule();
      JaasCallbackHandler callbackHandler = new JaasCallbackHandler(null, null, null);

      loginModule.initialize(new Subject(), callbackHandler, null, new HashMap<String, Object>());

      // login should return false due to null username
      assertFalse(loginModule.login());

      // since login failed commit should return false as well
      assertFalse(loginModule.commit());
   }

   /**
    * Every static final String field of LDAPLoginModule is assumed to be a config key name;
    * each is passed as an option and then verified to be present in the module's parsed config.
    */
   @Test
   public void testPropertyConfigMap() throws Exception {
      LDAPLoginModule loginModule = new LDAPLoginModule();
      JaasCallbackHandler callbackHandler = new JaasCallbackHandler(null, null, null);

      Field configMap = null;
      HashMap<String, Object> options = new HashMap<>();
      for (Field field: loginModule.getClass().getDeclaredFields()) {
         // Collect the values of static final String constants as option keys.
         if (Modifier.isStatic(field.getModifiers()) && Modifier.isFinal(field.getModifiers()) && field.getType().isAssignableFrom(String.class)) {
            field.setAccessible(true);
            options.put((String)field.get(loginModule), "SET");
         }
         // Grab the private "config" field so the parsed properties can be inspected.
         if (field.getName().equals("config")) {
            field.setAccessible(true);
            configMap = field;
         }
      }
      loginModule.initialize(new Subject(), callbackHandler, null, options);

      Set<LDAPLoginProperty> ldapProps = (Set<LDAPLoginProperty>) configMap.get(loginModule);
      for (String key: options.keySet()) {
         assertTrue("val set: " + key, presentIn(ldapProps, key));
      }
   }

   @Test
   public void testEmptyPassword() throws Exception {
      LoginContext context = new LoginContext("LDAPLogin", new CallbackHandler() {
         @Override
         public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
               if (callbacks[i] instanceof NameCallback) {
                  ((NameCallback) callbacks[i]).setName("first");
               } else if (callbacks[i] instanceof PasswordCallback) {
                  ((PasswordCallback) callbacks[i]).setPassword("".toCharArray());
               } else {
                  throw new UnsupportedCallbackException(callbacks[i]);
               }
            }
         }
      });
      try {
         context.login();
         fail("Should have thrown a FailedLoginException");
      } catch (FailedLoginException fle) {
         assertEquals("Password cannot be null or empty", fle.getMessage());
      }
      context.logout();
   }

   @Test
   public void testNullPassword() throws Exception {
      LoginContext context = new LoginContext("LDAPLogin", new CallbackHandler() {
         @Override
         public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
            for (int i = 0; i < callbacks.length; i++) {
               if (callbacks[i] instanceof NameCallback) {
                  ((NameCallback) callbacks[i]).setName("first");
               } else if (callbacks[i] instanceof PasswordCallback) {
                  ((PasswordCallback) callbacks[i]).setPassword(null);
               } else {
                  throw new UnsupportedCallbackException(callbacks[i]);
               }
            }
         }
      });
      try {
         context.login();
         fail("Should have thrown a FailedLoginException");
      } catch (FailedLoginException fle) {
         assertEquals("Password cannot be null or empty", fle.getMessage());
      }
      context.logout();
   }

   /**
    * Verifies how module options map onto the JNDI environment: module config keys are
    * consumed (not forwarded), unknown String options are forwarded, and non-String
    * options are dropped.
    */
   @Test
   public void testEnvironmentProperties() throws Exception {
      HashMap<String, Object> options = new HashMap<>();

      // set module configs
      for (LDAPLoginModule.ConfigKey configKey: LDAPLoginModule.ConfigKey.values()) {
         if (configKey.getName().equals("initialContextFactory")) {
            options.put(configKey.getName(), "com.sun.jndi.ldap.LdapCtxFactory");
         } else if (configKey.getName().equals("connectionURL")) {
            options.put(configKey.getName(), "ldap://localhost:1024");
         } else if (configKey.getName().equals("referral")) {
            options.put(configKey.getName(), "ignore");
         } else if (configKey.getName().equals("connectionTimeout")) {
            options.put(configKey.getName(), "10000");
         } else if (configKey.getName().equals("readTimeout")) {
            options.put(configKey.getName(), "11000");
         } else if (configKey.getName().equals("authentication")) {
            options.put(configKey.getName(), "simple");
         } else if (configKey.getName().equals("connectionUsername")) {
            options.put(configKey.getName(), PRINCIPAL);
         } else if (configKey.getName().equals("connectionPassword")) {
            options.put(configKey.getName(), CREDENTIALS);
         } else if (configKey.getName().equals("connectionProtocol")) {
            options.put(configKey.getName(), "s");
         } else if (configKey.getName().equals("debug")) {
            options.put(configKey.getName(), "true");
         } else {
            options.put(configKey.getName(), configKey.getName() + "_value_set");
         }
      }

      // add extra configs
      options.put("com.sun.jndi.ldap.tls.cbtype", "tls-server-end-point");
      options.put("randomConfig", "some-value");

      // add non-strings configs
      options.put("non.string.1", new Object());
      options.put("non.string.2", 1);

      // create context
      LDAPLoginModule loginModule = new LDAPLoginModule();
      loginModule.initialize(new Subject(), null, null, options);
      loginModule.openContext();

      // get created environment
      Hashtable<?, ?> environment = loginModule.context.getEnvironment();

      // cleanup
      loginModule.closeContext();

      // module config keys should not be passed to environment
      for (LDAPLoginModule.ConfigKey configKey: LDAPLoginModule.ConfigKey.values()) {
         assertEquals("value should not be set for key: " + configKey.getName(), null, environment.get(configKey.getName()));
      }

      // extra, non-module configs should be passed to environment
      assertEquals("value should be set for key: " + "com.sun.jndi.ldap.tls.cbtype", "tls-server-end-point", environment.get("com.sun.jndi.ldap.tls.cbtype"));
      assertEquals("value should be set for key: " + "randomConfig", "some-value", environment.get("randomConfig"));

      // non-string configs should not be passed to environment
      assertEquals("value should not be set for key: " + "non.string.1", null, environment.get("non.string.1"));
      assertEquals("value should not be set for key: " + "non.string.2", null, environment.get("non.string.2"));

      // environment configs should be set
      assertEquals("value should be set for key: " + Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory", environment.get(Context.INITIAL_CONTEXT_FACTORY));
      assertEquals("value should be set for key: " + Context.PROVIDER_URL, "ldap://localhost:1024", environment.get(Context.PROVIDER_URL));
      assertEquals("value should be set for key: " + Context.REFERRAL, "ignore", environment.get(Context.REFERRAL));
      assertEquals("value should be set for key: " + "com.sun.jndi.ldap.connect.timeout", "10000", environment.get("com.sun.jndi.ldap.connect.timeout"));
      assertEquals("value should be set for key: " + "com.sun.jndi.ldap.read.timeout", "11000", environment.get("com.sun.jndi.ldap.read.timeout"));
      assertEquals("value should be set for key: " + Context.SECURITY_AUTHENTICATION, "simple", environment.get(Context.SECURITY_AUTHENTICATION));
      assertEquals("value should be set for key: " + Context.SECURITY_PRINCIPAL, PRINCIPAL, environment.get(Context.SECURITY_PRINCIPAL));
      assertEquals("value should be set for key: " + Context.SECURITY_CREDENTIALS, CREDENTIALS, environment.get(Context.SECURITY_CREDENTIALS));
      assertEquals("value should be set for key: " + Context.SECURITY_PROTOCOL, "s", environment.get(Context.SECURITY_PROTOCOL));
   }

   // True when the named property exists in the parsed config with a non-null, non-empty value.
   private boolean presentIn(Set<LDAPLoginProperty> ldapProps, String propertyName) {
      for (LDAPLoginProperty conf : ldapProps) {
         if (conf.getPropertyName().equals(propertyName) && (conf.getPropertyValue() != null && !"".equals(conf.getPropertyValue())))
            return true;
      }
      return false;
   }
}
apache-2.0
zer0se7en/netty
common/src/main/java/io/netty/util/concurrent/MultithreadEventExecutorGroup.java
8173
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.util.concurrent;

import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Abstract base class for {@link EventExecutorGroup} implementations that handles their tasks with multiple threads at
 * the same time.
 */
public abstract class MultithreadEventExecutorGroup extends AbstractEventExecutorGroup {

    private final EventExecutor[] children;
    private final Set<EventExecutor> readonlyChildren;
    // Counts children whose termination future has completed; when it reaches
    // children.length the group's terminationFuture below is completed.
    private final AtomicInteger terminatedChildren = new AtomicInteger();
    // Raw DefaultPromise on purpose: a Promise<?> would not accept setSuccess(null) below.
    private final Promise<?> terminationFuture = new DefaultPromise(GlobalEventExecutor.INSTANCE);
    private final EventExecutorChooserFactory.EventExecutorChooser chooser;

    /**
     * Create a new instance.
     *
     * @param nThreads          the number of threads that will be used by this instance.
     * @param threadFactory     the ThreadFactory to use, or {@code null} if the default should be used.
     * @param args              arguments which will passed to each {@link #newChild(Executor, Object...)} call
     */
    protected MultithreadEventExecutorGroup(int nThreads, ThreadFactory threadFactory, Object... args) {
        this(nThreads, threadFactory == null ? null : new ThreadPerTaskExecutor(threadFactory), args);
    }

    /**
     * Create a new instance.
     *
     * @param nThreads          the number of threads that will be used by this instance.
     * @param executor          the Executor to use, or {@code null} if the default should be used.
     * @param args              arguments which will passed to each {@link #newChild(Executor, Object...)} call
     */
    protected MultithreadEventExecutorGroup(int nThreads, Executor executor, Object... args) {
        this(nThreads, executor, DefaultEventExecutorChooserFactory.INSTANCE, args);
    }

    /**
     * Create a new instance.
     *
     * @param nThreads          the number of threads that will be used by this instance.
     * @param executor          the Executor to use, or {@code null} if the default should be used.
     * @param chooserFactory    the {@link EventExecutorChooserFactory} to use.
     * @param args              arguments which will passed to each {@link #newChild(Executor, Object...)} call
     */
    protected MultithreadEventExecutorGroup(int nThreads, Executor executor,
                                            EventExecutorChooserFactory chooserFactory, Object... args) {
        if (nThreads <= 0) {
            throw new IllegalArgumentException(String.format("nThreads: %d (expected: > 0)", nThreads));
        }

        if (executor == null) {
            executor = new ThreadPerTaskExecutor(newDefaultThreadFactory());
        }

        children = new EventExecutor[nThreads];

        for (int i = 0; i < nThreads; i ++) {
            boolean success = false;
            try {
                children[i] = newChild(executor, args);
                success = true;
            } catch (Exception e) {
                // TODO: Think about if this is a good exception type
                throw new IllegalStateException("failed to create a child event loop", e);
            } finally {
                if (!success) {
                    // A child failed to construct: shut down every child created so far...
                    for (int j = 0; j < i; j ++) {
                        children[j].shutdownGracefully();
                    }

                    // ...and wait for each of them to actually terminate before propagating the failure.
                    for (int j = 0; j < i; j ++) {
                        EventExecutor e = children[j];
                        try {
                            while (!e.isTerminated()) {
                                e.awaitTermination(Integer.MAX_VALUE, TimeUnit.SECONDS);
                            }
                        } catch (InterruptedException interrupted) {
                            // Let the caller handle the interruption.
                            Thread.currentThread().interrupt();
                            break;
                        }
                    }
                }
            }
        }

        chooser = chooserFactory.newChooser(children);

        // Complete the group's termination future once the last child terminates.
        final FutureListener<Object> terminationListener = new FutureListener<Object>() {
            @Override
            public void operationComplete(Future<Object> future) throws Exception {
                if (terminatedChildren.incrementAndGet() == children.length) {
                    terminationFuture.setSuccess(null);
                }
            }
        };

        for (EventExecutor e: children) {
            e.terminationFuture().addListener(terminationListener);
        }

        // Immutable snapshot of the children, exposed through iterator().
        Set<EventExecutor> childrenSet = new LinkedHashSet<EventExecutor>(children.length);
        Collections.addAll(childrenSet, children);
        readonlyChildren = Collections.unmodifiableSet(childrenSet);
    }

    protected ThreadFactory newDefaultThreadFactory() {
        return new DefaultThreadFactory(getClass());
    }

    @Override
    public EventExecutor next() {
        return chooser.next();
    }

    @Override
    public Iterator<EventExecutor> iterator() {
        return readonlyChildren.iterator();
    }

    /**
     * Return the number of {@link EventExecutor} this implementation uses. This number maps
     * 1:1 to the number of threads it uses.
     */
    public final int executorCount() {
        return children.length;
    }

    /**
     * Create a new EventExecutor which will later be accessible via the {@link #next()} method. This method will be
     * called for each thread that will serve this {@link MultithreadEventExecutorGroup}.
     *
     */
    protected abstract EventExecutor newChild(Executor executor, Object... args) throws Exception;

    @Override
    public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
        for (EventExecutor l: children) {
            l.shutdownGracefully(quietPeriod, timeout, unit);
        }
        return terminationFuture();
    }

    @Override
    public Future<?> terminationFuture() {
        return terminationFuture;
    }

    @Override
    @Deprecated
    public void shutdown() {
        for (EventExecutor l: children) {
            l.shutdown();
        }
    }

    @Override
    public boolean isShuttingDown() {
        // The group is shutting down only if every child is.
        for (EventExecutor l: children) {
            if (!l.isShuttingDown()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isShutdown() {
        for (EventExecutor l: children) {
            if (!l.isShutdown()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isTerminated() {
        for (EventExecutor l: children) {
            if (!l.isTerminated()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit)
            throws InterruptedException {
        // Await each child in turn against a single shared deadline.
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        loop: for (EventExecutor l: children) {
            for (;;) {
                long timeLeft = deadline - System.nanoTime();
                if (timeLeft <= 0) {
                    break loop;
                }
                if (l.awaitTermination(timeLeft, TimeUnit.NANOSECONDS)) {
                    break;
                }
            }
        }
        return isTerminated();
    }
}
apache-2.0
zazi/Wikidata-Toolkit
wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/DataObjectFactoryImpl.java
6558
package org.wikidata.wdtk.datamodel.implementation;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.math.BigDecimal;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.Claim;
import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak;
import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.QuantityValue;
import org.wikidata.wdtk.datamodel.interfaces.Reference;
import org.wikidata.wdtk.datamodel.interfaces.SiteLink;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
import org.wikidata.wdtk.datamodel.interfaces.StringValue;
import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
import org.wikidata.wdtk.datamodel.interfaces.Value;
import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;

/**
 * Implementation of {@link DataObjectFactory} that uses the data object
 * implementations from this package. Every method is a one-line delegation to
 * the corresponding {@code *Impl} constructor (or static {@code create}
 * factory for entity id values); no state is held, so instances are
 * stateless and safe to share.
 * <p>
 * <b>Note:</b> If you are using this factory in your code, you might want to
 * consider the simpler static methods of {@link Datamodel} instead.
 *
 * @author Markus Kroetzsch
 *
 */
public class DataObjectFactoryImpl implements DataObjectFactory {

	// --- Entity id values ---------------------------------------------------
	// Id values use static create() factories rather than constructors
	// (the Impl classes validate/canonicalize the id string there).

	@Override
	public ItemIdValue getItemIdValue(String id, String siteIri) {
		return ItemIdValueImpl.create(id, siteIri);
	}

	@Override
	public PropertyIdValue getPropertyIdValue(String id, String siteIri) {
		return PropertyIdValueImpl.create(id, siteIri);
	}

	@Override
	public DatatypeIdValue getDatatypeIdValue(String id) {
		return new DatatypeIdImpl(id);
	}

	// --- Plain data values --------------------------------------------------

	@Override
	public TimeValue getTimeValue(long year, byte month, byte day, byte hour,
			byte minute, byte second, byte precision, int beforeTolerance,
			int afterTolerance, int timezoneOffset, String calendarModel) {
		return new TimeValueImpl(year, month, day, hour, minute, second,
				precision, beforeTolerance, afterTolerance, timezoneOffset,
				calendarModel);
	}

	@Override
	public GlobeCoordinatesValue getGlobeCoordinatesValue(double latitude,
			double longitude, double precision, String globeIri) {
		return new GlobeCoordinatesValueImpl(latitude, longitude, precision,
				globeIri);
	}

	@Override
	public StringValue getStringValue(String string) {
		return new StringValueImpl(string);
	}

	@Override
	public MonolingualTextValue getMonolingualTextValue(String text,
			String languageCode) {
		return new MonolingualTextValueImpl(text, languageCode);
	}

	@Override
	public QuantityValue getQuantityValue(BigDecimal numericValue,
			BigDecimal lowerBound, BigDecimal upperBound) {
		return new QuantityValueImpl(numericValue, lowerBound, upperBound);
	}

	// --- Snaks --------------------------------------------------------------

	@Override
	public ValueSnak getValueSnak(PropertyIdValue propertyId, Value value) {
		return new ValueSnakImpl(propertyId, value);
	}

	@Override
	public SomeValueSnak getSomeValueSnak(PropertyIdValue propertyId) {
		return new SomeValueSnakImpl(propertyId);
	}

	@Override
	public NoValueSnak getNoValueSnak(PropertyIdValue propertyId) {
		return new NoValueSnakImpl(propertyId);
	}

	@Override
	public SnakGroup getSnakGroup(List<? extends Snak> snaks) {
		return new SnakGroupImpl(snaks);
	}

	// --- Claims, statements, references ------------------------------------

	@Override
	public Claim getClaim(EntityIdValue subject, Snak mainSnak,
			List<SnakGroup> qualifiers) {
		return new ClaimImpl(subject, mainSnak, qualifiers);
	}

	@Override
	public Reference getReference(List<SnakGroup> snakGroups) {
		return new ReferenceImpl(snakGroups);
	}

	@Override
	public Statement getStatement(Claim claim,
			List<? extends Reference> references, StatementRank rank,
			String statementId) {
		return new StatementImpl(claim, references, rank, statementId);
	}

	@Override
	public StatementGroup getStatementGroup(List<Statement> statements) {
		return new StatementGroupImpl(statements);
	}

	@Override
	public SiteLink getSiteLink(String title, String siteKey,
			List<String> badges) {
		return new SiteLinkImpl(title, siteKey, badges);
	}

	// --- Entity documents ---------------------------------------------------

	@Override
	public PropertyDocument getPropertyDocument(PropertyIdValue propertyId,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases, DatatypeIdValue datatypeId) {
		// Convenience overload without statements: delegates with an empty
		// (immutable) statement-group list.
		return new PropertyDocumentImpl(propertyId, labels, descriptions,
				aliases, Collections.<StatementGroup> emptyList(), datatypeId);
	}

	@Override
	public PropertyDocument getPropertyDocument(PropertyIdValue propertyId,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases,
			List<StatementGroup> statementGroups, DatatypeIdValue datatypeId) {
		return new PropertyDocumentImpl(propertyId, labels, descriptions,
				aliases, statementGroups, datatypeId);
	}

	@Override
	public ItemDocument getItemDocument(ItemIdValue itemIdValue,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases,
			List<StatementGroup> statementGroups,
			Map<String, SiteLink> siteLinks) {
		return new ItemDocumentImpl(itemIdValue, labels, descriptions, aliases,
				statementGroups, siteLinks);
	}

}
apache-2.0
emsouza/archiva
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java
3263
package org.apache.archiva.policies;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * PropagateErrorsPolicy - a policy applied on error to determine how to treat the error.
 * <p>
 * Decides whether a download error should be propagated to the caller
 * ({@code applyPolicy} returns {@code true}) or suppressed because a previously
 * downloaded copy of the artifact is already available locally.
 */
@Service( "downloadErrorPolicy#propagate-errors-on-update" )
public class PropagateErrorsOnUpdateDownloadPolicy
    implements DownloadErrorPolicy
{
    /**
     * Signifies any error should cause a failure whether the artifact is already present or not.
     */
    public static final String ALWAYS = "always";

    /**
     * Signifies any error should cause a failure only if the artifact is not already present.
     */
    public static final String NOT_PRESENT = "artifact not already present";

    // Valid policy settings, in the order they are presented to the user.
    private List<String> options = new ArrayList<>( 2 );

    public PropagateErrorsOnUpdateDownloadPolicy()
    {
        options.add( ALWAYS );
        options.add( NOT_PRESENT );
    }

    /**
     * Decide whether the given download error must be surfaced.
     *
     * @param policySetting      one of {@link #ALWAYS} or {@link #NOT_PRESENT}
     * @param request            the download request properties (unused by this policy)
     * @param localFile          the local copy of the artifact, possibly not existing
     * @param exception          the error that occurred (unused by this policy)
     * @param previousExceptions errors from earlier attempts (unused by this policy)
     * @return {@code true} if the exception should be thrown to the caller,
     *         {@code false} if it should be suppressed because a local copy exists
     * @throws PolicyConfigurationException if {@code policySetting} is not a known option
     */
    @Override
    public boolean applyPolicy( String policySetting, Properties request, File localFile, Exception exception,
                                Map<String, Exception> previousExceptions )
        throws PolicyConfigurationException
    {
        if ( !options.contains( policySetting ) )
        {
            // Not a valid code.
            throw new PolicyConfigurationException( "Unknown error policy setting [" + policySetting + "], valid settings are [" + StringUtils.join( options.iterator(), "," ) + "]" );
        }

        if ( ALWAYS.equals( policySetting ) )
        {
            // throw the exception regardless
            return true;
        }

        if ( NOT_PRESENT.equals( policySetting ) )
        {
            // cancel the exception if the file exists
            return !localFile.exists();
        }

        // Unreachable as long as 'options' and the checks above stay in sync;
        // kept as a defensive guard. (Fixed message: this is an error policy,
        // not a checksum policy.)
        throw new PolicyConfigurationException( "Unable to process error policy of [" + policySetting + "], please file a bug report." );
    }

    @Override
    public String getDefaultOption()
    {
        return NOT_PRESENT;
    }

    @Override
    public String getId()
    {
        return "propagate-errors-on-update";
    }

    @Override
    public String getName()
    {
        return "Return error when";
    }

    @Override
    public List<String> getOptions()
    {
        return options;
    }
}
apache-2.0
Squeegee/batik
sources/org/apache/batik/svggen/font/table/CmapFormat0.java
1855
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.svggen.font.table; import java.io.IOException; import java.io.RandomAccessFile; /** * Simple Macintosh cmap table, mapping only the ASCII character set to glyphs. * * @version $Id$ * @author <a href="mailto:david@steadystate.co.uk">David Schweinsberg</a> */ public class CmapFormat0 extends CmapFormat { private int[] glyphIdArray = new int[256]; private int first, last; protected CmapFormat0(RandomAccessFile raf) throws IOException { super(raf); format = 0; first = -1; for (int i = 0; i < 256; i++) { glyphIdArray[i] = raf.readUnsignedByte(); if (glyphIdArray[i] > 0) { if (first == -1) first = i; last = i; } } } public int getFirst() { return first; } public int getLast() { return last; } public int mapCharCode(int charCode) { if (0 <= charCode && charCode < 256) { return glyphIdArray[charCode]; } else { return 0; } } }
apache-2.0
nmldiegues/stibt
infinispan/core/src/main/java/org/infinispan/factories/annotations/DefaultFactoryFor.java
1896
/* * JBoss, Home of Professional Open Source * Copyright 2009 Red Hat Inc. and/or its affiliates and other * contributors as indicated by the @author tags. All rights reserved. * See the copyright.txt in the distribution for a full listing of * individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.infinispan.factories.annotations; import static java.lang.annotation.ElementType.TYPE; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * An annotation that is used internally, for defining a DEFAULT factory to be used when constructing components. This * annotation allows you to define which components can be constructed by the annotated factory. * * @author Manik Surtani (<a href="mailto:manik@jboss.org">manik@jboss.org</a>) * @since 4.0 */ @Target(TYPE) @Retention(RetentionPolicy.RUNTIME) public @interface DefaultFactoryFor { /** * Components that may be constructed by a factory annotated with this annotation. * * @return classes that can be constructed by this factory */ public abstract Class<?>[] classes(); }
apache-2.0
zstackorg/zstack
header/src/main/java/org/zstack/header/identity/APIRenewSessionEvent.java
1022
package org.zstack.header.identity; import org.zstack.header.message.APIEvent; import org.zstack.header.rest.RestResponse; import java.sql.Timestamp; @RestResponse(allTo = "inventory") public class APIRenewSessionEvent extends APIEvent { private SessionInventory inventory; public APIRenewSessionEvent(String apiId) { super(apiId); } public APIRenewSessionEvent() { super(null); } public SessionInventory getInventory() { return inventory; } public void setInventory(SessionInventory inventory) { this.inventory = inventory; } public static APIRenewSessionEvent __example__() { APIRenewSessionEvent event = new APIRenewSessionEvent(); SessionInventory inventory = new SessionInventory(); inventory.setUuid(uuid()); inventory.setAccountUuid(uuid()); inventory.setExpiredDate(new Timestamp(org.zstack.header.message.DocUtils.date)); event.setInventory(inventory); return event; } }
apache-2.0
Alexey1Gavrilov/dropwizard
dropwizard-client/src/main/java/io/dropwizard/client/ssl/TlsConfiguration.java
5442
package io.dropwizard.client.ssl;

import com.fasterxml.jackson.annotation.JsonProperty;
import io.dropwizard.util.Strings;
import io.dropwizard.validation.ValidationMethod;

import javax.annotation.Nullable;
import javax.validation.constraints.NotEmpty;
import java.io.File;
import java.util.List;

/**
 * TLS settings for Dropwizard HTTP clients: protocol/provider selection,
 * key store and trust store locations and credentials, and hostname /
 * self-signed-certificate verification switches.
 * <p>
 * This is a Jackson-bound configuration bean; properties are populated from
 * YAML/JSON via the {@code @JsonProperty} accessors, and cross-field
 * constraints are enforced by the {@code @ValidationMethod} checks at the
 * bottom of the class.
 */
public class TlsConfiguration {

    // Protocol handshake version; defaults to TLS 1.2.
    @NotEmpty
    private String protocol = "TLSv1.2";

    // Optional JCE provider name for the SSLContext.
    @Nullable
    private String provider;

    @Nullable
    private File keyStorePath;

    @Nullable
    private String keyStorePassword;

    @NotEmpty
    private String keyStoreType = "JKS";

    @Nullable
    private String keyStoreProvider;

    @Nullable
    private File trustStorePath;

    @Nullable
    private String trustStorePassword;

    @NotEmpty
    private String trustStoreType = "JKS";

    @Nullable
    private String trustStoreProvider;

    // When true, certificates not signed by a known CA are accepted.
    private boolean trustSelfSignedCertificates = false;

    // When true (default), the server certificate must match the hostname.
    private boolean verifyHostname = true;

    // null means "use the JVM defaults" for protocols/ciphers.
    @Nullable
    private List<String> supportedProtocols = null;

    @Nullable
    private List<String> supportedCiphers = null;

    // Optional alias selecting one certificate from the key store.
    @Nullable
    private String certAlias = null;

    @JsonProperty
    public void setTrustSelfSignedCertificates(boolean trustSelfSignedCertificates) {
        this.trustSelfSignedCertificates = trustSelfSignedCertificates;
    }

    @JsonProperty
    public boolean isTrustSelfSignedCertificates() {
        return trustSelfSignedCertificates;
    }

    @JsonProperty
    @Nullable
    public File getKeyStorePath() {
        return keyStorePath;
    }

    @JsonProperty
    public void setKeyStorePath(File keyStorePath) {
        this.keyStorePath = keyStorePath;
    }

    @JsonProperty
    @Nullable
    public String getKeyStorePassword() {
        return keyStorePassword;
    }

    @JsonProperty
    public void setKeyStorePassword(String keyStorePassword) {
        this.keyStorePassword = keyStorePassword;
    }

    @JsonProperty
    public String getKeyStoreType() {
        return keyStoreType;
    }

    @JsonProperty
    public void setKeyStoreType(String keyStoreType) {
        this.keyStoreType = keyStoreType;
    }

    @JsonProperty
    public String getTrustStoreType() {
        return trustStoreType;
    }

    @JsonProperty
    public void setTrustStoreType(String trustStoreType) {
        this.trustStoreType = trustStoreType;
    }

    @JsonProperty
    @Nullable
    public File getTrustStorePath() {
        return trustStorePath;
    }

    @JsonProperty
    public void setTrustStorePath(File trustStorePath) {
        this.trustStorePath = trustStorePath;
    }

    @JsonProperty
    @Nullable
    public String getTrustStorePassword() {
        return trustStorePassword;
    }

    @JsonProperty
    public void setTrustStorePassword(String trustStorePassword) {
        this.trustStorePassword = trustStorePassword;
    }

    @JsonProperty
    public boolean isVerifyHostname() {
        return verifyHostname;
    }

    @JsonProperty
    public void setVerifyHostname(boolean verifyHostname) {
        this.verifyHostname = verifyHostname;
    }

    @JsonProperty
    public String getProtocol() {
        return protocol;
    }

    @JsonProperty
    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }

    @JsonProperty
    @Nullable
    public String getProvider() {
        return provider;
    }

    @JsonProperty
    public void setProvider(@Nullable String provider) {
        this.provider = provider;
    }

    @Nullable
    @JsonProperty
    public List<String> getSupportedCiphers() {
        return supportedCiphers;
    }

    @JsonProperty
    public void setSupportedCiphers(@Nullable List<String> supportedCiphers) {
        this.supportedCiphers = supportedCiphers;
    }

    @Nullable
    @JsonProperty
    public List<String> getSupportedProtocols() {
        return supportedProtocols;
    }

    @JsonProperty
    public void setSupportedProtocols(@Nullable List<String> supportedProtocols) {
        this.supportedProtocols = supportedProtocols;
    }

    @Nullable
    @JsonProperty
    public String getCertAlias() {
        return certAlias;
    }

    @JsonProperty
    public void setCertAlias(@Nullable String certAlias) {
        this.certAlias = certAlias;
    }

    // Windows-* store types read credentials from the OS and need no
    // password, hence the startsWith("Windows-") escape hatch below.
    @ValidationMethod(message = "keyStorePassword should not be null or empty if keyStorePath not null")
    public boolean isValidKeyStorePassword() {
        return keyStorePath == null
                || keyStoreType.startsWith("Windows-")
                || !Strings.isNullOrEmpty(keyStorePassword);
    }

    @ValidationMethod(message = "trustStorePassword should not be null or empty if trustStorePath not null")
    public boolean isValidTrustStorePassword() {
        return trustStorePath == null
                || trustStoreType.startsWith("Windows-")
                || !Strings.isNullOrEmpty(trustStorePassword);
    }

    /**
     * @since 2.0
     */
    @Nullable
    public String getKeyStoreProvider() {
        return keyStoreProvider;
    }

    /**
     * @since 2.0
     */
    public void setKeyStoreProvider(@Nullable String keyStoreProvider) {
        this.keyStoreProvider = keyStoreProvider;
    }

    /**
     * @since 2.0
     */
    @Nullable
    public String getTrustStoreProvider() {
        return trustStoreProvider;
    }

    /**
     * @since 2.0
     */
    public void setTrustStoreProvider(@Nullable String trustStoreProvider) {
        this.trustStoreProvider = trustStoreProvider;
    }
}
apache-2.0
brat000012001/keycloak
testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/authz/UmaDiscoveryDocumentTest.java
3625
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.testsuite.authz;

import static org.junit.Assert.assertEquals;

import java.net.URI;
import java.util.List;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;

import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.Test;
import org.keycloak.authorization.config.UmaConfiguration;
import org.keycloak.authorization.config.UmaWellKnownProviderFactory;
import org.keycloak.protocol.oidc.OIDCLoginProtocolService;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.services.resources.RealmsResource;
import org.keycloak.testsuite.AbstractKeycloakTest;
import org.keycloak.testsuite.admin.AbstractAdminTest;
import org.keycloak.testsuite.util.OAuthClient;

/**
 * Integration test that fetches the UMA well-known discovery document for the
 * "test" realm and verifies each advertised endpoint URL against the URLs
 * the server builds for the same realm.
 */
public class UmaDiscoveryDocumentTest extends AbstractKeycloakTest {

    // Injected by Arquillian; provides pre-built endpoint URLs for the realm.
    @ArquillianResource
    protected OAuthClient oauth;

    @Override
    public void addTestRealms(List<RealmRepresentation> testRealms) {
        // Import the shared "test" realm fixture from the classpath.
        RealmRepresentation realm = AbstractAdminTest.loadJson(getClass().getResourceAsStream("/testrealm.json"), RealmRepresentation.class);
        testRealms.add(realm);
    }

    @Test
    public void testFetchDiscoveryDocument() {
        Client client = ClientBuilder.newClient();
        UriBuilder builder = UriBuilder.fromUri(OAuthClient.AUTH_SERVER_ROOT);
        // .well-known URL for the UMA provider of realm "test".
        URI oidcDiscoveryUri = RealmsResource.wellKnownProviderUrl(builder).build("test", UmaWellKnownProviderFactory.PROVIDER_ID);

        WebTarget oidcDiscoveryTarget = client.target(oidcDiscoveryUri);

        // try-with-resources ensures the JAX-RS response is closed.
        try (Response response = oidcDiscoveryTarget.request().get()) {
            // Discovery documents must not be cached by intermediaries.
            assertEquals("no-cache, must-revalidate, no-transform, no-store", response.getHeaders().getFirst("Cache-Control"));

            UmaConfiguration configuration = response.readEntity(UmaConfiguration.class);

            // Core OIDC endpoints advertised in the document.
            assertEquals(configuration.getAuthorizationEndpoint(), OIDCLoginProtocolService.authUrl(UriBuilder.fromUri(OAuthClient.AUTH_SERVER_ROOT)).build("test").toString());
            assertEquals(configuration.getTokenEndpoint(), oauth.getAccessTokenUrl());
            assertEquals(configuration.getJwksUri(), oauth.getCertsUrl("test"));
            assertEquals(configuration.getTokenIntrospectionEndpoint(), oauth.getTokenIntrospectionUrl());

            // UMA protection-API endpoints live under the realm resource root.
            String registrationUri = UriBuilder
                    .fromUri(OAuthClient.AUTH_SERVER_ROOT)
                    .path(RealmsResource.class).path(RealmsResource.class, "getRealmResource").build(realmsResouce().realm("test").toRepresentation().getRealm()).toString();

            assertEquals(registrationUri + "/authz/protection/permission", configuration.getPermissionEndpoint().toString());
            assertEquals(registrationUri + "/authz/protection/resource_set", configuration.getResourceRegistrationEndpoint().toString());
        }
    }
}
apache-2.0
cereblanco/usbong-builder
src/main/java/usbong/android/builder/controllers/SelectScreenController.java
1056
package usbong.android.builder.controllers; import com.activeandroid.query.Select; import rx.Observable; import rx.Observer; import rx.Subscriber; import rx.android.schedulers.AndroidSchedulers; import rx.schedulers.Schedulers; import usbong.android.builder.models.Screen; import java.util.List; /** * Created by Rocky Camacho on 7/2/2014. */ public class SelectScreenController implements Controller { public void fetchScreens(Observer<List<Screen>> observer) { getScreens().observeOn(AndroidSchedulers.mainThread()) .subscribe(observer); } private Observable<List<Screen>> getScreens() { return Observable.create(new Observable.OnSubscribe<List<Screen>>() { @Override public void call(Subscriber<? super List<Screen>> subscriber) { List<Screen> screens = new Select().from(Screen.class).execute(); subscriber.onNext(screens); subscriber.onCompleted(); } }) .subscribeOn(Schedulers.io()); } }
apache-2.0
falko/camunda-bpm-platform
engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/optimize/OptimizeCompletedTaskInstanceRestServiceTest.java
5040
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.rest.optimize;

import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.impl.OptimizeService;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.rest.AbstractRestServiceTest;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import java.util.Date;

import static io.restassured.RestAssured.given;
import static org.camunda.bpm.engine.rest.util.DateTimeUtils.DATE_FORMAT_WITH_TIMEZONE;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

/**
 * REST tests for the Optimize "completed task instances" endpoint: each test
 * issues a GET with a combination of query parameters and verifies the exact
 * arguments forwarded to the mocked {@link OptimizeService}.
 */
public class OptimizeCompletedTaskInstanceRestServiceTest extends AbstractRestServiceTest {

  public static final String OPTIMIZE_COMPLETED_TASK_INSTANCE_PATH =
    TEST_RESOURCE_ROOT_PATH + "/optimize/task-instance/completed";

  protected OptimizeService mockedOptimizeService;

  protected ProcessEngine namedProcessEngine;

  @ClassRule
  public static TestContainerRule rule = new TestContainerRule();

  @Before
  public void setUpRuntimeData() {
    // Wire a mock OptimizeService into the named engine's configuration so
    // the REST resource under test delegates to it.
    mockedOptimizeService = mock(OptimizeService.class);
    ProcessEngineConfigurationImpl mockedConfig = mock(ProcessEngineConfigurationImpl.class);
    namedProcessEngine = getProcessEngine(MockProvider.EXAMPLE_PROCESS_ENGINE_NAME);
    when(namedProcessEngine.getProcessEngineConfiguration()).thenReturn(mockedConfig);
    when(mockedConfig.getOptimizeService()).thenReturn(mockedOptimizeService);
  }

  @Test
  public void testNoQueryParameters() {
    given()
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .contentType(MediaType.APPLICATION_JSON)
    .when()
      .get(OPTIMIZE_COMPLETED_TASK_INSTANCE_PATH);

    // Without parameters the service is queried unbounded (maxResults
    // defaults to Integer.MAX_VALUE, both date filters null).
    verify(mockedOptimizeService).getCompletedHistoricTaskInstances(null, null, Integer.MAX_VALUE);
    verifyNoMoreInteractions(mockedOptimizeService);
  }

  @Test
  public void testFinishedAfterQueryParameter() {
    Date now = new Date();
    given()
      .queryParam("finishedAfter", DATE_FORMAT_WITH_TIMEZONE.format(now))
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .contentType(MediaType.APPLICATION_JSON)
    .when()
      .get(OPTIMIZE_COMPLETED_TASK_INSTANCE_PATH);

    verify(mockedOptimizeService).getCompletedHistoricTaskInstances(now, null, Integer.MAX_VALUE);
    verifyNoMoreInteractions(mockedOptimizeService);
  }

  @Test
  public void testFinishedAtQueryParameter() {
    Date now = new Date();
    given()
      .queryParam("finishedAt", DATE_FORMAT_WITH_TIMEZONE.format(now))
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .contentType(MediaType.APPLICATION_JSON)
    .when()
      .get(OPTIMIZE_COMPLETED_TASK_INSTANCE_PATH);

    verify(mockedOptimizeService).getCompletedHistoricTaskInstances(null, now, Integer.MAX_VALUE);
    verifyNoMoreInteractions(mockedOptimizeService);
  }

  @Test
  public void testMaxResultsQueryParameter() {
    given()
      .queryParam("maxResults", 10)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .contentType(MediaType.APPLICATION_JSON)
    .when()
      .get(OPTIMIZE_COMPLETED_TASK_INSTANCE_PATH);

    verify(mockedOptimizeService).getCompletedHistoricTaskInstances(null, null, 10);
    verifyNoMoreInteractions(mockedOptimizeService);
  }

  @Test
  public void testQueryParameterCombination() {
    Date now = new Date();
    given()
      .queryParam("finishedAfter", DATE_FORMAT_WITH_TIMEZONE.format(now))
      .queryParam("finishedAt", DATE_FORMAT_WITH_TIMEZONE.format(now))
      .queryParam("maxResults", 10)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .contentType(MediaType.APPLICATION_JSON)
    .when()
      .get(OPTIMIZE_COMPLETED_TASK_INSTANCE_PATH);

    verify(mockedOptimizeService).getCompletedHistoricTaskInstances(now, now, 10);
    verifyNoMoreInteractions(mockedOptimizeService);
  }

}
apache-2.0
falko/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/container/impl/deployment/ParseProcessesXmlStep.java
5414
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.container.impl.deployment;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.camunda.bpm.application.AbstractProcessApplication;
import org.camunda.bpm.application.ProcessApplication;
import org.camunda.bpm.application.impl.metadata.ProcessesXmlParser;
import org.camunda.bpm.application.impl.metadata.spi.ProcessesXml;
import org.camunda.bpm.container.impl.ContainerIntegrationLogger;
import org.camunda.bpm.container.impl.spi.DeploymentOperation;
import org.camunda.bpm.container.impl.spi.DeploymentOperationStep;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.util.IoUtil;

import static org.camunda.bpm.container.impl.deployment.Attachments.PROCESSES_XML_RESOURCES;
import static org.camunda.bpm.container.impl.deployment.Attachments.PROCESS_APPLICATION;

/**
 * <p>Detects and parses all META-INF/processes.xml files within the process application
 * and attaches the parsed Metadata to the operation context.</p>
 *
 * @author Daniel Meyer
 *
 */
public class ParseProcessesXmlStep extends DeploymentOperationStep {

  private final static ContainerIntegrationLogger LOG = ProcessEngineLogger.CONTAINER_INTEGRATION_LOGGER;

  public String getName() {
    return "Parse processes.xml deployment descriptor files.";
  }

  public void performOperationStep(DeploymentOperation operationContext) {

    final AbstractProcessApplication processApplication = operationContext.getAttachment(PROCESS_APPLICATION);

    Map<URL, ProcessesXml> parsedFiles = parseProcessesXmlFiles(processApplication);

    // attach parsed metadata so subsequent deployment steps can consume it
    operationContext.addAttachment(PROCESSES_XML_RESOURCES, parsedFiles);
  }

  /**
   * Locates every deployment descriptor declared by the process application
   * and parses each into a {@link ProcessesXml}, keyed by the descriptor URL.
   * An empty (zero-byte) descriptor is mapped to the shared
   * {@code ProcessesXml.EMPTY_PROCESSES_XML} marker instead of being parsed.
   */
  protected Map<URL, ProcessesXml> parseProcessesXmlFiles(final AbstractProcessApplication processApplication) {

    String[] deploymentDescriptors = getDeploymentDescriptorLocations(processApplication);
    List<URL> processesXmlUrls = getProcessesXmlUrls(deploymentDescriptors, processApplication);

    Map<URL, ProcessesXml> parsedFiles = new HashMap<URL, ProcessesXml>();

    // perform parsing
    for (URL url : processesXmlUrls) {

      LOG.foundProcessesXmlFile(url.toString());

      if(isEmptyFile(url)) {
        parsedFiles.put(url, ProcessesXml.EMPTY_PROCESSES_XML);
        LOG.emptyProcessesXml();

      } else {
        parsedFiles.put(url, parseProcessesXml(url));
      }
    }

    if(parsedFiles.isEmpty()) {
      LOG.noProcessesXmlForPa(processApplication.getName());
    }

    return parsedFiles;
  }

  /**
   * Resolves each descriptor location to the concrete URL(s) found on the
   * process application's classloader. One location can yield multiple URLs
   * (e.g. the same resource on several classpath entries).
   */
  protected List<URL> getProcessesXmlUrls(String[] deploymentDescriptors, AbstractProcessApplication processApplication) {
    ClassLoader processApplicationClassloader = processApplication.getProcessApplicationClassloader();

    List<URL> result = new ArrayList<URL>();

    // load all deployment descriptor files using the classloader of the process application
    for (String deploymentDescriptor : deploymentDescriptors) {

      Enumeration<URL> processesXmlFileLocations = null;
      try {
        processesXmlFileLocations = processApplicationClassloader.getResources(deploymentDescriptor);
      } catch (IOException e) {
        throw LOG.exceptionWhileReadingProcessesXml(deploymentDescriptor, e);
      }

      while (processesXmlFileLocations.hasMoreElements()) {
        result.add(processesXmlFileLocations.nextElement());
      }

    }

    return result;
  }

  /**
   * Reads descriptor locations from the {@code @ProcessApplication} annotation;
   * falls back to the default META-INF/processes.xml when the application
   * class is not annotated.
   */
  protected String[] getDeploymentDescriptorLocations(AbstractProcessApplication processApplication) {
    ProcessApplication annotation = processApplication.getClass().getAnnotation(ProcessApplication.class);
    if(annotation == null) {
      return new String[] {ProcessApplication.DEFAULT_META_INF_PROCESSES_XML};

    } else {
      return annotation.deploymentDescriptors();

    }
  }

  /**
   * Returns true if the resource at {@code url} has zero bytes available.
   * The stream is closed silently because a close failure here is not
   * actionable for the caller.
   */
  protected boolean isEmptyFile(URL url) {

    InputStream inputStream = null;

    try {
      inputStream = url.openStream();
      return inputStream.available() == 0;

    } catch (IOException e) {
      throw LOG.exceptionWhileReadingProcessesXml(url.toString(), e);

    } finally {
      IoUtil.closeSilently(inputStream);
    }
  }

  /** Parses one processes.xml descriptor into its metadata representation. */
  protected ProcessesXml parseProcessesXml(URL url) {

    final ProcessesXmlParser processesXmlParser = new ProcessesXmlParser();

    ProcessesXml processesXml = processesXmlParser.createParse()
      .sourceUrl(url)
      .execute()
      .getProcessesXml();

    return processesXml;

  }

}
apache-2.0
hoangsondev/ThinDownloadManager
ThinDownloadManager/src/main/java/com/thin/downloadmanager/DownloadDispatcher.java
14099
package com.thin.downloadmanager;

import android.os.Process;
import android.util.Log;

import org.apache.http.conn.ConnectTimeoutException;

import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.URLConnection;
import java.util.HashMap;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.BlockingQueue;

import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR;
import static java.net.HttpURLConnection.HTTP_MOVED_PERM;
import static java.net.HttpURLConnection.HTTP_MOVED_TEMP;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.net.HttpURLConnection.HTTP_SEE_OTHER;
import static java.net.HttpURLConnection.HTTP_UNAVAILABLE;

/**
 * Worker thread that takes {@link DownloadRequest}s off a shared queue, performs
 * the HTTP transfer and reports state/progress back through the
 * {@link DownloadRequestQueue.CallBackDelivery} (which posts to the main thread).
 */
public class DownloadDispatcher extends Thread {

    /** The queue of download requests to service. */
    private final BlockingQueue<DownloadRequest> mQueue;

    /** Used to tell the dispatcher to die. */
    private volatile boolean mQuit = false;

    /** Current download request that this dispatcher is working on. */
    private DownloadRequest mRequest;

    /** Delivers callbacks on the main thread. */
    private DownloadRequestQueue.CallBackDelivery mDelivery;

    /** The buffer size used to stream the data. */
    public final int BUFFER_SIZE = 4096;

    /** How many times redirects happened during a download request. */
    private int mRedirectionCount = 0;

    /** The maximum number of redirects. */
    public final int MAX_REDIRECTS = 5; // can't be more than 7.

    private final int HTTP_REQUESTED_RANGE_NOT_SATISFIABLE = 416;
    private final int HTTP_TEMP_REDIRECT = 307;

    /** Total bytes expected for the current request; -1 when unknown (chunked). */
    private long mContentLength;

    /** Bytes transferred so far for the current request. */
    private long mCurrentBytes;

    /** Cleared once a download finishes or fails so pending redirects stop. */
    boolean shouldAllowRedirects = true;

    /** Timer used to schedule retry attempts after connection timeouts. */
    Timer mTimer;

    /** Tag used for debugging/logging */
    public static final String TAG = "ThinDownloadManager";

    /** Constructor takes the dependencies (request queue, delivery) that the dispatcher needs. */
    public DownloadDispatcher(BlockingQueue<DownloadRequest> queue,
                              DownloadRequestQueue.CallBackDelivery delivery) {
        mQueue = queue;
        mDelivery = delivery;
    }

    @Override
    public void run() {
        Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
        mTimer = new Timer();
        while (true) {
            try {
                mRequest = mQueue.take();
                mRedirectionCount = 0;
                Log.v(TAG, "Download initiated for " + mRequest.getDownloadId());
                updateDownloadState(DownloadManager.STATUS_STARTED);
                executeDownload(mRequest.getUri().toString());
            } catch (InterruptedException e) {
                // We may have been interrupted because it was time to quit.
                if (mQuit) {
                    if (mRequest != null) {
                        mRequest.finish();
                        updateDownloadFailed(DownloadManager.ERROR_DOWNLOAD_CANCELLED,
                                "Download cancelled");
                        mTimer.cancel();
                    }
                    return;
                }
                continue;
            }
        }
    }

    /** Signals the dispatcher loop to exit by interrupting the blocking take(). */
    public void quit() {
        mQuit = true;
        interrupt();
    }

    /**
     * Opens a connection to {@code downloadUrl}, dispatches on the HTTP status
     * code and either streams the body, follows a redirect (recursively, up to
     * {@link #MAX_REDIRECTS}), retries on timeout, or reports failure.
     */
    private void executeDownload(String downloadUrl) {
        URL url = null;
        try {
            url = new URL(downloadUrl);
        } catch (MalformedURLException e) {
            updateDownloadFailed(DownloadManager.ERROR_MALFORMED_URI,
                    "MalformedURLException: URI passed is malformed.");
            return;
        }

        HttpURLConnection conn = null;
        try {
            conn = (HttpURLConnection) url.openConnection();
            conn.setInstanceFollowRedirects(false);
            conn.setConnectTimeout(mRequest.getRetryPolicy().getCurrentTimeout());
            conn.setReadTimeout(mRequest.getRetryPolicy().getCurrentTimeout());

            HashMap<String, String> customHeaders = mRequest.getCustomHeaders();
            if (customHeaders != null) {
                for (String headerName : customHeaders.keySet()) {
                    conn.addRequestProperty(headerName, customHeaders.get(headerName));
                }
            }

            // Status Connecting is set here before urlConnection is trying to
            // connect to destination.
            updateDownloadState(DownloadManager.STATUS_CONNECTING);

            final int responseCode = conn.getResponseCode();
            Log.v(TAG, "Response code obtained for downloaded Id " + mRequest.getDownloadId()
                    + " : httpResponse Code " + responseCode);

            switch (responseCode) {
                case HTTP_OK:
                    shouldAllowRedirects = false;
                    if (readResponseHeaders(conn) == 1) {
                        transferData(conn);
                    } else {
                        updateDownloadFailed(DownloadManager.ERROR_DOWNLOAD_SIZE_UNKNOWN,
                                "Can't know size of download, giving up");
                    }
                    return;
                case HTTP_MOVED_PERM:
                case HTTP_MOVED_TEMP:
                case HTTP_SEE_OTHER:
                case HTTP_TEMP_REDIRECT:
                    // Fix: the original wrapped the recursive call in a
                    // while/continue loop with an off-by-one limit check; a
                    // plain guarded recursion is equivalent and correct.
                    if (!shouldAllowRedirects) {
                        // A deeper attempt already reported a terminal state.
                        return;
                    }
                    if (mRedirectionCount++ < MAX_REDIRECTS) {
                        Log.v(TAG, "Redirect for downloaded Id " + mRequest.getDownloadId());
                        final String location = conn.getHeaderField("Location");
                        executeDownload(location);
                    } else {
                        updateDownloadFailed(DownloadManager.ERROR_TOO_MANY_REDIRECTS,
                                "Too many redirects, giving up");
                    }
                    return;
                case HTTP_REQUESTED_RANGE_NOT_SATISFIABLE:
                    updateDownloadFailed(HTTP_REQUESTED_RANGE_NOT_SATISFIABLE,
                            conn.getResponseMessage());
                    break;
                case HTTP_UNAVAILABLE:
                    updateDownloadFailed(HTTP_UNAVAILABLE, conn.getResponseMessage());
                    break;
                case HTTP_INTERNAL_ERROR:
                    updateDownloadFailed(HTTP_INTERNAL_ERROR, conn.getResponseMessage());
                    break;
                default:
                    updateDownloadFailed(DownloadManager.ERROR_UNHANDLED_HTTP_CODE,
                            "Unhandled HTTP response:" + responseCode
                                    + " message:" + conn.getResponseMessage());
                    break;
            }
        } catch (SocketTimeoutException e) {
            e.printStackTrace();
            // Retry.
            attemptRetryOnTimeOutException();
        } catch (ConnectTimeoutException e) {
            e.printStackTrace();
            attemptRetryOnTimeOutException();
        } catch (IOException e) {
            e.printStackTrace();
            updateDownloadFailed(DownloadManager.ERROR_HTTP_DATA_ERROR,
                    "Trouble with low-level sockets");
        } finally {
            if (conn != null) {
                conn.disconnect();
            }
        }
    }

    /**
     * Opens the response stream and the destination file, streams the body and
     * closes both ends. Always deletes any pre-existing destination file first.
     */
    private void transferData(HttpURLConnection conn) {
        InputStream in = null;
        OutputStream out = null;
        FileDescriptor outFd = null;
        cleanupDestination();
        try {
            try {
                in = conn.getInputStream();
            } catch (IOException e) {
                e.printStackTrace();
                // Fix: the original swallowed this and later NPE'd on the null
                // stream; report the failure and bail out instead.
                updateDownloadFailed(DownloadManager.ERROR_HTTP_DATA_ERROR,
                        "Trouble with low-level sockets");
                return;
            }

            File destinationFile = new File(mRequest.getDestinationURI().getPath());
            try {
                out = new FileOutputStream(destinationFile, true);
                outFd = ((FileOutputStream) out).getFD();
            } catch (IOException e) {
                e.printStackTrace();
                updateDownloadFailed(DownloadManager.ERROR_FILE_ERROR,
                        "Error in writing download contents to the destination file");
                // Fix: don't fall through and stream into a null output stream.
                return;
            }

            // Start streaming data
            transferData(in, out);
        } finally {
            try {
                if (in != null) {
                    in.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                if (out != null) out.flush();
                if (outFd != null) outFd.sync();
            } catch (IOException e) {
                // best effort: flushing/syncing failures are not fatal here
            } finally {
                try {
                    if (out != null) {
                        out.close();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /** Pumps bytes from {@code in} to {@code out}, publishing progress as it goes. */
    private void transferData(InputStream in, OutputStream out) {
        final byte data[] = new byte[BUFFER_SIZE];
        mCurrentBytes = 0;
        mRequest.setDownloadState(DownloadManager.STATUS_RUNNING);
        Log.v(TAG, "Content Length: " + mContentLength + " for Download Id "
                + mRequest.getDownloadId());
        for (;;) {
            if (mRequest.isCanceled()) {
                Log.v(TAG, "Stopping the download as Download Request is cancelled for Downloaded Id "
                        + mRequest.getDownloadId());
                mRequest.finish();
                updateDownloadFailed(DownloadManager.ERROR_DOWNLOAD_CANCELLED,
                        "Download cancelled");
                return;
            }
            int bytesRead = readFromResponse(data, in);

            if (mContentLength > 0) {
                int progress = (int) ((mCurrentBytes * 100) / mContentLength);
                updateDownloadProgress(progress, mCurrentBytes);
            }

            if (bytesRead == -1) { // success, end of stream already reached
                updateDownloadComplete();
                return;
            } else if (bytesRead == Integer.MIN_VALUE) {
                // readFromResponse already reported the failure
                return;
            }

            writeDataToDestination(data, bytesRead, out);
            mCurrentBytes += bytesRead;
        }
    }

    /**
     * Reads one buffer's worth of the response body.
     *
     * @return the byte count, -1 on a clean end of stream, or
     *         {@link Integer#MIN_VALUE} after a reported read failure.
     */
    private int readFromResponse(byte[] data, InputStream entityStream) {
        try {
            return entityStream.read(data);
        } catch (IOException ex) {
            if ("unexpected end of stream".equals(ex.getMessage())) {
                return -1;
            }
            updateDownloadFailed(DownloadManager.ERROR_HTTP_DATA_ERROR,
                    "IOException: Failed reading response");
            return Integer.MIN_VALUE;
        }
    }

    /** Writes one chunk to the destination, reporting failure once on error. */
    private void writeDataToDestination(byte[] data, int bytesRead, OutputStream out) {
        try {
            out.write(data, 0, bytesRead);
        } catch (IOException ex) {
            // Fix: the original retried in an unbounded while(true) loop, which
            // spins forever on a persistent disk error after already reporting
            // the failure. Report once and stop writing.
            updateDownloadFailed(DownloadManager.ERROR_FILE_ERROR,
                    "IOException when writing download contents to the destination file");
        }
    }

    /**
     * Determines the expected content length from the response headers.
     *
     * @return 1 when the transfer can proceed (known length, or chunked
     *         encoding), -1 when the size cannot be determined.
     */
    private int readResponseHeaders(HttpURLConnection conn) {
        final String transferEncoding = conn.getHeaderField("Transfer-Encoding");

        if (transferEncoding == null) {
            mContentLength = getHeaderFieldLong(conn, "Content-Length", -1);
        } else {
            Log.v(TAG, "Ignoring Content-Length since Transfer-Encoding is also defined for Downloaded Id "
                    + mRequest.getDownloadId());
            mContentLength = -1;
        }

        if (mContentLength == -1
                && (transferEncoding == null || !transferEncoding.equalsIgnoreCase("chunked"))) {
            return -1;
        } else {
            return 1;
        }
    }

    /** Parses a header field as a long, falling back to {@code defaultValue}. */
    public long getHeaderFieldLong(URLConnection conn, String field, long defaultValue) {
        try {
            return Long.parseLong(conn.getHeaderField(field));
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }

    /** Schedules another download attempt per the request's retry policy. */
    private void attemptRetryOnTimeOutException() {
        updateDownloadState(DownloadManager.STATUS_RETRYING);
        final RetryPolicy retryPolicy = mRequest.getRetryPolicy();
        try {
            retryPolicy.retry();
            mTimer.schedule(new TimerTask() {
                @Override
                public void run() {
                    executeDownload(mRequest.getUri().toString());
                }
            }, retryPolicy.getCurrentTimeout());
        } catch (RetryError e) {
            // Update download failed.
            updateDownloadFailed(DownloadManager.ERROR_CONNECTION_TIMEOUT_AFTER_RETRIES,
                    "Connection time out after maximum retires attempted");
        }
    }

    /**
     * Called just before the thread finishes, regardless of status, to take any
     * necessary action on the downloaded file.
     */
    private void cleanupDestination() {
        Log.d(TAG, "cleanupDestination() deleting " + mRequest.getDestinationURI().getPath());
        // Fix: use getPath() like transferData() does. toString() keeps the URI
        // scheme prefix, so the File pointed at a non-existent path and stale
        // partial downloads were never deleted.
        File destinationFile = new File(mRequest.getDestinationURI().getPath());
        if (destinationFile.exists()) {
            destinationFile.delete();
        }
    }

    /** Records the new state on the request. */
    public void updateDownloadState(int state) {
        mRequest.setDownloadState(state);
    }

    /** Marks the request successful and posts the completion callback. */
    public void updateDownloadComplete() {
        mRequest.setDownloadState(DownloadManager.STATUS_SUCCESSFUL);
        if (mRequest.getDownloadListener() != null) {
            mDelivery.postDownloadComplete(mRequest);
            mRequest.finish();
        }
    }

    /** Marks the request failed, removes any partial file and posts the callback. */
    public void updateDownloadFailed(int errorCode, String errorMsg) {
        shouldAllowRedirects = false;
        mRequest.setDownloadState(DownloadManager.STATUS_FAILED);
        cleanupDestination();
        if (mRequest.getDownloadListener() != null) {
            mDelivery.postDownloadFailed(mRequest, errorCode, errorMsg);
            mRequest.finish();
        }
    }

    /** Posts a progress callback (percentage plus absolute byte count). */
    public void updateDownloadProgress(int progress, long downloadedBytes) {
        if (mRequest.getDownloadListener() != null) {
            mDelivery.postProgressUpdate(mRequest, mContentLength, downloadedBytes, progress);
        }
    }
}
apache-2.0
ebyhr/presto
lib/trino-orc/src/test/java/io/trino/orc/TestOrcReader.java
1100
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.orc; import org.testng.annotations.Test; import static io.trino.orc.OrcTester.fullOrcTester; import static io.trino.spi.type.DoubleType.DOUBLE; public class TestOrcReader extends AbstractTestOrcReader { public TestOrcReader() { super(OrcTester.quickOrcTester()); } @Test public void testDoubleSequenceFull() throws Exception { // run a single test using the full tester fullOrcTester().testRoundTrip(DOUBLE, doubleSequence(0, 0.1, 30_000)); } }
apache-2.0
smmribeiro/intellij-community
platform/statistics/config/src/com/intellij/internal/statistic/config/EventLogExternalSendSettings.java
1421
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package com.intellij.internal.statistic.config; import com.intellij.internal.statistic.config.bean.EventLogSendConfiguration; import com.intellij.internal.statistic.config.eventLog.EventLogBuildType; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Map; public class EventLogExternalSendSettings { private final Map<String, String> myEndpoints; private final Map<EventLogBuildType, EventLogSendConfiguration> myConfigurations; private final Map<String, String> myOptions; public EventLogExternalSendSettings(@NotNull Map<String, String> endpoints, @NotNull Map<String, String> options, @NotNull Map<EventLogBuildType, EventLogSendConfiguration> configurations) { myEndpoints = endpoints; myOptions = options; myConfigurations = configurations; } public boolean isSendEnabled() { return !myConfigurations.isEmpty(); } @Nullable public EventLogSendConfiguration getConfiguration(@NotNull EventLogBuildType type) { return myConfigurations.get(type); } @Nullable public String getEndpoint(@NotNull String name) { return myEndpoints.get(name); } public Map<String, String> getOptions() { return myOptions; } }
apache-2.0
wildfly/activemq-artemis
tests/activemq5-unit-tests/src/test/java/org/apache/activemq/openwire/v6/ProducerInfoTest.java
2157
/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.openwire.v6; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import org.apache.activemq.openwire.*; import org.apache.activemq.command.*; /** * Test case for the OpenWire marshalling for ProducerInfo * * * NOTE!: This file is auto generated - do not modify! * if you need to make a change, please see the modify the groovy scripts in the * under src/gram/script and then use maven openwire:generate to regenerate * this file. */ public class ProducerInfoTest extends BaseCommandTestSupport { public static ProducerInfoTest SINGLETON = new ProducerInfoTest(); public Object createObject() throws Exception { ProducerInfo info = new ProducerInfo(); populateObject(info); return info; } protected void populateObject(Object object) throws Exception { super.populateObject(object); ProducerInfo info = (ProducerInfo) object; info.setProducerId(createProducerId("ProducerId:1")); info.setDestination(createActiveMQDestination("Destination:2")); { BrokerId value[] = new BrokerId[2]; for (int i = 0; i < 2; i++) { value[i] = createBrokerId("BrokerPath:3"); } info.setBrokerPath(value); } info.setDispatchAsync(true); info.setWindowSize(1); } }
apache-2.0
julianpeeters/avro
lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
6884
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mapred.tether;

import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.net.InetSocketAddress;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.avro.Schema;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.SaslSocketTransceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;

/** Base class for Java tether mapreduce programs.  Useless except for testing,
 * since it's already possible to write Java MapReduce programs without
 * tethering.  Also serves as an example of how a framework may be
 * implemented. */
public abstract class TetherTask<IN,MID,OUT> {

  static final Logger LOG = LoggerFactory.getLogger(TetherTask.class);

  // Connection back to the parent process (the OutputProtocol server).
  private Transceiver clientTransceiver;
  private OutputProtocol outputClient;

  // Role (MAP or REDUCE) and partition count, set via configure()/partitions().
  private TaskType taskType;
  private int partitions;

  // Avro decoding machinery; decoder is reused across input() calls.
  private DecoderFactory decoderFactory = DecoderFactory.get();
  private BinaryDecoder decoder;
  private SpecificDatumReader<IN> inReader;
  private SpecificDatumReader<MID> midReader;
  // Reused record instances to avoid per-record allocation. midRecord holds
  // the last intermediate value read; midRecordSpare is the previous one,
  // swapped back in as the read target on the next iteration.
  private IN inRecord;
  private MID midRecord;
  private MID midRecordSpare;
  private Collector<MID> midCollector;
  private Collector<OUT> outCollector;

  // ByteArrayOutputStream whose internal buffer can be exposed without copying.
  private static class Buffer extends ByteArrayOutputStream {
    public ByteBuffer data() {
      return ByteBuffer.wrap(buf, 0, count);
    }
  }

  /** Collector for map and reduce output values. */
  public class Collector<T> {
    private SpecificDatumWriter<T> writer;
    private Buffer buffer = new Buffer();
    private BinaryEncoder encoder = new EncoderFactory()
        .configureBlockSize(512).binaryEncoder(buffer, null);

    private Collector(Schema schema) {
      this.writer = new SpecificDatumWriter<T>(schema);
    }

    /** Collect a map or reduce output value. */
    public void collect(T record) throws IOException {
      buffer.reset();
      writer.write(record, encoder);
      encoder.flush();
      outputClient.output(buffer.data());
    }

    /** Collect a pre-partitioned map output value. */
    public void collect(T record, int partition) throws IOException {
      buffer.reset();
      writer.write(record, encoder);
      encoder.flush();
      outputClient.outputPartitioned(partition, buffer.data());
    }
  }

  /**
   * Connects back to the parent process on the port named by the
   * AVRO_TETHER_OUTPUT_PORT environment variable and tells it which port this
   * task listens on.
   */
  void open(int inputPort) throws IOException {
    // open output client, connecting to parent
    String clientPortString = System.getenv("AVRO_TETHER_OUTPUT_PORT");
    if (clientPortString == null)
      throw new RuntimeException("AVRO_TETHER_OUTPUT_PORT env var is null");
    int clientPort = Integer.parseInt(clientPortString);
    this.clientTransceiver =
      new SaslSocketTransceiver(new InetSocketAddress(clientPort));
    this.outputClient = SpecificRequestor.getClient(OutputProtocol.class, clientTransceiver);

    // send inputPort to parent
    outputClient.configure(inputPort);
  }

  /**
   * Sets up the reader/collector pair for this task's role. Any error during
   * schema parsing or setup is reported to the parent via fail().
   */
  void configure(TaskType taskType, CharSequence inSchemaText, CharSequence outSchemaText) {
    this.taskType = taskType;
    try {
      Schema inSchema = Schema.parse(inSchemaText.toString());
      Schema outSchema = Schema.parse(outSchemaText.toString());
      switch (taskType) {
      case MAP:
        this.inReader = new SpecificDatumReader<IN>(inSchema);
        this.midCollector = new Collector<MID>(outSchema);
        break;
      case REDUCE:
        this.midReader = new SpecificDatumReader<MID>(inSchema);
        this.outCollector = new Collector<OUT>(outSchema);
        break;
      }
    } catch (Throwable e) {
      fail(e.toString());
    }
  }

  // Setter invoked by the framework with the job's partition count.
  void partitions(int partitions) { this.partitions = partitions; }

  /** Return the number of map output partitions of this job. */
  public int partitions() { return partitions; }

  /**
   * Decodes {@code count} records from {@code data} and feeds them to map() or
   * reduce(). In REDUCE mode, reduceFlush() is invoked whenever the incoming
   * value differs from the previous one, i.e. at the end of each run of equal
   * values.
   * NOTE(review): uses data.array() directly, which assumes the buffer's
   * content starts at array offset 0 — confirm against the caller's framing.
   */
  void input(ByteBuffer data, long count) {
    try {
      decoder = decoderFactory.binaryDecoder(data.array(), decoder);
      for (long i = 0; i < count; i++) {
        switch (taskType) {
        case MAP:
          inRecord = inReader.read(inRecord, decoder);
          map(inRecord, midCollector);
          break;
        case REDUCE:
          // Swap midRecord/midRecordSpare so the previous record survives
          // long enough for the equality check and possible flush.
          MID prev = midRecord;
          midRecord = midReader.read(midRecordSpare, decoder);
          if (prev != null && !midRecord.equals(prev))
            reduceFlush(prev, outCollector);
          reduce(midRecord, outCollector);
          midRecordSpare = prev;
          break;
        }
      }
    } catch (Throwable e) {
      LOG.warn("failing: "+e, e);
      fail(e.toString());
    }
  }

  /** Flushes the final run of equal reduce values, then signals completion to the parent. */
  void complete() {
    if (taskType == TaskType.REDUCE && midRecord != null)
      try {
        reduceFlush(midRecord, outCollector);
      } catch (Throwable e) {
        LOG.warn("failing: "+e, e);
        fail(e.toString());
      }
    outputClient.complete();
  }

  /** Called with input values to generate intermediate values. */
  public abstract void map(IN record, Collector<MID> collector) throws IOException;

  /** Called with sorted intermediate values. */
  public abstract void reduce(MID record, Collector<OUT> collector) throws IOException;

  /** Called with the last intermediate value in each equivalence run. */
  public abstract void reduceFlush(MID record, Collector<OUT> collector) throws IOException;

  /** Call to update task status. */
  public void status(String message) {
    outputClient.status(message);
  }

  /** Call to increment a counter. */
  public void count(String group, String name, long amount) {
    outputClient.count(group, name, amount);
  }

  /** Call to fail the task. */
  public void fail(String message) {
    outputClient.fail(message);
    close();
  }

  // Closes the connection to the parent; called on failure and shutdown.
  void close() {
    if (clientTransceiver != null)
      try {
        clientTransceiver.close();
      } catch (IOException e) {}                  // ignore
  }
}
apache-2.0
AlanJager/zstack
sdk/src/main/java/org/zstack/sdk/UpdateBaremetalChassisAction.java
3750
package org.zstack.sdk;

import java.util.HashMap;
import java.util.Map;
import org.zstack.sdk.*;

/**
 * SDK action that issues a PUT to /baremetal/chassis/{uuid}/actions to update
 * a baremetal chassis (name, description, IPMI connection details).
 * Parameter constraints are expressed via the @Param annotations below.
 */
public class UpdateBaremetalChassisAction extends AbstractAction {

    private static final HashMap<String, Parameter> parameterMap = new HashMap<>();

    private static final HashMap<String, Parameter> nonAPIParameterMap = new HashMap<>();

    /** Wraps the API outcome: exactly one of error/value is meaningful. */
    public static class Result {
        public ErrorCode error;
        public org.zstack.sdk.UpdateBaremetalChassisResult value;

        // Converts an error result into an ApiException; returns this for chaining.
        public Result throwExceptionIfError() {
            if (error != null) {
                throw new ApiException(
                    String.format("error[code: %s, description: %s, details: %s]", error.code, error.description, error.details)
                );
            }
            return this;
        }
    }

    // UUID of the chassis to update (required; substituted into the REST path).
    @Param(required = true, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String uuid;

    @Param(required = false, maxLength = 255, nonempty = false, nullElements = false, emptyString = false, noTrim = false)
    public java.lang.String name;

    @Param(required = false, maxLength = 2048, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String description;

    @Param(required = false, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String ipmiAddress;

    // Valid TCP port range enforced via numberRange.
    @Param(required = false, nonempty = false, nullElements = false, emptyString = true, numberRange = {1L,65535L}, noTrim = false)
    public java.lang.Integer ipmiPort;

    @Param(required = false, maxLength = 255, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String ipmiUsername;

    @Param(required = false, maxLength = 255, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String ipmiPassword;

    @Param(required = false)
    public java.util.List systemTags;

    @Param(required = false)
    public java.util.List userTags;

    // Authentication: either a session id, or an access key pair.
    @Param(required = false)
    public String sessionId;

    @Param(required = false)
    public String accessKeyId;

    @Param(required = false)
    public String accessKeySecret;

    @Param(required = false)
    public String requestIp;

    // Client-side knobs (not sent as API parameters); -1 means "use default".
    @NonAPIParam
    public long timeout = -1;

    @NonAPIParam
    public long pollingInterval = -1;

    // Maps a raw ApiResult into a typed Result, substituting an empty result
    // object when the API returned success with no payload.
    private Result makeResult(ApiResult res) {
        Result ret = new Result();
        if (res.error != null) {
            ret.error = res.error;
            return ret;
        }

        org.zstack.sdk.UpdateBaremetalChassisResult value = res.getResult(org.zstack.sdk.UpdateBaremetalChassisResult.class);
        ret.value = value == null ? new org.zstack.sdk.UpdateBaremetalChassisResult() : value;

        return ret;
    }

    /** Executes the action synchronously. */
    public Result call() {
        ApiResult res = ZSClient.call(this);
        return makeResult(res);
    }

    /** Executes the action asynchronously, delivering the result to the completion. */
    public void call(final Completion<Result> completion) {
        ZSClient.call(this, new InternalCompletion() {
            @Override
            public void complete(ApiResult res) {
                completion.complete(makeResult(res));
            }
        });
    }

    protected Map<String, Parameter> getParameterMap() {
        return parameterMap;
    }

    protected Map<String, Parameter> getNonAPIParameterMap() {
        return nonAPIParameterMap;
    }

    // Describes the REST call backing this action (PUT, polled, session-authenticated).
    protected RestInfo getRestInfo() {
        RestInfo info = new RestInfo();
        info.httpMethod = "PUT";
        info.path = "/baremetal/chassis/{uuid}/actions";
        info.needSession = true;
        info.needPoll = true;
        info.parameterName = "updateBaremetalChassis";
        return info;
    }
}
apache-2.0
dhanuka84/andes
modules/andes-core/broker/src/main/java/org/wso2/andes/qmf/QMFCommandDecoder.java
2905
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.wso2.andes.qmf;

import org.wso2.andes.transport.codec.BBDecoder;

import java.nio.ByteBuffer;

/**
 * Decodes QMF commands from a byte buffer. Each command is framed by a header
 * starting with the magic bytes 'A','M' followed by version, opcode and
 * sequence number; the opcode selects the concrete QMFCommand subclass.
 */
public class QMFCommandDecoder
{
    private BBDecoder _decoder;

    // Opcode byte -> operation lookup table, populated once from the enum.
    private static final QMFOperation[] OP_CODES = new QMFOperation[256];

    private final QMFService _qmfService;

    static
    {
        for(QMFOperation op : QMFOperation.values())
        {
            OP_CODES[op.getOpcode()] = op;
        }
    }

    public QMFCommandDecoder(final QMFService qmfService, ByteBuffer buf)
    {
        _qmfService = qmfService;
        _decoder = new BBDecoder();
        _decoder.init(buf);
    }

    /**
     * Decodes the next command from the buffer.
     *
     * @return the decoded command, or null when the buffer is exhausted, the
     *         header magic does not match, or the opcode is unrecognised.
     */
    public QMFCommand decode()
    {
        if(_decoder.hasRemaining())
        {
            QMFCommandHeader header = readQMFHeader();
            if(header == null)
            {
                // Fix: the original dereferenced a null header when the magic
                // bytes did not match, throwing a NullPointerException here.
                return null;
            }
            switch(header.getOperation())
            {
                case BROKER_REQUEST:
                    return new QMFBrokerRequestCommand(header, _decoder);
                case PACKAGE_QUERY:
                    return new QMFPackageQueryCommand(header, _decoder);
                case CLASS_QUERY:
                    return new QMFClassQueryCommand(header, _decoder);
                case SCHEMA_REQUEST:
                    return new QMFSchemaRequestCommand(header, _decoder);
                case METHOD_REQUEST:
                    return new QMFMethodRequestCommand(header, _decoder, _qmfService);
                case GET_QUERY:
                    return new QMFGetQueryCommand(header, _decoder);
                default:
                    System.out.println("Unknown command");
            }
            return null;
        }
        else
        {
            return null;
        }
    }

    /**
     * Reads the 8-byte command header, or returns null when the leading magic
     * bytes are not 'A','M'.
     */
    private QMFCommandHeader readQMFHeader()
    {
        // Fix: read both magic bytes unconditionally. The original used a
        // short-circuiting &&, so when the first byte was not 'A' the second
        // byte was never consumed, leaving the decoder out of sync with the
        // frame boundary.
        byte magic1 = _decoder.readInt8();
        byte magic2 = _decoder.readInt8();
        if(magic1 == (byte) 'A' && magic2 == (byte) 'M')
        {
            byte version = _decoder.readInt8();
            short opCode = _decoder.readUint8();
            int seq = _decoder.readInt32();
            return new QMFCommandHeader(version, seq, OP_CODES[opCode]);
        }
        return null;
    }
}
apache-2.0
vt09/bazel
src/test/java/com/google/devtools/build/lib/testutil/TestRuleClassProvider.java
3348
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.testutil;

import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST;
import static com.google.devtools.build.lib.packages.BuildType.OUTPUT_LIST;
import static com.google.devtools.build.lib.syntax.Type.INTEGER;
import static com.google.devtools.build.lib.syntax.Type.STRING_LIST;

import com.google.devtools.build.lib.analysis.BaseRuleClasses;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClass.Builder;
import com.google.devtools.build.lib.util.FileTypeSet;

import java.lang.reflect.Method;

/**
 * Helper class to provide a RuleClassProvider for tests.
 */
public class TestRuleClassProvider {
  // Lazily built, cached provider. Not synchronized — assumed to be used from
  // a single test thread; TODO(review): confirm no concurrent test access.
  private static ConfiguredRuleClassProvider ruleProvider = null;

  /**
   * Adds all the rule classes supported internally within the build tool to the given builder.
   */
  public static void addStandardRules(ConfiguredRuleClassProvider.Builder builder) {
    try {
      // Resolved reflectively so the test util does not hard-depend on the
      // concrete provider class named in TestConstants.
      Class<?> providerClass = Class.forName(TestConstants.TEST_RULE_CLASS_PROVIDER);
      Method setupMethod = providerClass.getMethod("setup",
          ConfiguredRuleClassProvider.Builder.class);
      setupMethod.invoke(null, builder);
    } catch (Exception e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Return a rule class provider.
   */
  public static ConfiguredRuleClassProvider getRuleClassProvider() {
    if (ruleProvider == null) {
      ConfiguredRuleClassProvider.Builder builder =
          new ConfiguredRuleClassProvider.Builder();
      addStandardRules(builder);
      // Augment the standard set with a dummy rule for tests to target.
      builder.addRuleDefinition(new TestingDummyRule());
      ruleProvider = builder.build();
    }
    return ruleProvider;
  }

  /** Undocumented dummy rule ("testing_dummy_rule") with a few typed attributes for tests. */
  public static final class TestingDummyRule implements RuleDefinition {
    @Override
    public RuleClass build(Builder builder, RuleDefinitionEnvironment env) {
      return builder
          .setUndocumented()
          .add(attr("srcs", LABEL_LIST).allowedFileTypes(FileTypeSet.ANY_FILE))
          .add(attr("outs", OUTPUT_LIST))
          .add(attr("dummystrings", STRING_LIST))
          .add(attr("dummyinteger", INTEGER))
          .build();
    }

    @Override
    public Metadata getMetadata() {
      return RuleDefinition.Metadata.builder()
          .name("testing_dummy_rule")
          .ancestors(BaseRuleClasses.RuleBase.class)
          .factoryClass(UnknownRuleConfiguredTarget.class)
          .build();
    }
  }
}
apache-2.0
tufangorel/hazelcast
hazelcast/src/test/java/com/hazelcast/test/mocknetwork/StaticAddressPicker.java
1257
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.test.mocknetwork; import com.hazelcast.instance.AddressPicker; import com.hazelcast.nio.Address; import java.nio.channels.ServerSocketChannel; class StaticAddressPicker implements AddressPicker { private final Address thisAddress; StaticAddressPicker(Address thisAddress) { this.thisAddress = thisAddress; } public void pickAddress() { } public Address getBindAddress() { return thisAddress; } public Address getPublicAddress() { return thisAddress; } public ServerSocketChannel getServerSocketChannel() { return null; } }
apache-2.0
joewalnes/idea-community
java/debugger/impl/src/com/intellij/debugger/actions/HotSwapAction.java
2280
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.debugger.actions; import com.intellij.debugger.DebuggerManagerEx; import com.intellij.debugger.impl.DebuggerSession; import com.intellij.debugger.settings.DebuggerSettings; import com.intellij.debugger.ui.HotSwapUI; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.project.Project; /** * @author lex */ public class HotSwapAction extends AnAction{ public void actionPerformed(AnActionEvent e) { DataContext dataContext = e.getDataContext(); Project project = PlatformDataKeys.PROJECT.getData(dataContext); if(project == null) { return; } DebuggerManagerEx debuggerManager = DebuggerManagerEx.getInstanceEx(project); DebuggerSession session = debuggerManager.getContext().getDebuggerSession(); if(session != null && session.isAttached()) { HotSwapUI.getInstance(project).reloadChangedClasses(session, DebuggerSettings.getInstance().COMPILE_BEFORE_HOTSWAP); } } public void update(AnActionEvent e) { DataContext dataContext = e.getDataContext(); Project project = PlatformDataKeys.PROJECT.getData(dataContext); if(project == null) { e.getPresentation().setEnabled(false); return; } DebuggerManagerEx debuggerManager = DebuggerManagerEx.getInstanceEx(project); DebuggerSession session = 
debuggerManager.getContext().getDebuggerSession(); e.getPresentation().setEnabled(session != null && session.isAttached() && session.getProcess().canRedefineClasses()); } }
apache-2.0
arifogel/batfish
projects/batfish/src/main/java/org/batfish/grammar/cumulus_nclu/CumulusNcluControlPlaneExtractor.java
1941
package org.batfish.grammar.cumulus_nclu; import java.util.Set; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTreeWalker; import org.batfish.common.NetworkSnapshot; import org.batfish.common.Warnings; import org.batfish.grammar.BatfishParseTreeWalker; import org.batfish.grammar.ControlPlaneExtractor; import org.batfish.grammar.ImplementedRules; import org.batfish.grammar.silent_syntax.SilentSyntaxCollection; import org.batfish.representation.cumulus_nclu.CumulusNcluConfiguration; import org.batfish.vendor.VendorConfiguration; /** * A {@link ControlPlaneExtractor} that produces a {@link CumulusNcluConfiguration} from a parse * tree returned by {@link CumulusNcluCombinedParser#parse}. */ public class CumulusNcluControlPlaneExtractor implements ControlPlaneExtractor { private CumulusNcluConfiguration _configuration; private final CumulusNcluCombinedParser _parser; private final String _text; private final Warnings _w; private final SilentSyntaxCollection _silentSyntax; public CumulusNcluControlPlaneExtractor( String fileText, CumulusNcluCombinedParser combinedParser, Warnings warnings, SilentSyntaxCollection silentSyntax) { _text = fileText; _parser = combinedParser; _w = warnings; _silentSyntax = silentSyntax; } @Override public VendorConfiguration getVendorConfiguration() { return _configuration; } @Override public Set<String> implementedRuleNames() { return ImplementedRules.getImplementedRules(CumulusNcluConfigurationBuilder.class); } @Override public void processParseTree(NetworkSnapshot snapshot, ParserRuleContext tree) { CumulusNcluConfigurationBuilder cb = new CumulusNcluConfigurationBuilder(_parser, _text, _w, _silentSyntax); ParseTreeWalker walker = new BatfishParseTreeWalker(_parser); walker.walk(cb, tree); _configuration = cb.getConfiguration(); } }
apache-2.0
camunda/camunda-bpm-platform
qa/test-db-instance-migration/test-fixture-73/src/main/java/org/camunda/bpm/qa/upgrade/scenarios/compensation/InterruptingEventSubProcessCompensationScenario.java
2223
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.qa.upgrade.scenarios.compensation; import org.camunda.bpm.engine.ProcessEngine; import org.camunda.bpm.engine.task.Task; import org.camunda.bpm.engine.test.Deployment; import org.camunda.bpm.qa.upgrade.DescribesScenario; import org.camunda.bpm.qa.upgrade.ScenarioSetup; import org.camunda.bpm.qa.upgrade.Times; /** * @author Thorben Lindhauer * */ public class InterruptingEventSubProcessCompensationScenario { @Deployment public static String deployProcess() { return "org/camunda/bpm/qa/upgrade/compensation/interruptingEventSubProcessCompensationProcess.bpmn20.xml"; } @DescribesScenario("init.throwCompensate") @Times(4) public static ScenarioSetup instantiateThrowCompensate() { return new ScenarioSetup() { public void execute(ProcessEngine engine, String scenarioName) { engine .getRuntimeService() .startProcessInstanceByKey("InterruptingEventSubProcessCompensationScenario", scenarioName); // trigger the event subprocess engine.getRuntimeService().correlateMessage("EventSubProcessMessage"); // complete the task to compensate and then throw compensation Task eventSubProcessTask = engine.getTaskService().createTaskQuery() 
.processInstanceBusinessKey(scenarioName).singleResult(); engine.getTaskService().complete(eventSubProcessTask.getId()); } }; } }
apache-2.0
gwtbootstrap3/gwtbootstrap3-extras
src/main/java/org/gwtbootstrap3/extras/select/client/ui/event/LoadedEvent.java
1924
package org.gwtbootstrap3.extras.select.client.ui.event; /* * #%L * GwtBootstrap3 * %% * Copyright (C) 2013 - 2016 GwtBootstrap3 * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.gwt.event.shared.GwtEvent; /** * The loaded event is fired immediately when the Loaded instance method is called. */ public class LoadedEvent extends GwtEvent<LoadedHandler> { private static Type<LoadedHandler> TYPE; /** * Fires a loaded event on all registered handlers in the handler manager. If * no such handlers exist, this method will do nothing. * * @param source the source of the handlers */ public static void fire(final HasLoadedHandlers source) { if (TYPE != null) { LoadedEvent event = new LoadedEvent(); source.fireEvent(event); } } /** * Gets the type associated with this event. * * @return returns the handler type */ public static Type<LoadedHandler> getType() { if (TYPE == null) { TYPE = new Type<LoadedHandler>(); } return TYPE; } @Override public Type<LoadedHandler> getAssociatedType() { return TYPE; } @Override protected void dispatch(final LoadedHandler handler) { handler.onLoaded(this); } /** * Creates a loaded event. */ protected LoadedEvent() {} }
apache-2.0
godfreyhe/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/connector/sink2/GlobalCommitterOperator.java
7334
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.api.connector.sink2;

import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer;
import org.apache.flink.api.connector.sink2.Committer;
import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.Output;
import org.apache.flink.streaming.api.operators.util.SimpleVersionedListState;
import org.apache.flink.streaming.runtime.operators.sink.committables.CheckpointCommittableManager;
import org.apache.flink.streaming.runtime.operators.sink.committables.CommittableCollector;
import org.apache.flink.streaming.runtime.operators.sink.committables.CommittableCollectorSerializer;
import org.apache.flink.streaming.runtime.operators.sink.committables.CommittableManager;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.StreamTask;
import org.apache.flink.util.function.SerializableSupplier;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * Operator that collects {@link CommittableMessage}s from upstream, tracks them per checkpoint in
 * a {@link CommittableCollector}, and commits them through a {@link Committer} once the
 * corresponding checkpoint completes (or at end of input). The collector is checkpointed as raw
 * bytes so pending committables survive failover and are re-committed on restore.
 */
class GlobalCommitterOperator<CommT> extends AbstractStreamOperator<Void>
        implements OneInputStreamOperator<CommittableMessage<CommT>, Void>, BoundedOneInput {

    /** The operator's state descriptor. */
    private static final ListStateDescriptor<byte[]> GLOBAL_COMMITTER_OPERATOR_RAW_STATES_DESC =
            new ListStateDescriptor<>(
                    "global_committer_raw_states", BytePrimitiveArraySerializer.INSTANCE);

    // Factories are serializable so the operator itself can be shipped; the actual committer and
    // serializer instances are created in setup().
    private final SerializableSupplier<Committer<CommT>> committerFactory;
    private final SerializableSupplier<SimpleVersionedSerializer<CommT>>
            committableSerializerFactory;

    // Operator state holding the (serialized) committable collector.
    private ListState<CommittableCollector<CommT>> committableCollectorState;
    private Committer<CommT> committer;
    private CommittableCollector<CommT> committableCollector;
    // Highest checkpoint id known to be complete; -1 until the first completion/restore.
    private long lastCompletedCheckpointId = -1;
    private SimpleVersionedSerializer<CommT> committableSerializer;

    GlobalCommitterOperator(
            SerializableSupplier<Committer<CommT>> committerFactory,
            SerializableSupplier<SimpleVersionedSerializer<CommT>> committableSerializerFactory) {
        this.committerFactory = checkNotNull(committerFactory);
        this.committableSerializerFactory = checkNotNull(committableSerializerFactory);
    }

    @Override
    public void setup(
            StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<Void>> output) {
        super.setup(containingTask, config, output);
        // Instantiate the runtime objects from their serializable factories.
        committer = committerFactory.get();
        committableCollector = CommittableCollector.of(getRuntimeContext());
        committableSerializer = committableSerializerFactory.get();
    }

    @Override
    public void snapshotState(StateSnapshotContext context) throws Exception {
        super.snapshotState(context);
        // It is important to copy the collector to not mutate the state.
        committableCollectorState.update(Collections.singletonList(committableCollector.copy()));
    }

    @Override
    public void initializeState(StateInitializationContext context) throws Exception {
        super.initializeState(context);
        committableCollectorState =
                new SimpleVersionedListState<>(
                        context.getOperatorStateStore()
                                .getListState(GLOBAL_COMMITTER_OPERATOR_RAW_STATES_DESC),
                        new CommittableCollectorSerializer<>(
                                committableSerializer,
                                getRuntimeContext().getIndexOfThisSubtask(),
                                getRuntimeContext().getMaxNumberOfParallelSubtasks()));
        if (context.isRestored()) {
            // Merge all restored collectors (there may be several after rescaling).
            committableCollectorState.get().forEach(cc -> committableCollector.merge(cc));
            lastCompletedCheckpointId = context.getRestoredCheckpointId().getAsLong();
            // try to re-commit recovered transactions as quickly as possible
            commit(lastCompletedCheckpointId);
        }
    }

    @Override
    public void notifyCheckpointComplete(long checkpointId) throws Exception {
        super.notifyCheckpointComplete(checkpointId);
        // Notifications can arrive out of order; never move the watermark backwards.
        lastCompletedCheckpointId = Math.max(lastCompletedCheckpointId, checkpointId);
        commit(lastCompletedCheckpointId);
    }

    /** Returns the end-of-input committables, or an empty collection when there are none. */
    private Collection<? extends CheckpointCommittableManager<CommT>> getCommittables() {
        final Collection<? extends CheckpointCommittableManager<CommT>> committables =
                committableCollector.getEndOfInputCommittables();
        if (committables == null) {
            return Collections.emptyList();
        }
        return committables;
    }

    /**
     * Returns all per-checkpoint committables up to (and including) the given checkpoint id, or an
     * empty collection when there are none.
     */
    private Collection<? extends CheckpointCommittableManager<CommT>> getCommittables(
            long checkpointId) {
        final Collection<? extends CheckpointCommittableManager<CommT>> committables =
                committableCollector.getCheckpointCommittablesUpTo(checkpointId);
        if (committables == null) {
            return Collections.emptyList();
        }
        return committables;
    }

    /** Commits everything collected up to {@code checkpointId}. */
    private void commit(long checkpointId) throws IOException, InterruptedException {
        for (CheckpointCommittableManager<CommT> committable : getCommittables(checkpointId)) {
            // Only the most recent completed checkpoint is guaranteed fully received.
            boolean fullyReceived = committable.getCheckpointId() == lastCompletedCheckpointId;
            committable.commit(fullyReceived, committer);
        }
    }

    @Override
    public void endInput() throws Exception {
        // Retry committing end-of-input committables until the collector reports done.
        do {
            for (CommittableManager<CommT> committable : getCommittables()) {
                committable.commit(false, committer);
            }
        } while (!committableCollector.isFinished());
    }

    @Override
    public void processElement(StreamRecord<CommittableMessage<CommT>> element) throws Exception {
        // Just accumulate; actual committing happens on checkpoint completion / end of input.
        committableCollector.addMessage(element.getValue());
    }
}
apache-2.0
quickbundle/qb-archetype
quickbundle-rmwebdemo/src/main/java/org/quickbundle/project/login/RmSessionService.java
8874
package org.quickbundle.project.login;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.jws.WebMethod;
import javax.jws.WebService;
import javax.servlet.http.HttpSession;

import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
import org.quickbundle.base.beans.factory.RmBeanFactory;
import org.quickbundle.config.RmClusterConfig;
import org.quickbundle.config.RmConfig;
import org.quickbundle.project.IGlobalConstants;
import org.quickbundle.project.RmProjectHelper;
import org.quickbundle.project.common.vo.RmCommonVo;
import org.quickbundle.project.listener.RmSessionListener;
import org.quickbundle.project.login.RmUserVo.RmUserSessionVo;
import org.quickbundle.tools.helper.RmPopulateHelper;
import org.quickbundle.tools.helper.RmSqlHelper;
import org.quickbundle.util.RmSequenceMap;
import org.springframework.jdbc.core.RowMapper;

/**
 * Web-service backed session service: lists online user sessions across all
 * cluster nodes and supports force-logout of a session, delegating to the node
 * that actually owns the HTTP session via a CXF JAX-WS proxy.
 */
@WebService(targetNamespace="http://login.project.quickbundle.org/", endpointInterface = "org.quickbundle.project.login.IRmSessionService")
public class RmSessionService implements IRmSessionService {

    /**
     * Builds a JAX-WS client proxy for the session service of another cluster node.
     * Returns null when no web-service URL is configured for the node.
     *
     * NOTE(review): the URL is read from getSelfNode() regardless of the
     * clusterNodeId argument — looks like it should resolve the target node's
     * URL instead; confirm against RmClusterConfig usage.
     */
    public static IRmSessionService getRemoteSessionService(String clusterNodeId) {
        String callWsUrl = RmClusterConfig.getSingleton().getSelfNode().get(RmClusterConfig.NodeKey.webServiceUrl.name());
        if(callWsUrl == null) {
            return null;
        }
        String address = callWsUrl + "RmSession";
        JaxWsProxyFactoryBean jw = new JaxWsProxyFactoryBean();
        jw.setServiceClass(IRmSessionService.class);
        jw.setAddress(address);
        Object obj = jw.create();
        IRmSessionService ss = (IRmSessionService) obj;
        return ss;
    }

    private IRmLoginService getLoginService() {
        return (IRmLoginService) RmBeanFactory.getBean(IRmLoginService.class.getName()); // obtain the Service bean from the factory; it is transaction-managed
    }

    /**
     * Counts users who are currently marked online (logged in within the last
     * 24 hours and without a logout record). queryCondition is currently unused.
     */
    @WebMethod(exclude=true)
    public int getRecordCount(String queryCondition) {
        // Timestamp of "now minus one day", truncated to second precision (yyyy-MM-dd HH:mm:ss).
        String yesterday = new Timestamp(System.currentTimeMillis() - 1000*60*60*24*1).toString().substring(0, 19);
        String sql = "select count(*) from RM_USER u "
            + "join RM_USER_ONLINE_RECORD uor on (u.id=uor.user_id and u.last_login_date=uor.login_time and uor.logout_time is null) "
            + "where u.usable_status='1' and u.login_status='1' and u.last_login_date>" + RmSqlHelper.getSqlDateStr(yesterday);
        return RmProjectHelper.getCommonServiceInstance().doQueryForInt(sql);
    }

    /**
     * Pages through online users, enriching each row with live HttpSession data.
     * Sessions hosted on this node are read directly; sessions on other cluster
     * nodes are fetched in batches via the remote session web service.
     * queryCondition/orderStr are currently unused; ordering is fixed to
     * last_login_date descending.
     */
    @WebMethod(exclude=true)
    public List<RmUserSessionVo> queryByCondition(String queryCondition, String orderStr, int startIndex, int size) {
        List<RmUserSessionVo> result = new ArrayList<RmUserVo.RmUserSessionVo>();
        final Map<String, HttpSession> mSession = RmSessionListener.getSessions();
        String yesterday = new Timestamp(System.currentTimeMillis() - 1000*60*60*24*1).toString().substring(0, 19);
        String sql = "select uor.cluster_node_id, uor.login_sign, uor.login_uuid, u.* from RM_USER u "
            + "join RM_USER_ONLINE_RECORD uor on (u.id=uor.user_id and u.last_login_date=uor.login_time and uor.logout_time is null) "
            + "where u.usable_status='1' and u.login_status='1' and u.last_login_date>" + RmSqlHelper.getSqlDateStr(yesterday)
            + " order by u.last_login_date desc";
        // sessionId -> RmUserSessionVo (insertion-ordered)
        final Map<String, RmUserSessionVo> mResult = new RmSequenceMap<String, RmUserSessionVo>();
        // session ids grouped by clusterNodeId, for sessions not hosted on this node
        final Map<String, List<String>> mOther = new RmSequenceMap<String, List<String>>();
        RmProjectHelper.getCommonServiceInstance().doQueryStartIndex(sql, new RowMapper() {
            public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
                RmUserSessionVo vo = new RmUserSessionVo();
                RmPopulateHelper.populate(vo, rs);
                vo.setSessionId(rs.getString("login_sign"));
                // host name of the server node the user logged in on
                vo.setClusterNodeId(rs.getString("cluster_node_id"));
                if(mSession.containsKey(vo.getSessionId())) { // session lives on this node
                    HttpSession session = mSession.get(vo.getSessionId());
                    populateSessionVo(vo, session);
                } else {
                    // remember it for a batched remote lookup, grouped by node
                    if(!mOther.containsKey(vo.getClusterNodeId())) {
                        mOther.put(vo.getClusterNodeId(), new ArrayList<String>());
                    }
                    mOther.get(vo.getClusterNodeId()).add(vo.getSessionId());
                }
                mResult.put(vo.getSessionId(), vo);
                return null;
            }
        }, startIndex, size);
        // query the other nodes' sessions via the SOA (web-service) interface
        for(String clusterNodeId : mOther.keySet()) {
            String[] sessionIds = mOther.get(clusterNodeId).toArray(new String[0]);
            try {
                IRmSessionService remoteSs = getRemoteSessionService(clusterNodeId);
                if(remoteSs == null) {
                    continue;
                }
                List<RmUserSessionVo> lBrother = remoteSs.listSessionLocal(sessionIds);
                for(RmUserSessionVo sourceVo : lBrother) {
                    if(sourceVo == null) {
                        continue;
                    }
                    RmUserSessionVo destinationVo = mResult.get(sourceVo.getSessionId());
                    if(sourceVo.getId() != null) {
                        // remote node returned full user data
                        RmPopulateHelper.populate(destinationVo, sourceVo);
                    } else {
                        // remote node returned only session-level data
                        populateSessionVo(destinationVo, sourceVo);
                    }
                }
            } catch (Exception e) {
                // best-effort: a failing node must not break the whole listing
                e.printStackTrace();
            }
        }
        for(String sessionId : mResult.keySet()) {
            result.add(mResult.get(sessionId));
        }
        return result;
    }

    /** Convenience wrapper: looks up a single local session, or null if absent. */
    public RmUserSessionVo findSessionLocal(String sessionId) {
        List<RmUserSessionVo> lvo = listSessionLocal(new String[]{sessionId});
        if(lvo.size() > 0) {
            return lvo.get(0);
        } else {
            return null;
        }
    }

    /** Copies live HttpSession attributes (timing info, org name) onto the VO. */
    private void populateSessionVo(RmUserSessionVo target, HttpSession session) {
        target.setSessionId(session.getId());
        // session creation time
        target.setCreationTime(session.getCreationTime());
        // last access time
        target.setLastAccessedTime(session.getLastAccessedTime());
        // max inactive interval, converted from seconds to milliseconds
        target.setMaxInactiveInterval(session.getMaxInactiveInterval() * 1000);
        { //custom owner org this login, cluster begin
            if(session.getAttribute(IGlobalConstants.RM_USER_VO) != null) {
                target.setParty_id_org_name(((org.quickbundle.project.login.RmUserVo)session.getAttribute(IGlobalConstants.RM_USER_VO)).getParty_id_org_name());
            }
        }
        //custom owner org this login, cluster end
    }

    /** Copies the session fields from a VO received from a remote node. */
    private void populateSessionVo(RmUserSessionVo target, RmUserSessionVo source) {
        target.setCreationTime(source.getCreationTime());
        target.setLastAccessedTime(source.getLastAccessedTime());
        target.setMaxInactiveInterval(source.getMaxInactiveInterval());
        target.setParty_id_org_name(source.getParty_id_org_name());
    }

    /**
     * Force-logs-out a user's session. In cluster mode, first tries to destroy
     * the session on the cluster node recorded as hosting it; falls back to a
     * local cleanup when remote delivery fails (or in single-node mode).
     * Always returns true.
     *
     * NOTE(review): user_id/session_id are concatenated into the SQL string —
     * SQL-injection risk if these values can come from untrusted input; should
     * use parameterized queries.
     */
    @WebMethod(exclude=true)
    public boolean forceLogoutUser(String user_id, String session_id) {
        String msg = "您被管理员强制退出了,请重新登录。如有帐号异常,请联系管理员。";
        boolean sendRemoteSuccess = false;
        if(RmConfig.getSingleton().isClusterMode()) {
            // destroy the session on the sibling cluster node that owns it
            if(user_id != null && user_id.length() > 0) {
                List<RmCommonVo> lvo = RmProjectHelper.getCommonServiceInstance().doQuery("select * from RM_USER_ONLINE_RECORD where user_id='" + user_id + "' and login_sign='" + session_id + "'");
                if(lvo.size() > 0) {
                    RmCommonVo vo = lvo.get(0);
                    String cluster_node_id = vo.getString("cluster_node_id");
                    IRmSessionService remoteSs = getRemoteSessionService(cluster_node_id);
                    if(remoteSs != null) {
                        int result = remoteSs.forceLogoutUserLocal(new String[]{session_id}, msg);
                        sendRemoteSuccess = result == 1;
                    }
                }
            }
        }
        if(!sendRemoteSuccess) {
            // single-node mode, or the remote call failed: perform the cleanup locally
            forceLogoutUserLocal(new String[]{session_id}, msg);
        }
        return true;
    }

    /**
     * Builds VOs for the given session ids from this node's live sessions.
     * The returned list is positionally aligned with sessionIds; entries are
     * null when the session does not exist on this node, and empty VOs when
     * the session exists but carries no logged-in user attribute.
     */
    public List<RmUserSessionVo> listSessionLocal(String[] sessionIds) {
        final Map<String, HttpSession> mSession = RmSessionListener.getSessions();
        List<RmUserSessionVo> result = new ArrayList<RmUserVo.RmUserSessionVo>();
        for(String sessionId : sessionIds) {
            RmUserSessionVo vo = null;
            HttpSession session = mSession.get(sessionId);
            if(session != null) {
                vo = new RmUserSessionVo();
                if(session.getAttribute(IGlobalConstants.RM_USER_VO) != null) {
                    org.quickbundle.project.login.RmUserVo userVo = (org.quickbundle.project.login.RmUserVo)session.getAttribute(IGlobalConstants.RM_USER_VO);
                    RmPopulateHelper.populate(vo, userVo);
                    populateSessionVo(vo, session);
                }
            }
            result.add(vo);
        }
        return result;
    }

    /**
     * Destroys the given sessions on this node: marks them force-logged-out,
     * runs the login service's user cleanup, and stores the notification
     * message in the session. Returns the number of sessions actually handled.
     */
    public int forceLogoutUserLocal(String[] sessionIds, String message) {
        int result = 0;
        for(String sessionId : sessionIds) {
            HttpSession session = RmSessionListener.getSessionById(sessionId);
            if(session != null) {
                session.setAttribute(IRmLoginConstants.LOGOUT_TYPE, IRmLoginConstants.LogoutType.FORCE_LOGOUT.value());
                getLoginService().executeDestroyUserInfo(session);
                session.setAttribute(IGlobalConstants.SystemPara.system_message.name(), message);
                result ++;
            }
        }
        return result;
    }
}
apache-2.0
cschenyuan/hive-hack
ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
50348
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.udf.ptf; import java.util.AbstractList; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.PTFOperator; import org.apache.hadoop.hive.ql.exec.PTFPartition; import org.apache.hadoop.hive.ql.exec.WindowFunctionInfo; import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator; import org.apache.hadoop.hive.ql.exec.PTFRollingPartition; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec; import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction; import org.apache.hadoop.hive.ql.plan.PTFDesc; import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef; import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef; 
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.plan.ptf.ValueBoundaryDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.ql.udf.generic.ISupportStreamingModeForWindowing;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

/**
 * Table function that evaluates the window functions of a windowing clause
 * over a PTF partition. Supports whole-partition (batch) evaluation via
 * {@link #execute}, streaming evaluation via processRow/finishPartition,
 * and iterator-style output via iterator(...).
 */
@SuppressWarnings("deprecation")
public class WindowingTableFunction extends TableFunctionEvaluator {

  // Per-partition state used only in streaming mode; built in initializeStreaming.
  StreamingState streamingState;
  // Rank-limit pushdown definition; non-null only when the plan carries a rank limit.
  RankLimit rnkLimitDef;

  /**
   * Batch mode: evaluates every window function over the full input
   * partition, then appends one output row per input row. Each output row
   * holds the window-function results first, followed by the input row's
   * columns.
   *
   * @param pItr iterator over the input partition
   * @param outP partition receiving the output rows
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP)
      throws HiveException {
    ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
    PTFPartition iPart = pItr.getPartition();
    StructObjectInspector inputOI;
    inputOI = (StructObjectInspector) iPart.getOutputOI();

    WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
    Order order = wTFnDef.getOrder().getExpressions().get(0).getOrder();

    for (WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
      boolean processWindow = processWindow(wFn);
      pItr.reset();
      if (!processWindow) {
        // No per-row window: one aggregate value for the whole partition.
        Object out = evaluateWindowFunction(wFn, pItr);
        if (!wFn.isPivotResult()) {
          // Replicate the single value for every row of the partition.
          out = new SameList(iPart.size(), out);
        }
        oColumns.add((List<?>) out);
      } else {
        // Per-row window: evaluate the function once per row over its range.
        oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart, order));
      }
    }

    /*
     * Output Columns in the following order
     * - the columns representing the output from Window Fns
     * - the input Rows columns
     */
    for (int i = 0; i < iPart.size(); i++) {
      ArrayList oRow = new ArrayList();
      Object iRow = iPart.getAt(i);

      for (int j = 0; j < oColumns.size(); j++) {
        oRow.add(oColumns.get(j).get(i));
      }

      for (StructField f : inputOI.getAllStructFieldRefs()) {
        oRow.add(inputOI.getStructFieldData(iRow, f));
      }

      outP.append(oRow);
    }
  }

  /**
   * Runs one window function over the rows produced by the given iterator
   * and returns the single aggregated result, copied to a standard object
   * via the function's output ObjectInspector.
   */
  Object evaluateWindowFunction(WindowFunctionDef wFn,
      PTFPartitionIterator<Object> pItr) throws HiveException {
    GenericUDAFEvaluator fEval = wFn.getWFnEval();
    Object[] args = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
    AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
    while (pItr.hasNext()) {
      Object row = pItr.next();
      int i = 0;
      if (wFn.getArgs() != null) {
        for (PTFExpressionDef arg : wFn.getArgs()) {
          args[i++] = arg.getExprEvaluator().evaluate(row);
        }
      }
      fEval.aggregate(aggBuffer, args);
    }
    Object out = fEval.evaluate(aggBuffer);
    out = ObjectInspectorUtils.copyToStandardObject(out, wFn.getOI());
    return out;
  }

  /**
   * Whether this function must be evaluated per-row over a window frame.
   * Returns false when there is no frame, or when the frame is unbounded on
   * both ends (a single whole-partition evaluation suffices).
   */
  private boolean processWindow(WindowFunctionDef wFn) {
    WindowFrameDef frame = wFn.getWindowFrame();
    if (frame == null) {
      return false;
    }
    if (frame.getStart().getAmt() == BoundarySpec.UNBOUNDED_AMOUNT
        && frame.getEnd().getAmt() == BoundarySpec.UNBOUNDED_AMOUNT) {
      return false;
    }
    return true;
  }

  /**
   * Whether the given window function's frame can be handled in streaming
   * mode under the configured window-cache limit.
   */
  private boolean streamingPossible(Configuration cfg, WindowFunctionDef wFnDef) {
    WindowFrameDef wdwFrame = wFnDef.getWindowFrame();
    WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFnDef
        .getName());

    // Functions that don't make use of the window frame can always stream.
    if (!wFnInfo.isSupportsWindow()) {
      return true;
    }

    BoundaryDef start = wdwFrame.getStart();
    BoundaryDef end = wdwFrame.getEnd();

    /*
     * Currently we are not handling dynamic sized windows implied by range
     * based windows.
     */
    if (start instanceof ValueBoundaryDef || end instanceof ValueBoundaryDef) {
      return false;
    }

    /*
     * Windows that are unbounded following don't benefit from Streaming.
     */
    if (end.getAmt() == BoundarySpec.UNBOUNDED_AMOUNT) {
      return false;
    }

    /*
     * let function decide if it can handle this special case.
     */
    if (start.getAmt() == BoundarySpec.UNBOUNDED_AMOUNT) {
      return true;
    }

    // The whole window must fit within the configured cache size.
    int windowLimit = HiveConf.getIntVar(cfg, ConfVars.HIVEJOINCACHESIZE);

    if (windowLimit < (start.getAmt() + end.getAmt() + 1)) {
      return false;
    }

    return true;
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator#canAcceptInputAsStream
   * ()
   *
   * WindowTableFunction supports streaming if all functions meet one of these
   * conditions: 1. The Function implements ISupportStreamingModeForWindowing 2.
   * Or returns a non null Object for the getWindowingEvaluator, that implements
   * ISupportStreamingModeForWindowing. 3. Is an invocation on a 'fixed' window.
   * So no Unbounded Preceding or Following.
   */
  // Returns {maxPrecedingSpan, maxFollowingSpan} when streaming is possible,
  // null otherwise; also sets the inherited canAcceptInputAsStream flag.
  @SuppressWarnings("resource")
  private int[] setCanAcceptInputAsStream(Configuration cfg) {

    canAcceptInputAsStream = false;

    // Lead/lag expressions need random access to the partition; no streaming.
    if (ptfDesc.getLlInfo().getLeadLagExprs() != null) {
      return null;
    }

    WindowTableFunctionDef tabDef = (WindowTableFunctionDef) getTableDef();
    int precedingSpan = 0;
    int followingSpan = 0;

    for (int i = 0; i < tabDef.getWindowFunctions().size(); i++) {
      WindowFunctionDef wFnDef = tabDef.getWindowFunctions().get(i);
      WindowFrameDef wdwFrame = wFnDef.getWindowFrame();
      GenericUDAFEvaluator fnEval = wFnDef.getWFnEval();
      boolean streamingPossible = streamingPossible(cfg, wFnDef);
      GenericUDAFEvaluator streamingEval = streamingPossible ?
          fnEval
          .getWindowingEvaluator(wdwFrame) : null;
      // Condition 1/2: the function itself can stream its results.
      if (streamingEval != null
          && streamingEval instanceof ISupportStreamingModeForWindowing) {
        continue;
      }
      BoundaryDef start = wdwFrame.getStart();
      BoundaryDef end = wdwFrame.getEnd();
      // Condition 3: a 'fixed' row-based window (bounded on both sides).
      if (!(end instanceof ValueBoundaryDef)
          && !(start instanceof ValueBoundaryDef)) {
        if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT
            && start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT
            && end.getDirection() != Direction.PRECEDING
            && start.getDirection() != Direction.FOLLOWING) {
          // Track the widest preceding/following spans across all functions.
          int amt = wdwFrame.getStart().getAmt();
          if (amt > precedingSpan) {
            precedingSpan = amt;
          }
          amt = wdwFrame.getEnd().getAmt();
          if (amt > followingSpan) {
            followingSpan = amt;
          }
          continue;
        }
      }
      // Neither streaming-capable nor a fixed window: no streaming at all.
      return null;
    }
    int windowLimit = HiveConf.getIntVar(cfg, ConfVars.HIVEJOINCACHESIZE);

    if (windowLimit < (followingSpan + precedingSpan + 1)) {
      return null;
    }

    canAcceptInputAsStream = true;
    return new int[] {precedingSpan, followingSpan};
  }

  /**
   * Prepares streaming evaluation: swaps in streaming evaluators where
   * available, unwraps pivoted-result list inspectors, sets up the optional
   * rank limit, and creates the StreamingState sized to the widest window.
   */
  @Override
  public void initializeStreaming(Configuration cfg,
      StructObjectInspector inputOI, boolean isMapSide) throws HiveException {
    int[] span = setCanAcceptInputAsStream(cfg);
    if (!canAcceptInputAsStream) {
      return;
    }
    WindowTableFunctionDef tabDef = (WindowTableFunctionDef) getTableDef();
    for (int i = 0; i < tabDef.getWindowFunctions().size(); i++) {
      WindowFunctionDef wFnDef = tabDef.getWindowFunctions().get(i);
      WindowFrameDef wdwFrame = wFnDef.getWindowFrame();
      GenericUDAFEvaluator fnEval = wFnDef.getWFnEval();

      GenericUDAFEvaluator streamingEval = fnEval
          .getWindowingEvaluator(wdwFrame);
      if (streamingEval != null) {
        wFnDef.setWFnEval(streamingEval);
        if (wFnDef.isPivotResult()) {
          // Streaming returns elements one by one, so expose the element OI.
          ListObjectInspector listOI = (ListObjectInspector) wFnDef.getOI();
          wFnDef.setOI(listOI.getListElementObjectInspector());
        }
      }
    }
    if (tabDef.getRankLimit() != -1) {
      rnkLimitDef = new RankLimit(tabDef.getRankLimit(),
          tabDef.getRankLimitFunction(), tabDef.getWindowFunctions());
    }
    streamingState = new StreamingState(cfg, inputOI, isMapSide, tabDef,
        span[0], span[1]);
  }

  /*
   *
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator#startPartition()
   */
  @Override
  public void startPartition() throws HiveException {
    // Streaming mode: clear the per-partition rolling state.
    WindowTableFunctionDef tabDef = (WindowTableFunctionDef) getTableDef();
    streamingState.reset(tabDef);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator#processRow(java
   * .lang.Object)
   *
   * - hand row to each Function, provided there are enough rows for Function's
   * window. - call getNextObject on each Function. - output as many rows as
   * possible, based on minimum sz of Output List
   */
  @Override
  public List<Object> processRow(Object row) throws HiveException {
    /*
     * Once enough rows have been output, there is no need to process input rows.
     */
    if (streamingState.rankLimitReached()) {
      return null;
    }
    streamingState.rollingPart.append(row);
    // Work with the copy of the row held by the rolling partition.
    row = streamingState.rollingPart
        .getAt(streamingState.rollingPart.size() - 1);
    WindowTableFunctionDef tabDef = (WindowTableFunctionDef) getTableDef();
    for (int i = 0; i < tabDef.getWindowFunctions().size(); i++) {
      WindowFunctionDef wFn = tabDef.getWindowFunctions().get(i);
      GenericUDAFEvaluator fnEval = wFn.getWFnEval();

      int a = 0;
      if (wFn.getArgs() != null) {
        for (PTFExpressionDef arg : wFn.getArgs()) {
          streamingState.funcArgs[i][a++] = arg.getExprEvaluator().evaluate(row);
        }
      }

      if (fnEval instanceof ISupportStreamingModeForWindowing) {
        // Streaming-capable function: feed the row, collect any ready result.
        fnEval.aggregate(streamingState.aggBuffers[i], streamingState.funcArgs[i]);
        Object out = ((ISupportStreamingModeForWindowing) fnEval)
            .getNextResult(streamingState.aggBuffers[i]);
        if (out != null) {
          // NULL_RESULT is the sentinel for "result is a SQL NULL".
          streamingState.fnOutputs[i]
              .add(out == ISupportStreamingModeForWindowing.NULL_RESULT ?
                  null : out);
        }
      } else {
        // Fixed-window function: evaluate once its full window is buffered.
        int rowToProcess = streamingState.rollingPart.rowToProcess(wFn);
        if (rowToProcess >= 0) {
          Range rng = getRange(wFn, rowToProcess, streamingState.rollingPart,
              streamingState.order);
          PTFPartitionIterator<Object> rItr = rng.iterator();
          PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, rItr);
          Object out = evaluateWindowFunction(wFn, rItr);
          streamingState.fnOutputs[i].add(out);
        }
      }
    }
    List<Object> oRows = new ArrayList<Object>();
    // Emit as many complete output rows as all functions can supply so far.
    while (true) {
      boolean hasRow = streamingState.hasOutputRow();
      if (!hasRow) {
        break;
      }
      oRows.add(streamingState.nextOutputRow());
    }

    return oRows.size() == 0 ? null : oRows;
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator#finishPartition()
   *
   * for fns that are not ISupportStreamingModeForWindowing give them the
   * remaining rows (rows whose span went beyond the end of the partition) for
   * rest of the functions invoke terminate.
   *
   * while numOutputRows < numInputRows for each Fn that doesn't have enough o/p
   * invoke getNextObj if there is no O/p then flag this as an error.
   */
  @Override
  public List<Object> finishPartition() throws HiveException {
    /*
     * Once enough rows have been output, there is no need to generate more output.
     */
    if (streamingState.rankLimitReached()) {
      return null;
    }

    WindowTableFunctionDef tabDef = (WindowTableFunctionDef) getTableDef();
    for (int i = 0; i < tabDef.getWindowFunctions().size(); i++) {
      WindowFunctionDef wFn = tabDef.getWindowFunctions().get(i);
      GenericUDAFEvaluator fnEval = wFn.getWFnEval();

      int numRowsRemaining = wFn.getWindowFrame().getEnd().getAmt();
      if (fnEval instanceof ISupportStreamingModeForWindowing) {
        fnEval.terminate(streamingState.aggBuffers[i]);

        WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFn
            .getName());
        if (!wFnInfo.isSupportsWindow()) {
          // Function knows how many of its results are still pending.
          numRowsRemaining = ((ISupportStreamingModeForWindowing) fnEval)
              .getRowsRemainingAfterTerminate();
        }

        if (numRowsRemaining != BoundarySpec.UNBOUNDED_AMOUNT) {
          // Drain results still buffered after terminate.
          while (numRowsRemaining > 0) {
            Object out = ((ISupportStreamingModeForWindowing) fnEval)
                .getNextResult(streamingState.aggBuffers[i]);
            if (out != null) {
              streamingState.fnOutputs[i]
                  .add(out == ISupportStreamingModeForWindowing.NULL_RESULT ?
                      null : out);
            }
            numRowsRemaining--;
          }
        }
      } else {
        // Evaluate trailing rows whose windows ran past the partition end.
        while (numRowsRemaining > 0) {
          int rowToProcess = streamingState.rollingPart.size() - numRowsRemaining;
          Range rng = getRange(wFn, rowToProcess, streamingState.rollingPart,
              streamingState.order);
          PTFPartitionIterator<Object> rItr = rng.iterator();
          PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, rItr);
          Object out = evaluateWindowFunction(wFn, rItr);
          streamingState.fnOutputs[i].add(out);
          numRowsRemaining--;
        }
      }
    }

    List<Object> oRows = new ArrayList<Object>();

    while (!streamingState.rollingPart.processedAllRows()
        && !streamingState.rankLimitReached()) {
      boolean hasRow = streamingState.hasOutputRow();

      if (!hasRow && !streamingState.rankLimitReached()) {
        throw new HiveException(
            "Internal Error: cannot generate all output rows for a Partition");
      }
      if (hasRow) {
        oRows.add(streamingState.nextOutputRow());
      }
    }

    return oRows.size() == 0 ?
        null : oRows;
  }

  // Output can be produced incrementally via iterator(...).
  @Override
  public boolean canIterateOutput() {
    return true;
  }

  /**
   * Iterator mode: pre-computes whole-partition and (where necessary)
   * pivoted function results eagerly, and defers windowed functions to the
   * returned WindowingIterator, which produces output rows one at a time.
   */
  @SuppressWarnings("rawtypes")
  @Override
  public Iterator<Object> iterator(PTFPartitionIterator<Object> pItr)
      throws HiveException {
    WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
    ArrayList<Object> output = new ArrayList<Object>();
    List<?>[] outputFromPivotFunctions = new List<?>[wTFnDef.getWindowFunctions().size()];
    ArrayList<Integer> wFnsWithWindows = new ArrayList<Integer>();
    PTFPartition iPart = pItr.getPartition();

    int i = 0;
    for (WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
      boolean processWindow = processWindow(wFn);
      pItr.reset();
      if (!processWindow && !wFn.isPivotResult()) {
        // Single whole-partition value; computed eagerly, shared by all rows.
        Object out = evaluateWindowFunction(wFn, pItr);
        output.add(out);
      } else if (wFn.isPivotResult()) {
        // Pivoted result: prefer a streaming evaluator that emits a result
        // per row with nothing pending after terminate.
        GenericUDAFEvaluator streamingEval =
            wFn.getWFnEval().getWindowingEvaluator(wFn.getWindowFrame());
        if (streamingEval != null
            && streamingEval instanceof ISupportStreamingModeForWindowing) {
          ISupportStreamingModeForWindowing strEval =
              (ISupportStreamingModeForWindowing) streamingEval;
          if (strEval.getRowsRemainingAfterTerminate() == 0) {
            wFn.setWFnEval(streamingEval);
            if (wFn.getOI() instanceof ListObjectInspector) {
              ListObjectInspector listOI = (ListObjectInspector) wFn.getOI();
              wFn.setOI(listOI.getListElementObjectInspector());
            }
            output.add(null);
            wFnsWithWindows.add(i);
          } else {
            // Fall back to eager evaluation of the full pivoted list.
            outputFromPivotFunctions[i] = (List) evaluateWindowFunction(wFn,
                pItr);
            output.add(null);
          }
        } else {
          outputFromPivotFunctions[i] = (List) evaluateWindowFunction(wFn,
              pItr);
          output.add(null);
        }
      } else {
        // Windowed function: evaluated lazily per row by the iterator.
        output.add(null);
        wFnsWithWindows.add(i);
      }
      i++;
    }

    i = 0;
    // Extend the template row with one slot per input column.
    for (i = 0; i < iPart.getOutputOI().getAllStructFieldRefs().size(); i++) {
      output.add(null);
    }

    if (wTFnDef.getRankLimit() != -1) {
      rnkLimitDef = new RankLimit(wTFnDef.getRankLimit(),
          wTFnDef.getRankLimitFunction(), wTFnDef.getWindowFunctions());
    }

    return new WindowingIterator(iPart, output, outputFromPivotFunctions,
        ArrayUtils.toPrimitive(wFnsWithWindows.toArray(new Integer[wFnsWithWindows.size()])));
  }

  /**
   * Resolver for the windowing table function. The output OI is supplied
   * externally (by the translator/deserializer) rather than derived here.
   */
  public static class WindowingTableFunctionResolver extends TableFunctionResolver {
    /*
     * OI of object constructed from output of Wdw Fns; before it is put
     * in the Wdw Processing Partition. Set by Translator/Deserializer.
     */
    private transient StructObjectInspector wdwProcessingOutputOI;

    public StructObjectInspector getWdwProcessingOutputOI() {
      return wdwProcessingOutputOI;
    }

    public void setWdwProcessingOutputOI(StructObjectInspector wdwProcessingOutputOI) {
      this.wdwProcessingOutputOI = wdwProcessingOutputOI;
    }

    @Override
    protected TableFunctionEvaluator createEvaluator(PTFDesc ptfDesc,
        PartitionedTableFunctionDef tDef) {
      return new WindowingTableFunction();
    }

    @Override
    public void setupOutputOI() throws SemanticException {
      setOutputOI(wdwProcessingOutputOI);
    }

    /*
     * Setup the OI based on the:
     * - Input TableDef's columns
     * - the Window Functions.
     */
    @Override
    public void initializeOutputOI() throws HiveException {
      setupOutputOI();
    }

    @Override
    public boolean transformsRawInput() {
      return false;
    }

    /*
     * (non-Javadoc)
     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#carryForwardNames()
     * Setting to true is correct only for special internal Functions.
     */
    @Override
    public boolean carryForwardNames() {
      return true;
    }

    /*
     * (non-Javadoc)
     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#getOutputNames()
     * Set to null only because carryForwardNames is true.
     */
    @Override
    public ArrayList<String> getOutputColumnNames() {
      return null;
    }

  }

  /**
   * Evaluates one window function once per row of the partition, each time
   * over that row's window range, and returns the per-row results in order.
   */
  ArrayList<Object> executeFnwithWindow(PTFDesc ptfDesc,
      WindowFunctionDef wFnDef,
      PTFPartition iPart,
      Order order)
      throws HiveException {
    ArrayList<Object> vals = new ArrayList<Object>();
    for (int i = 0; i < iPart.size(); i++) {
      // Compute the window range for row i, then aggregate over it.
      Range rng = getRange(wFnDef, i, iPart, order);
      PTFPartitionIterator<Object> rItr = rng.iterator();
      PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, rItr);
      Object out = evaluateWindowFunction(wFnDef, rItr);
      vals.add(out);
    }
    return vals;
  }

  /**
   * Computes the [start, end) row range of the window for the given current
   * row: row-number arithmetic for ROWS frames, value scanning for value
   * (RANGE) boundaries. The result is clamped to the partition bounds.
   */
  Range getRange(WindowFunctionDef wFnDef, int currRow, PTFPartition p, Order order)
      throws HiveException {
    BoundaryDef startB = wFnDef.getWindowFrame().getStart();
    BoundaryDef endB = wFnDef.getWindowFrame().getEnd();

    boolean rowFrame = true;

    if (startB instanceof ValueBoundaryDef || endB instanceof ValueBoundaryDef) {
      rowFrame = false;
    }

    int start, end;

    if (rowFrame) {
      start = getRowBoundaryStart(startB, currRow);
      end = getRowBoundaryEnd(endB, currRow, p);
    } else {
      // One scanner serves both ends; it is re-targeted via reset().
      ValueBoundaryScanner vbs;
      if (startB instanceof ValueBoundaryDef) {
        vbs = ValueBoundaryScanner.getScanner((ValueBoundaryDef) startB, order);
      } else {
        vbs = ValueBoundaryScanner.getScanner((ValueBoundaryDef) endB, order);
      }
      vbs.reset(startB);
      start = vbs.computeStart(currRow, p);
      vbs.reset(endB);
      end = vbs.computeEnd(currRow, p);
    }
    start = start < 0 ? 0 : start;
    end = end > p.size() ?
p.size() : end;
    return new Range(start, end, p);
  }

  /**
   * Start row index (inclusive) for a ROWS-frame boundary. May return a
   * negative index; the caller clamps it to 0.
   */
  int getRowBoundaryStart(BoundaryDef b, int currRow) throws HiveException {
    Direction d = b.getDirection();
    int amt = b.getAmt();
    switch(d) {
    case PRECEDING:
      if (amt == BoundarySpec.UNBOUNDED_AMOUNT) {
        return 0;
      }
      else {
        return currRow - amt;
      }
    case CURRENT:
      return currRow;
    case FOLLOWING:
      return currRow + amt;
    }
    throw new HiveException("Unknown Start Boundary Direction: " + d);
  }

  /**
   * End row index (exclusive) for a ROWS-frame boundary. May exceed the
   * partition size; the caller clamps it.
   */
  int getRowBoundaryEnd(BoundaryDef b, int currRow, PTFPartition p) throws HiveException {

    Direction d = b.getDirection();
    int amt = b.getAmt();
    switch(d) {
    case PRECEDING:
      if ( amt == 0 ) {
        // "0 PRECEDING" includes the current row.
        return currRow + 1;
      }
      return currRow - amt;
    case CURRENT:
      return currRow + 1;
    case FOLLOWING:
      if (amt == BoundarySpec.UNBOUNDED_AMOUNT) {
        return p.size();
      }
      else {
        return currRow + amt + 1;
      }
    }
    throw new HiveException("Unknown End Boundary Direction: " + d);
  }

  // Half-open [start, end) slice of a partition.
  static class Range
  {
    int start;
    int end;
    PTFPartition p;

    public Range(int start, int end, PTFPartition p)
    {
      super();
      this.start = start;
      this.end = end;
      this.p = p;
    }

    public PTFPartitionIterator<Object> iterator()
    {
      return p.range(start, end);
    }
  }

  /*
   * - starting from the given rowIdx scan in the given direction until a row's expr
   * evaluates to an amt that crosses the 'amt' threshold specified in the ValueBoundaryDef.
   */
  static abstract class ValueBoundaryScanner
  {
    BoundaryDef bndDef;
    Order order;
    PTFExpressionDef expressionDef;

    public ValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef)
    {
      this.bndDef = bndDef;
      this.order = order;
      this.expressionDef = expressionDef;
    }

    // Re-target this scanner at a different boundary of the same expression.
    public void reset(BoundaryDef bndDef) {
      this.bndDef = bndDef;
    }

    /*
     | Use  | Boundary1.type | Boundary1. amt | Sort Key | Order | Behavior                          |
     | Case |                |                |          |       |                                   |
     |------+----------------+----------------+----------+-------+-----------------------------------|
     | 1.   | PRECEDING      | UNB            | ANY      | ANY   | start = 0                         |
     | 2.   | PRECEDING      | unsigned int   | NULL     | ASC   | start = 0                         |
     | 3.   |                |                |          | DESC  | scan backwards to row R2          |
     |      |                |                |          |       | such that R2.sk is not null       |
     |      |                |                |          |       | start = R2.idx + 1                |
     | 4.   | PRECEDING      | unsigned int   | not NULL | DESC  | scan backwards until row R2       |
     |      |                |                |          |       | such that R2.sk - R.sk > amt      |
     |      |                |                |          |       | start = R2.idx + 1                |
     | 5.   | PRECEDING      | unsigned int   | not NULL | ASC   | scan backward until row R2        |
     |      |                |                |          |       | such that R.sk - R2.sk > bnd1.amt |
     |      |                |                |          |       | start = R2.idx + 1                |
     | 6.   | CURRENT ROW    |                | NULL     | ANY   | scan backwards until row R2       |
     |      |                |                |          |       | such that R2.sk is not null       |
     |      |                |                |          |       | start = R2.idx + 1                |
     | 7.   | CURRENT ROW    |                | not NULL | ANY   | scan backwards until row R2       |
     |      |                |                |          |       | such R2.sk != R.sk                |
     |      |                |                |          |       | start = R2.idx + 1                |
     | 8.   | FOLLOWING      | UNB            | ANY      | ANY   | Error                             |
     | 9.   | FOLLOWING      | unsigned int   | NULL     | DESC  | start = partition.size            |
     | 10.  |                |                |          | ASC   | scan forward until R2             |
     |      |                |                |          |       | such that R2.sk is not null       |
     |      |                |                |          |       | start = R2.idx                    |
     | 11.  | FOLLOWING      | unsigned int   | not NULL | DESC  | scan forward until row R2         |
     |      |                |                |          |       | such that R.sk - R2.sk > amt      |
     |      |                |                |          |       | start = R2.idx                    |
     | 12.  |                |                |          | ASC   | scan forward until row R2         |
     |      |                |                |          |       | such that R2.sk - R.sk > amt      |
     |------+----------------+----------------+----------+-------+-----------------------------------|
     */
    // Dispatch on the start boundary's direction (see table above).
    protected int computeStart(int rowIdx, PTFPartition p) throws HiveException {
      switch(bndDef.getDirection()) {
      case PRECEDING:
        return computeStartPreceding(rowIdx, p);
      case CURRENT:
        return computeStartCurrentRow(rowIdx, p);
      case FOLLOWING:
      default:
        return computeStartFollowing(rowIdx, p);
      }
    }

    /*
     *
     */
    protected int computeStartPreceding(int rowIdx, PTFPartition p) throws HiveException {
      int amt = bndDef.getAmt();

      // Use Case 1.
      if ( amt == BoundarySpec.UNBOUNDED_AMOUNT ) {
        return 0;
      }

      Object sortKey = computeValue(p.getAt(rowIdx));

      if ( sortKey == null ) {
        // Use Case 2.
        if ( order == Order.ASC ) {
          return 0;
        }
        else { // Use Case 3.
          while ( sortKey == null && rowIdx >= 0 ) {
            --rowIdx;
            if ( rowIdx >= 0 ) {
              sortKey = computeValue(p.getAt(rowIdx));
            }
          }
          return rowIdx+1;
        }
      }

      Object rowVal = sortKey;
      int r = rowIdx;

      // Use Case 4.
      if ( order == Order.DESC ) {
        while (r >= 0 && !isGreater(rowVal, sortKey, amt) ) {
          r--;
          if ( r >= 0 ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r + 1;
      }
      else { // Use Case 5.
        while (r >= 0 && !isGreater(sortKey, rowVal, amt) ) {
          r--;
          if ( r >= 0 ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r + 1;
      }
    }

    protected int computeStartCurrentRow(int rowIdx, PTFPartition p) throws HiveException {
      Object sortKey = computeValue(p.getAt(rowIdx));

      // Use Case 6.
      if ( sortKey == null ) {
        while ( sortKey == null && rowIdx >= 0 ) {
          --rowIdx;
          if ( rowIdx >= 0 ) {
            sortKey = computeValue(p.getAt(rowIdx));
          }
        }
        return rowIdx+1;
      }

      Object rowVal = sortKey;
      int r = rowIdx;

      // Use Case 7.
      while (r >= 0 && isEqual(rowVal, sortKey) ) {
        r--;
        if ( r >= 0 ) {
          rowVal = computeValue(p.getAt(r));
        }
      }
      return r + 1;
    }

    protected int computeStartFollowing(int rowIdx, PTFPartition p) throws HiveException {
      int amt = bndDef.getAmt();
      Object sortKey = computeValue(p.getAt(rowIdx));

      Object rowVal = sortKey;
      int r = rowIdx;

      if ( sortKey == null ) {
        // Use Case 9.
        if ( order == Order.DESC) {
          return p.size();
        }
        else { // Use Case 10.
          while (r < p.size() && rowVal == null ) {
            r++;
            if ( r < p.size() ) {
              rowVal = computeValue(p.getAt(r));
            }
          }
          return r;
        }
      }

      // Use Case 11.
      if ( order == Order.DESC) {
        while (r < p.size() && !isGreater(sortKey, rowVal, amt) ) {
          r++;
          if ( r < p.size() ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r;
      }
      else { // Use Case 12.
        while (r < p.size() && !isGreater(rowVal, sortKey, amt) ) {
          r++;
          if ( r < p.size() ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r;
      }
    }

    /*
     | Use  | Boundary2.type | Boundary2.amt | Sort Key | Order | Behavior                          |
     | Case |                |               |          |       |                                   |
     |------+----------------+---------------+----------+-------+-----------------------------------|
     | 1.
       | PRECEDING      | UNB           | ANY      | ANY   | Error                             |
     | 2.   | PRECEDING      | unsigned int  | NULL     | DESC  | end = partition.size()            |
     | 3.   |                |               |          | ASC   | end = 0                           |
     | 4.   | PRECEDING      | unsigned int  | not null | DESC  | scan backward until row R2        |
     |      |                |               |          |       | such that R2.sk - R.sk > bnd.amt  |
     |      |                |               |          |       | end = R2.idx + 1                  |
     | 5.   | PRECEDING      | unsigned int  | not null | ASC   | scan backward until row R2        |
     |      |                |               |          |       | such that R.sk - R2.sk > bnd.amt  |
     |      |                |               |          |       | end = R2.idx + 1                  |
     | 6.   | CURRENT ROW    |               | NULL     | ANY   | scan forward until row R2         |
     |      |                |               |          |       | such that R2.sk is not null       |
     |      |                |               |          |       | end = R2.idx                      |
     | 7.   | CURRENT ROW    |               | not null | ANY   | scan forward until row R2         |
     |      |                |               |          |       | such that R2.sk != R.sk           |
     |      |                |               |          |       | end = R2.idx                      |
     | 8.   | FOLLOWING      | UNB           | ANY      | ANY   | end = partition.size()            |
     | 9.   | FOLLOWING      | unsigned int  | NULL     | DESC  | end = partition.size()            |
     | 10.  |                |               |          | ASC   | scan forward until row R2         |
     |      |                |               |          |       | such that R2.sk is not null       |
     |      |                |               |          |       | end = R2.idx                      |
     | 11.  | FOLLOWING      | unsigned int  | not NULL | DESC  | scan forward until row R2         |
     |      |                |               |          |       | such R.sk - R2.sk > bnd.amt       |
     |      |                |               |          |       | end = R2.idx                      |
     | 12.  |                |               |          | ASC   | scan forward until row R2         |
     |      |                |               |          |       | such R2.sk - R.sk > bnd.amt       |
     |      |                |               |          |       | end = R2.idx                      |
     |------+----------------+---------------+----------+-------+-----------------------------------|
     */
    // Dispatch on the end boundary's direction (see table above).
    protected int computeEnd(int rowIdx, PTFPartition p) throws HiveException {
      switch(bndDef.getDirection()) {
      case PRECEDING:
        return computeEndPreceding(rowIdx, p);
      case CURRENT:
        return computeEndCurrentRow(rowIdx, p);
      case FOLLOWING:
      default:
        return computeEndFollowing(rowIdx, p);
      }
    }

    protected int computeEndPreceding(int rowIdx, PTFPartition p) throws HiveException {
      int amt = bndDef.getAmt();
      // Use Case 1.
      // amt == UNBOUNDED, is caught during translation

      Object sortKey = computeValue(p.getAt(rowIdx));

      if ( sortKey == null ) {
        // Use Case 2.
        if ( order == Order.DESC ) {
          return p.size();
        }
        else { // Use Case 3.
          return 0;
        }
      }

      Object rowVal = sortKey;
      int r = rowIdx;

      // Use Case 4.
      if ( order == Order.DESC ) {
        while (r >= 0 && !isGreater(rowVal, sortKey, amt) ) {
          r--;
          if ( r >= 0 ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r + 1;
      }
      else {
        // Use Case 5.
        while (r >= 0 && !isGreater(sortKey, rowVal, amt) ) {
          r--;
          if ( r >= 0 ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r + 1;
      }
    }

    protected int computeEndCurrentRow(int rowIdx, PTFPartition p) throws HiveException {
      Object sortKey = computeValue(p.getAt(rowIdx));

      // Use Case 6.
      if ( sortKey == null ) {
        while ( sortKey == null && rowIdx < p.size() ) {
          ++rowIdx;
          if ( rowIdx < p.size() ) {
            sortKey = computeValue(p.getAt(rowIdx));
          }
        }
        return rowIdx;
      }

      Object rowVal = sortKey;
      int r = rowIdx;

      // Use Case 7.
      while (r < p.size() && isEqual(sortKey, rowVal) ) {
        r++;
        if ( r < p.size() ) {
          rowVal = computeValue(p.getAt(r));
        }
      }
      return r;
    }

    protected int computeEndFollowing(int rowIdx, PTFPartition p) throws HiveException {
      int amt = bndDef.getAmt();

      // Use Case 8.
      if ( amt == BoundarySpec.UNBOUNDED_AMOUNT ) {
        return p.size();
      }

      Object sortKey = computeValue(p.getAt(rowIdx));
      Object rowVal = sortKey;
      int r = rowIdx;

      if ( sortKey == null ) {
        // Use Case 9.
        if ( order == Order.DESC) {
          return p.size();
        }
        else {
          // Use Case 10.
          while (r < p.size() && rowVal == null ) {
            r++;
            if ( r < p.size() ) {
              rowVal = computeValue(p.getAt(r));
            }
          }
          return r;
        }
      }

      // Use Case 11.
      if ( order == Order.DESC) {
        while (r < p.size() && !isGreater(sortKey, rowVal, amt) ) {
          r++;
          if ( r < p.size() ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r;
      }
      else {
        // Use Case 12.
        while (r < p.size() && !isGreater(rowVal, sortKey, amt) ) {
          r++;
          if ( r < p.size() ) {
            rowVal = computeValue(p.getAt(r));
          }
        }
        return r;
      }
    }

    // Evaluates the order-by expression on the row and copies the result to
    // a standard object.
    public Object computeValue(Object row) throws HiveException {
      Object o = expressionDef.getExprEvaluator().evaluate(row);
      return ObjectInspectorUtils.copyToStandardObject(o, expressionDef.getOI());
    }

    // Type-specific: true when v1 exceeds v2 by more than amt.
    public abstract boolean isGreater(Object v1, Object v2, int amt);

    // Type-specific equality of two sort-key values.
    public abstract boolean isEqual(Object v1, Object v2);

    // Picks a scanner implementation for the boundary expression's primitive
    // category. Only the listed categories are supported; others raise.
    @SuppressWarnings("incomplete-switch")
    public static ValueBoundaryScanner getScanner(ValueBoundaryDef vbDef, Order order)
        throws HiveException {
      PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) vbDef.getOI();
      switch(pOI.getPrimitiveCategory()) {
      case BYTE:
      case INT:
      case LONG:
      case SHORT:
      case TIMESTAMP:
        return new LongValueBoundaryScanner(vbDef, order, vbDef.getExpressionDef());
      case DOUBLE:
      case FLOAT:
        return new DoubleValueBoundaryScanner(vbDef, order, vbDef.getExpressionDef());
      case DECIMAL:
        return new HiveDecimalValueBoundaryScanner(vbDef, order, vbDef.getExpressionDef());
      case STRING:
        return new StringValueBoundaryScanner(vbDef, order, vbDef.getExpressionDef());
      }
      throw new HiveException(
          String.format("Internal Error: attempt to setup a Window for datatype %s",
              pOI.getPrimitiveCategory()));
    }
  }

  /**
   * Boundary scanner for integral and timestamp sort keys
   * (BYTE/INT/LONG/SHORT/TIMESTAMP).
   */
  public static class LongValueBoundaryScanner extends ValueBoundaryScanner {
    public LongValueBoundaryScanner(BoundaryDef bndDef, Order order,
        PTFExpressionDef expressionDef) {
      super(bndDef,order,expressionDef);
    }

    // True when v1 exceeds v2 by more than amt.
    @Override
    public boolean isGreater(Object v1, Object v2, int amt) {
      long l1 = PrimitiveObjectInspectorUtils.getLong(v1,
          (PrimitiveObjectInspector) expressionDef.getOI());
      long l2 = PrimitiveObjectInspectorUtils.getLong(v2,
          (PrimitiveObjectInspector) expressionDef.getOI());
      return (l1 -l2) > amt;
    }

    @Override
    public boolean isEqual(Object v1, Object v2) {
      long l1 = PrimitiveObjectInspectorUtils.getLong(v1,
          (PrimitiveObjectInspector) expressionDef.getOI());
      long l2 =
PrimitiveObjectInspectorUtils.getLong(v2, (PrimitiveObjectInspector) expressionDef.getOI()); return l1 == l2; } } public static class DoubleValueBoundaryScanner extends ValueBoundaryScanner { public DoubleValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef) { super(bndDef,order,expressionDef); } @Override public boolean isGreater(Object v1, Object v2, int amt) { double d1 = PrimitiveObjectInspectorUtils.getDouble(v1, (PrimitiveObjectInspector) expressionDef.getOI()); double d2 = PrimitiveObjectInspectorUtils.getDouble(v2, (PrimitiveObjectInspector) expressionDef.getOI()); return (d1 -d2) > amt; } @Override public boolean isEqual(Object v1, Object v2) { double d1 = PrimitiveObjectInspectorUtils.getDouble(v1, (PrimitiveObjectInspector) expressionDef.getOI()); double d2 = PrimitiveObjectInspectorUtils.getDouble(v2, (PrimitiveObjectInspector) expressionDef.getOI()); return d1 == d2; } } public static class HiveDecimalValueBoundaryScanner extends ValueBoundaryScanner { public HiveDecimalValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef) { super(bndDef,order,expressionDef); } @Override public boolean isGreater(Object v1, Object v2, int amt) { HiveDecimal d1 = PrimitiveObjectInspectorUtils.getHiveDecimal(v1, (PrimitiveObjectInspector) expressionDef.getOI()); HiveDecimal d2 = PrimitiveObjectInspectorUtils.getHiveDecimal(v2, (PrimitiveObjectInspector) expressionDef.getOI()); if ( d1 == null || d2 == null ) { return false; } return d1.subtract(d2).intValue() > amt; } @Override public boolean isEqual(Object v1, Object v2) { HiveDecimal d1 = PrimitiveObjectInspectorUtils.getHiveDecimal(v1, (PrimitiveObjectInspector) expressionDef.getOI()); HiveDecimal d2 = PrimitiveObjectInspectorUtils.getHiveDecimal(v2, (PrimitiveObjectInspector) expressionDef.getOI()); if ( d1 == null || d2 == null ) { return false; } return d1.equals(d2); } } public static class StringValueBoundaryScanner extends ValueBoundaryScanner { 
public StringValueBoundaryScanner(BoundaryDef bndDef, Order order, PTFExpressionDef expressionDef) { super(bndDef,order,expressionDef); } @Override public boolean isGreater(Object v1, Object v2, int amt) { String s1 = PrimitiveObjectInspectorUtils.getString(v1, (PrimitiveObjectInspector) expressionDef.getOI()); String s2 = PrimitiveObjectInspectorUtils.getString(v2, (PrimitiveObjectInspector) expressionDef.getOI()); return s1 != null && s2 != null && s1.compareTo(s2) > 0; } @Override public boolean isEqual(Object v1, Object v2) { String s1 = PrimitiveObjectInspectorUtils.getString(v1, (PrimitiveObjectInspector) expressionDef.getOI()); String s2 = PrimitiveObjectInspectorUtils.getString(v2, (PrimitiveObjectInspector) expressionDef.getOI()); return (s1 == null && s2 == null) || s1.equals(s2); } } public static class SameList<E> extends AbstractList<E> { int sz; E val; public SameList(int sz, E val) { this.sz = sz; this.val = val; } @Override public E get(int index) { return val; } @Override public int size() { return sz; } } public class WindowingIterator implements Iterator<Object> { ArrayList<Object> output; List<?>[] outputFromPivotFunctions; int currIdx; PTFPartition iPart; /* * these are the functions that have a Window. * Fns w/o a Window have already been processed. 
*/ int[] wFnsToProcess; WindowTableFunctionDef wTFnDef; Order order; PTFDesc ptfDesc; StructObjectInspector inputOI; AggregationBuffer[] aggBuffers; Object[][] args; RankLimit rnkLimit; WindowingIterator(PTFPartition iPart, ArrayList<Object> output, List<?>[] outputFromPivotFunctions, int[] wFnsToProcess) { this.iPart = iPart; this.output = output; this.outputFromPivotFunctions = outputFromPivotFunctions; this.wFnsToProcess = wFnsToProcess; this.currIdx = 0; wTFnDef = (WindowTableFunctionDef) getTableDef(); order = wTFnDef.getOrder().getExpressions().get(0).getOrder(); ptfDesc = getQueryDef(); inputOI = iPart.getOutputOI(); aggBuffers = new AggregationBuffer[wTFnDef.getWindowFunctions().size()]; args = new Object[wTFnDef.getWindowFunctions().size()][]; try { for (int j : wFnsToProcess) { WindowFunctionDef wFn = wTFnDef.getWindowFunctions().get(j); aggBuffers[j] = wFn.getWFnEval().getNewAggregationBuffer(); args[j] = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()]; } } catch (HiveException he) { throw new RuntimeException(he); } if ( WindowingTableFunction.this.rnkLimitDef != null ) { rnkLimit = new RankLimit(WindowingTableFunction.this.rnkLimitDef); } } @Override public boolean hasNext() { if ( rnkLimit != null && rnkLimit.limitReached() ) { return false; } return currIdx < iPart.size(); } @Override public Object next() { int i; for(i = 0; i < outputFromPivotFunctions.length; i++ ) { if ( outputFromPivotFunctions[i] != null ) { output.set(i, outputFromPivotFunctions[i].get(currIdx)); } } try { for (int j : wFnsToProcess) { WindowFunctionDef wFn = wTFnDef.getWindowFunctions().get(j); if (wFn.getWFnEval() instanceof ISupportStreamingModeForWindowing) { Object iRow = iPart.getAt(currIdx); int a = 0; if (wFn.getArgs() != null) { for (PTFExpressionDef arg : wFn.getArgs()) { args[j][a++] = arg.getExprEvaluator().evaluate(iRow); } } wFn.getWFnEval().aggregate(aggBuffers[j], args[j]); Object out = ((ISupportStreamingModeForWindowing) wFn.getWFnEval()) 
.getNextResult(aggBuffers[j]); out = ObjectInspectorUtils.copyToStandardObject(out, wFn.getOI()); output.set(j, out); } else { Range rng = getRange(wFn, currIdx, iPart, order); PTFPartitionIterator<Object> rItr = rng.iterator(); PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, rItr); output.set(j, evaluateWindowFunction(wFn, rItr)); } } Object iRow = iPart.getAt(currIdx); i = wTFnDef.getWindowFunctions().size(); for (StructField f : inputOI.getAllStructFieldRefs()) { output.set(i++, inputOI.getStructFieldData(iRow, f)); } } catch (HiveException he) { throw new RuntimeException(he); } if ( rnkLimit != null ) { rnkLimit.updateRank(output); } currIdx++; return output; } @Override public void remove() { throw new UnsupportedOperationException(); } } class StreamingState { PTFRollingPartition rollingPart; List<Object>[] fnOutputs; AggregationBuffer[] aggBuffers; Object[][] funcArgs; Order order; RankLimit rnkLimit; @SuppressWarnings("unchecked") StreamingState(Configuration cfg, StructObjectInspector inputOI, boolean isMapSide, WindowTableFunctionDef tabDef, int precedingSpan, int followingSpan) throws HiveException { SerDe serde = isMapSide ? tabDef.getInput().getOutputShape().getSerde() : tabDef.getRawInputShape().getSerde(); StructObjectInspector outputOI = isMapSide ? tabDef.getInput() .getOutputShape().getOI() : tabDef.getRawInputShape().getOI(); rollingPart = PTFPartition.createRolling(cfg, serde, inputOI, outputOI, precedingSpan, followingSpan); order = tabDef.getOrder().getExpressions().get(0).getOrder(); int numFns = tabDef.getWindowFunctions().size(); fnOutputs = new ArrayList[numFns]; aggBuffers = new AggregationBuffer[numFns]; funcArgs = new Object[numFns][]; for (int i = 0; i < numFns; i++) { fnOutputs[i] = new ArrayList<Object>(); WindowFunctionDef wFn = tabDef.getWindowFunctions().get(i); funcArgs[i] = new Object[wFn.getArgs() == null ? 
0 : wFn.getArgs().size()]; aggBuffers[i] = wFn.getWFnEval().getNewAggregationBuffer(); } if ( WindowingTableFunction.this.rnkLimitDef != null ) { rnkLimit = new RankLimit(WindowingTableFunction.this.rnkLimitDef); } } void reset(WindowTableFunctionDef tabDef) throws HiveException { int numFns = tabDef.getWindowFunctions().size(); rollingPart.reset(); for (int i = 0; i < fnOutputs.length; i++) { fnOutputs[i].clear(); } for (int i = 0; i < numFns; i++) { WindowFunctionDef wFn = tabDef.getWindowFunctions().get(i); aggBuffers[i] = wFn.getWFnEval().getNewAggregationBuffer(); } if ( rnkLimit != null ) { rnkLimit.reset(); } } boolean hasOutputRow() { if ( rankLimitReached() ) { return false; } for (int i = 0; i < fnOutputs.length; i++) { if (fnOutputs[i].size() == 0) { return false; } } return true; } List<Object> nextOutputRow() throws HiveException { List<Object> oRow = new ArrayList<Object>(); Object iRow = rollingPart.nextOutputRow(); int i = 0; for (; i < fnOutputs.length; i++) { oRow.add(fnOutputs[i].remove(0)); } for (StructField f : rollingPart.getOutputOI().getAllStructFieldRefs()) { oRow.add(rollingPart.getOutputOI().getStructFieldData(iRow, f)); } if ( rnkLimit != null ) { rnkLimit.updateRank(oRow); } return oRow; } boolean rankLimitReached() { return rnkLimit != null && rnkLimit.limitReached(); } } static class RankLimit { /* * Rows with a rank <= rankLimit are output. * Only the first row with rank = rankLimit is output. */ final int rankLimit; /* * the rankValue of the last row output. */ int currentRank; /* * index of Rank function. 
*/ final int rankFnIdx; final PrimitiveObjectInspector fnOutOI; RankLimit(int rankLimit, int rankFnIdx, List<WindowFunctionDef> wdwFnDefs) { this.rankLimit = rankLimit; this.rankFnIdx = rankFnIdx; this.fnOutOI = (PrimitiveObjectInspector) wdwFnDefs.get(rankFnIdx).getOI(); this.currentRank = -1; } RankLimit(RankLimit rl) { this.rankLimit = rl.rankLimit; this.rankFnIdx = rl.rankFnIdx; this.fnOutOI = rl.fnOutOI; this.currentRank = -1; } void reset() { this.currentRank = -1; } void updateRank(List<Object> oRow) { int r = (Integer) fnOutOI.getPrimitiveJavaObject(oRow.get(rankFnIdx)); if ( r > currentRank ) { currentRank = r; } } boolean limitReached() { return currentRank >= rankLimit; } } }
apache-2.0
pperalta/ignite
modules/core/src/main/java/org/apache/ignite/internal/pagemem/wal/record/delta/DataPageInsertFragmentRecord.java
2521
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.pagemem.wal.record.delta; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.processors.cache.database.tree.io.DataPageIO; /** * Insert fragment to data page record. */ public class DataPageInsertFragmentRecord extends PageDeltaRecord { /** Link to the last entry fragment. */ private final long lastLink; /** Actual fragment data. */ private final byte[] payload; /** * @param cacheId Cache ID. * @param pageId Page ID. * @param payload Fragment payload. * @param lastLink Link to the last entry fragment. 
*/ public DataPageInsertFragmentRecord( final int cacheId, final long pageId, final byte[] payload, final long lastLink ) { super(cacheId, pageId); this.lastLink = lastLink; this.payload = payload; } /** {@inheritDoc} */ @Override public void applyDelta(PageMemory pageMem, long pageAddr) throws IgniteCheckedException { DataPageIO io = DataPageIO.VERSIONS.forPage(pageAddr); io.addRowFragment(pageAddr, payload, lastLink, pageMem.pageSize()); } /** {@inheritDoc} */ @Override public RecordType type() { return RecordType.DATA_PAGE_INSERT_FRAGMENT_RECORD; } /** * @return Fragment payload size. */ public int payloadSize() { return payload.length; } /** * @return Fragment payload. */ public byte[] payload() { return payload; } /** * @return Link to the last entry fragment. */ public long lastLink() { return lastLink; } }
apache-2.0
Nmishin/jagger
chassis/core/src/main/java/com/griddynamics/jagger/engine/e1/services/JaggerPlace.java
653
package com.griddynamics.jagger.engine.e1.services; /** * Created with IntelliJ IDEA. * User: kgribov * Date: 12/10/13 * Time: 4:17 PM * To change this template use File | Settings | File Templates. */ public enum JaggerPlace { INVOCATION_LISTENER("InvocationListener"), TEST_LISTENER("TestListener"), TEST_GROUP_LISTENER("TestGroupListener"), LOAD_SCENARIO_LISTENER("LoadScenarioListener"), TEST_GROUP_DECISION_MAKER_LISTENER("TestGroupDecisionMakerListener"); private String name; JaggerPlace(String name){ this.name = name; } @Override public String toString() { return name; } }
apache-2.0
SlimSaber/KernelAdiutor
app/src/main/java/com/grarak/kerneladiutor/fragments/tools/RecoveryFragment.java
8338
/* * Copyright (C) 2015 Willi Ye * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.grarak.kerneladiutor.fragments.tools; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.support.v7.widget.AppCompatSpinner; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.TextView; import com.grarak.kerneladiutor.FileBrowserActivity; import com.grarak.kerneladiutor.R; import com.grarak.kerneladiutor.elements.cards.CardViewItem; import com.grarak.kerneladiutor.fragments.RecyclerViewFragment; import com.grarak.kerneladiutor.utils.Utils; import com.grarak.kerneladiutor.utils.root.RootFile; import com.grarak.kerneladiutor.utils.root.RootUtils; import com.grarak.kerneladiutor.utils.tools.Recovery; import java.io.File; import java.util.ArrayList; import java.util.List; /** * Created by willi on 11.04.15. 
*/ public class RecoveryFragment extends RecyclerViewFragment { private AppCompatSpinner mRecoverySpinner; private final List<Recovery> mCommands = new ArrayList<>(); @Override public boolean showApplyOnBoot() { return false; } @Override public RecyclerView getRecyclerView() { View view = getParentView(R.layout.recovery_recyclerview); ArrayAdapter<String> dataAdapter = new ArrayAdapter<>(getActivity(), R.layout.simple_spinner_item, getResources().getStringArray(R.array.recovery_variants)); dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mRecoverySpinner = (AppCompatSpinner) view.findViewById(R.id.recovery_spinner); mRecoverySpinner.setAdapter(dataAdapter); mRecoverySpinner.setSelection(Utils.getBoolean("twrp", false, getActivity()) ? 1 : 0); mRecoverySpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { Utils.saveBoolean("twrp", position == 1, getActivity()); } @Override public void onNothingSelected(AdapterView<?> parent) { } }); view.findViewById(R.id.wipe_data_button).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { addAction(Recovery.RECOVERY_COMMAND.WIPE_DATA, null); } }); view.findViewById(R.id.wipe_cache_button).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { addAction(Recovery.RECOVERY_COMMAND.WIPE_CACHE, null); } }); view.findViewById(R.id.flash_zip_button).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Bundle args = new Bundle(); args.putString(FileBrowserActivity.FILE_TYPE_ARG, "zip"); Intent intent = new Intent(getActivity(), FileBrowserActivity.class); intent.putExtras(args); startActivityForResult(intent, 0); } }); return (RecyclerView) view.findViewById(R.id.recycler_view); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { 
super.onActivityResult(requestCode, resultCode, data); if (data != null) addAction(Recovery.RECOVERY_COMMAND.FLASH_ZIP, new File(data.getExtras().getString("path"))); } private void addAction(Recovery.RECOVERY_COMMAND recovery_command, File file) { String description = null; switch (recovery_command) { case WIPE_DATA: description = getString(R.string.wipe_data); break; case WIPE_CACHE: description = getString(R.string.wipe_cache); break; case FLASH_ZIP: description = file.getAbsolutePath(); if (!description.endsWith(".zip")) { Utils.toast(getString(R.string.went_wrong), getActivity()); return; } break; } final Recovery recovery = new Recovery(recovery_command, new File(description)); mCommands.add(recovery); View view = LayoutInflater.from(getActivity()).inflate(R.layout.recovery_actionview, null, false); final CardViewItem.DCardView mActionCard = new CardViewItem.DCardView(); ((TextView) view.findViewById(R.id.action_text)).setText(description); view.findViewById(R.id.delete_button).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { removeView(mActionCard); mCommands.remove(recovery); } }); mActionCard.setView(view); addView(mActionCard); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.recovery_menu, menu); super.onCreateOptionsMenu(menu, inflater); } @Override public boolean onOptionsItemSelected(MenuItem item) { String command = null; switch (item.getItemId()) { case R.id.menu_flash_now: if (mCommands.size() < 1) { Utils.toast(getString(R.string.add_action_first), getActivity()); break; } Utils.confirmDialog(null, getString(R.string.flash_now_confirm), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { String file = "/cache/recovery/" + mCommands.get(0).getFile(mRecoverySpinner .getSelectedItemPosition() == 1 ? 
Recovery.RECOVERY.TWRP : Recovery.RECOVERY.CWM); RootFile recoveryFile = new RootFile(file); recoveryFile.delete(); for (Recovery commands : mCommands) { for (String command : commands.getCommands(mRecoverySpinner.getSelectedItemPosition() == 1 ? Recovery.RECOVERY.TWRP : Recovery.RECOVERY.CWM)) recoveryFile.write(command, true); } RootUtils.runCommand("reboot recovery"); } }, getActivity()); break; case R.id.menu_reboot: command = "reboot"; break; case R.id.menu_reboot_recovery: command = "reboot recovery"; break; case R.id.menu_reboot_bootloader: command = "reboot bootloader"; break; case R.id.menu_reboot_soft: command = "pkill zygote"; break; case R.id.menu_reboot_download: command = "reboot download"; break; } if (command != null) { final String c = command; Utils.confirmDialog(null, getString(R.string.confirm), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { RootUtils.runCommand(c); } }, getActivity()); } return true; } }
apache-2.0
life-beam/j2objc
jre_emul/android/platform/external/icu/android_icu4j/src/main/tests/android/icu/dev/test/bidi/BidiFmwk.java
18048
/* GENERATED SOURCE. DO NOT MODIFY. */ // © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html#License /* ******************************************************************************* * Copyright (C) 2001-2013, International Business Machines * Corporation and others. All Rights Reserved. ******************************************************************************* */ package android.icu.dev.test.bidi; import java.util.Arrays; import android.icu.dev.test.TestFmwk; import android.icu.impl.Utility; import android.icu.lang.UCharacter; import android.icu.text.Bidi; import android.icu.text.BidiRun; import android.icu.util.VersionInfo; /** * A base class for the Bidi test suite. * * @author Lina Kemmel, Matitiahu Allouche */ public class BidiFmwk extends TestFmwk { protected static final char[] charFromDirProp = { /* L R EN ES ET AN CS B S WS ON */ 0x61, 0x5d0, 0x30, 0x2f, 0x25, 0x660, 0x2c, 0xa, 0x9, 0x20, 0x26, /* LRE LRO AL RLE RLO PDF NSM BN */ 0x202a, 0x202d, 0x627, 0x202b, 0x202e, 0x202c, 0x308, 0x200c, /* FSI LRI RLI PDI */ 0x2068, 0x2066, 0x2067, 0x2069 /* new in Unicode 6.3/ICU 52 */ }; static { initCharFromDirProps(); } private static void initCharFromDirProps() { final VersionInfo ucd401 = VersionInfo.getInstance(4, 0, 1, 0); VersionInfo ucdVersion = VersionInfo.getInstance(0, 0, 0, 0); /* lazy initialization */ if (ucdVersion.getMajor() > 0) { return; } ucdVersion = UCharacter.getUnicodeVersion(); if (ucdVersion.compareTo(ucd401) >= 0) { /* Unicode 4.0.1 changes bidi classes for +-/ */ /* change ES character from / to + */ charFromDirProp[TestData.ES] = 0x2b; } } protected boolean assertEquals(String message, String expected, String actual, String src, String mode, String option, String level) { if (expected == null || actual == null) { return super.assertEquals(message, expected, actual); } if (expected.equals(actual)) { return true; } errln(""); errcontln(message); if (src != null) { 
errcontln("source : \"" + Utility.escape(src) + "\""); } errcontln("expected : \"" + Utility.escape(expected) + "\""); errcontln("actual : \"" + Utility.escape(actual) + "\""); if (mode != null) { errcontln("reordering mode : " + mode); } if (option != null) { errcontln("reordering option : " + option); } if (level != null) { errcontln("paragraph level : " + level); } return false; } protected static String valueOf(int[] array) { StringBuffer result = new StringBuffer(array.length * 4); for (int i = 0; i < array.length; i++) { result.append(' '); result.append(array[i]); } return result.toString(); } private static final String[] modeDescriptions = { "REORDER_DEFAULT", "REORDER_NUMBERS_SPECIAL", "REORDER_GROUP_NUMBERS_WITH_R", "REORDER_RUNS_ONLY", "REORDER_INVERSE_NUMBERS_AS_L", "REORDER_INVERSE_LIKE_DIRECT", "REORDER_INVERSE_FOR_NUMBERS_SPECIAL" }; protected static String modeToString(int mode) { if (mode < Bidi.REORDER_DEFAULT || mode > Bidi.REORDER_INVERSE_FOR_NUMBERS_SPECIAL) { return "INVALID"; } return modeDescriptions[mode]; } private static final short SETPARA_MASK = Bidi.OPTION_INSERT_MARKS | Bidi.OPTION_REMOVE_CONTROLS | Bidi.OPTION_STREAMING; private static final String[] setParaDescriptions = { "OPTION_INSERT_MARKS", "OPTION_REMOVE_CONTROLS", "OPTION_STREAMING" }; protected static String spOptionsToString(int option) { return optionToString(option, SETPARA_MASK, setParaDescriptions); } private static final int MAX_WRITE_REORDERED_OPTION = Bidi.OUTPUT_REVERSE; private static final int REORDER_MASK = (MAX_WRITE_REORDERED_OPTION << 1) - 1; private static final String[] writeReorderedDescriptions = { "KEEP_BASE_COMBINING", // 1 "DO_MIRRORING", // 2 "INSERT_LRM_FOR_NUMERIC", // 4 "REMOVE_BIDI_CONTROLS", // 8 "OUTPUT_REVERSE" // 16 }; public static String wrOptionsToString(int option) { return optionToString(option, REORDER_MASK, writeReorderedDescriptions); } public static String optionToString(int option, int mask, String[] descriptions) { StringBuffer desc 
= new StringBuffer(50); if ((option &= mask) == 0) { return "0"; } desc.setLength(0); for (int i = 0; option > 0; i++, option >>= 1) { if ((option & 1) != 0) { if (desc.length() > 0) { desc.append(" | "); } desc.append(descriptions[i]); } } return desc.toString(); } static final String columnString = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; static final char[] columns = columnString.toCharArray(); private static final int TABLE_SIZE = 256; private static boolean tablesInitialized = false; private static char[] pseudoToUChar; private static char[] UCharToPseudo; /* used for Unicode chars < 0x0100 */ private static char[] UCharToPseud2; /* used for Unicode chars >=0x0100 */ static void buildPseudoTables() /* The rules for pseudo-Bidi are as follows: - [ == LRE - ] == RLE - { == LRO - } == RLO - ^ == PDF - @ == LRM - & == RLM - A-F == Arabic Letters 0631-0636 - G-V == Hebrew letters 05d7-05ea - W-Z == Unassigned RTL 08d0-08d3 - 0-5 == western digits 0030-0035 - 6-9 == Arabic-Indic digits 0666-0669 - ` == Combining Grave Accent 0300 (NSM) - ~ == Delete 007f (BN) - | == Paragraph Separator 2029 (B) - _ == Info Separator 1 001f (S) All other characters represent themselves as Latin-1, with the corresponding Bidi properties. 
*/ { int i; char uchar; char c; /* initialize all tables to unknown */ pseudoToUChar = new char[TABLE_SIZE]; UCharToPseudo = new char[TABLE_SIZE]; UCharToPseud2 = new char[TABLE_SIZE]; for (i = 0; i < TABLE_SIZE; i++) { pseudoToUChar[i] = 0xFFFD; UCharToPseudo[i] = '?'; UCharToPseud2[i] = '?'; } /* initialize non letters or digits */ pseudoToUChar[ 0 ] = 0x0000; UCharToPseudo[0x00] = 0 ; pseudoToUChar[' '] = 0x0020; UCharToPseudo[0x20] = ' '; pseudoToUChar['!'] = 0x0021; UCharToPseudo[0x21] = '!'; pseudoToUChar['"'] = 0x0022; UCharToPseudo[0x22] = '"'; pseudoToUChar['#'] = 0x0023; UCharToPseudo[0x23] = '#'; pseudoToUChar['$'] = 0x0024; UCharToPseudo[0x24] = '$'; pseudoToUChar['%'] = 0x0025; UCharToPseudo[0x25] = '%'; pseudoToUChar['\'']= 0x0027; UCharToPseudo[0x27] = '\''; pseudoToUChar['('] = 0x0028; UCharToPseudo[0x28] = '('; pseudoToUChar[')'] = 0x0029; UCharToPseudo[0x29] = ')'; pseudoToUChar['*'] = 0x002A; UCharToPseudo[0x2A] = '*'; pseudoToUChar['+'] = 0x002B; UCharToPseudo[0x2B] = '+'; pseudoToUChar[','] = 0x002C; UCharToPseudo[0x2C] = ','; pseudoToUChar['-'] = 0x002D; UCharToPseudo[0x2D] = '-'; pseudoToUChar['.'] = 0x002E; UCharToPseudo[0x2E] = '.'; pseudoToUChar['/'] = 0x002F; UCharToPseudo[0x2F] = '/'; pseudoToUChar[':'] = 0x003A; UCharToPseudo[0x3A] = ':'; pseudoToUChar[';'] = 0x003B; UCharToPseudo[0x3B] = ';'; pseudoToUChar['<'] = 0x003C; UCharToPseudo[0x3C] = '<'; pseudoToUChar['='] = 0x003D; UCharToPseudo[0x3D] = '='; pseudoToUChar['>'] = 0x003E; UCharToPseudo[0x3E] = '>'; pseudoToUChar['?'] = 0x003F; UCharToPseudo[0x3F] = '?'; pseudoToUChar['\\']= 0x005C; UCharToPseudo[0x5C] = '\\'; /* initialize specially used characters */ pseudoToUChar['`'] = 0x0300; UCharToPseud2[0x00] = '`'; /* NSM */ pseudoToUChar['@'] = 0x200E; UCharToPseud2[0x0E] = '@'; /* LRM */ pseudoToUChar['&'] = 0x200F; UCharToPseud2[0x0F] = '&'; /* RLM */ pseudoToUChar['_'] = 0x001F; UCharToPseudo[0x1F] = '_'; /* S */ pseudoToUChar['|'] = 0x2029; UCharToPseud2[0x29] = '|'; /* B */ 
pseudoToUChar['['] = 0x202A; UCharToPseud2[0x2A] = '['; /* LRE */ pseudoToUChar[']'] = 0x202B; UCharToPseud2[0x2B] = ']'; /* RLE */ pseudoToUChar['^'] = 0x202C; UCharToPseud2[0x2C] = '^'; /* PDF */ pseudoToUChar['{'] = 0x202D; UCharToPseud2[0x2D] = '{'; /* LRO */ pseudoToUChar['}'] = 0x202E; UCharToPseud2[0x2E] = '}'; /* RLO */ pseudoToUChar['~'] = 0x007F; UCharToPseudo[0x7F] = '~'; /* BN */ /* initialize western digits */ for (i = 0, uchar = 0x0030; i < 6; i++, uchar++) { c = columns[i]; pseudoToUChar[c] = uchar; UCharToPseudo[uchar & 0x00ff] = c; } /* initialize Hindi digits */ for (i = 6, uchar = 0x0666; i < 10; i++, uchar++) { c = columns[i]; pseudoToUChar[c] = uchar; UCharToPseud2[uchar & 0x00ff] = c; } /* initialize Arabic letters */ for (i = 10, uchar = 0x0631; i < 16; i++, uchar++) { c = columns[i]; pseudoToUChar[c] = uchar; UCharToPseud2[uchar & 0x00ff] = c; } /* initialize Hebrew letters */ for (i = 16, uchar = 0x05D7; i < 32; i++, uchar++) { c = columns[i]; pseudoToUChar[c] = uchar; UCharToPseud2[uchar & 0x00ff] = c; } /* initialize Unassigned code points */ for (i = 32, uchar = 0x08D0; i < 36; i++, uchar++) { c = columns[i]; pseudoToUChar[c] = uchar; UCharToPseud2[uchar & 0x00ff] = c; } /* initialize Latin lower case letters */ for (i = 36, uchar = 0x0061; i < 62; i++, uchar++) { c = columns[i]; pseudoToUChar[c] = uchar; UCharToPseudo[uchar & 0x00ff] = c; } tablesInitialized = true; } /*----------------------------------------------------------------------*/ static String pseudoToU16(String input) /* This function converts a pseudo-Bidi string into a char string. It returns the char string. 
*/ { int len = input.length(); char[] output = new char[len]; int i; if (!tablesInitialized) { buildPseudoTables(); } for (i = 0; i < len; i++) output[i] = pseudoToUChar[input.charAt(i)]; return new String(output); } /*----------------------------------------------------------------------*/ static String u16ToPseudo(String input) /* This function converts a char string into a pseudo-Bidi string. It returns the pseudo-Bidi string. */ { int len = input.length(); char[] output = new char[len]; int i; char uchar; if (!tablesInitialized) { buildPseudoTables(); } for (i = 0; i < len; i++) { uchar = input.charAt(i); output[i] = uchar < 0x0100 ? UCharToPseudo[uchar] : UCharToPseud2[uchar & 0x00ff]; } return new String(output); } void errcont(String message) { msg(message, ERR, false, false); } void errcontln(String message) { msg(message, ERR, false, true); } void printCaseInfo(Bidi bidi, String src, String dst) { int length = bidi.getProcessedLength(); byte[] levels = bidi.getLevels(); char[] levelChars = new char[length]; byte lev; int runCount = bidi.countRuns(); errcontln("========================================"); errcontln("Processed length: " + length); for (int i = 0; i < length; i++) { lev = levels[i]; if (lev < 0) { levelChars[i] = '-'; } else if (lev < columns.length) { levelChars[i] = columns[lev]; } else { levelChars[i] = '+'; } } errcontln("Levels: " + new String(levelChars)); errcontln("Source: " + src); errcontln("Result: " + dst); errcontln("Direction: " + bidi.getDirection()); errcontln("paraLevel: " + Byte.toString(bidi.getParaLevel())); errcontln("reorderingMode: " + modeToString(bidi.getReorderingMode())); errcontln("reorderingOptions: " + spOptionsToString(bidi.getReorderingOptions())); errcont("Runs: " + runCount + " => logicalStart.length/level: "); for (int i = 0; i < runCount; i++) { BidiRun run; run = bidi.getVisualRun(i); errcont(" " + run.getStart() + "." 
+ run.getLength() + "/" + run.getEmbeddingLevel()); } errcont("\n"); } static final String mates1 = "<>()[]{}"; static final String mates2 = "><)(][}{"; static final char[] mates1Chars = mates1.toCharArray(); static final char[] mates2Chars = mates2.toCharArray(); boolean matchingPair(Bidi bidi, int i, char c1, char c2) { if (c1 == c2) { return true; } /* For REORDER_RUNS_ONLY, it would not be correct to check levels[i], so we use the appropriate run's level, which is good for all cases. */ if (bidi.getLogicalRun(i).getDirection() == 0) { return false; } for (int k = 0; k < mates1Chars.length; k++) { if ((c1 == mates1Chars[k]) && (c2 == mates2Chars[k])) { return true; } } return false; } boolean checkWhatYouCan(Bidi bidi, String src, String dst) { int i, idx, logLimit, visLimit; boolean testOK, errMap, errDst; char[] srcChars = src.toCharArray(); char[] dstChars = dst.toCharArray(); int[] visMap = bidi.getVisualMap(); int[] logMap = bidi.getLogicalMap(); testOK = true; errMap = errDst = false; logLimit = bidi.getProcessedLength(); visLimit = bidi.getResultLength(); if (visLimit > dstChars.length) { visLimit = dstChars.length; } char[] accumSrc = new char[logLimit]; char[] accumDst = new char[visLimit]; Arrays.fill(accumSrc, '?'); Arrays.fill(accumDst, '?'); if (logMap.length != logLimit) { errMap = true; } for (i = 0; i < logLimit; i++) { idx = bidi.getVisualIndex(i); if (idx != logMap[i]) { errMap = true; } if (idx == Bidi.MAP_NOWHERE) { continue; } if (idx >= visLimit) { continue; } accumDst[idx] = srcChars[i]; if (!matchingPair(bidi, i, srcChars[i], dstChars[idx])) { errDst = true; } } if (errMap) { if (testOK) { printCaseInfo(bidi, src, dst); testOK = false; } errln("Mismatch between getLogicalMap() and getVisualIndex()"); errcont("Map :" + valueOf(logMap)); errcont("\n"); errcont("Indexes:"); for (i = 0; i < logLimit; i++) { errcont(" " + bidi.getVisualIndex(i)); } errcont("\n"); } if (errDst) { if (testOK) { printCaseInfo(bidi, src, dst); testOK = false; } 
errln("Source does not map to Result"); errcontln("We got: " + new String(accumDst)); } errMap = errDst = false; if (visMap.length != visLimit) { errMap = true; } for (i = 0; i < visLimit; i++) { idx = bidi.getLogicalIndex(i); if (idx != visMap[i]) { errMap = true; } if (idx == Bidi.MAP_NOWHERE) { continue; } if (idx >= logLimit) { continue; } accumSrc[idx] = dstChars[i]; if (!matchingPair(bidi, idx, srcChars[idx], dstChars[i])) { errDst = true; } } if (errMap) { if (testOK) { printCaseInfo(bidi, src, dst); testOK = false; } errln("Mismatch between getVisualMap() and getLogicalIndex()"); errcont("Map :" + valueOf(visMap)); errcont("\n"); errcont("Indexes:"); for (i = 0; i < visLimit; i++) { errcont(" " + bidi.getLogicalIndex(i)); } errcont("\n"); } if (errDst) { if (testOK) { printCaseInfo(bidi, src, dst); testOK = false; } errln("Result does not map to Source"); errcontln("We got: " + new String(accumSrc)); } return testOK; } }
apache-2.0
Bananeweizen/cgeo
tests/src-android/cgeo/geocaching/log/LogTypeTest.java
1031
package cgeo.geocaching.log;

import static org.assertj.core.api.Java6Assertions.assertThat;

import junit.framework.TestCase;

/**
 * Unit tests for the {@link LogType} lookup helpers (by numeric id, by icon
 * name and by type string).
 */
public class LogTypeTest extends TestCase {

    public static void testGetById() {
        // Ids with no mapping must fall back to UNKNOWN.
        final LogType fallback = LogType.UNKNOWN;
        assertThat(LogType.getById(0)).isEqualTo(fallback);
        assertThat(LogType.getById(4711)).isEqualTo(fallback);
        // A mapped id resolves to its dedicated type.
        assertThat(LogType.getById(23)).isEqualTo(LogType.ENABLE_LISTING);
    }

    public static void testGetByIconName() {
        // Empty and null icon names both fall back to UNKNOWN.
        final LogType fallback = LogType.UNKNOWN;
        assertThat(LogType.getByIconName("")).isEqualTo(fallback);
        assertThat(LogType.getByIconName(null)).isEqualTo(fallback);
        assertThat(LogType.getByIconName("11")).isEqualTo(LogType.WEBCAM_PHOTO_TAKEN);
    }

    public static void testGetByType() {
        // Unknown type strings fall back to UNKNOWN.
        assertThat(LogType.getByType("obviously unknown type")).isEqualTo(LogType.UNKNOWN);
        // The lookup tolerates different casing and surrounding whitespace.
        final LogType grabbed = LogType.GRABBED_IT;
        assertThat(LogType.getByType("grabbed it")).isEqualTo(grabbed);
        assertThat(LogType.getByType(" gRAbbed IT ")).isEqualTo(grabbed);
    }
}
apache-2.0
ankitsinghal/phoenix
phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
9199
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.pherf; import static org.junit.Assert.*; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.apache.phoenix.pherf.jmx.MonitorManager; import org.apache.phoenix.pherf.result.file.Extension; import org.apache.phoenix.pherf.result.file.ResultFileDetails; import org.apache.phoenix.pherf.result.impl.CSVFileResultHandler; import org.apache.phoenix.pherf.result.impl.XMLResultHandler; import org.apache.phoenix.pherf.result.*; import org.junit.Test; import org.apache.phoenix.pherf.configuration.Query; public class ResultTest extends ResultBaseTest { @Test public void testMonitorWriter() throws Exception { String[] row = "org.apache.phoenix.pherf:type=PherfWriteThreads,6,Mon Jan 05 15:14:00 PST 2015".split(PherfConstants.RESULT_FILE_DELIMETER); ResultHandler resultMonitorWriter = null; List<ResultValue> resultValues = new ArrayList<>(); for (String val : row) { resultValues.add(new ResultValue(val)); } try { resultMonitorWriter = new CSVFileResultHandler(); resultMonitorWriter.setResultFileDetails(ResultFileDetails.CSV_MONITOR); 
resultMonitorWriter.setResultFileName(PherfConstants.MONITOR_FILE_NAME); Result result = new Result(ResultFileDetails.CSV_MONITOR, ResultFileDetails.CSV_MONITOR.getHeader().toString(), resultValues); resultMonitorWriter.write(result); resultMonitorWriter.write(result); resultMonitorWriter.write(result); resultMonitorWriter.close(); List<Result> results = resultMonitorWriter.read(); assertEquals("Results did not contain row.", results.size(), 3); } finally { if (resultMonitorWriter != null) { resultMonitorWriter.flush(); resultMonitorWriter.close(); } } } @Test public void testMonitorResult() throws Exception { ExecutorService executorService = Executors.newFixedThreadPool(1); MonitorManager monitor = new MonitorManager(100); Future future = executorService.submit(monitor.execute()); List<Result> records; final int TIMEOUT = 30; int ct = 0; int max = 30; // Wait while we write some rows. while (!future.isDone()) { Thread.sleep(100); if (ct == max) { int timer = 0; monitor.complete(); while (monitor.isRunning() && (timer < TIMEOUT)) { System.out.println("Waiting for monitor to finish. 
Seconds Waited :" + timer); Thread.sleep(1000); timer++; } } ct++; } executorService.shutdown(); records = monitor.readResults(); assertNotNull("Could not retrieve records", records); assertTrue("Failed to get correct CSV records.", records.size() > 0); assertFalse("Monitor was not stopped correctly.", monitor.isRunning()); } @Test public void testExtensionEnum() { assertEquals("Extension did not match", Extension.CSV.toString(), ".csv"); assertEquals("Extension did not match", Extension.DETAILED_CSV.toString(), "_detail.csv"); } @Test public void testResult() throws Exception { String filename = "testresult"; ResultHandler xmlResultHandler = new XMLResultHandler(); xmlResultHandler.setResultFileDetails(ResultFileDetails.XML); xmlResultHandler.setResultFileName(filename); ResultManager resultManager = new ResultManager(filename, Arrays.asList(xmlResultHandler)); assertTrue("Default Handlers were not initialized.", resultManager.getResultHandlers().size() > 0); // write result to file DataModelResult dataModelResult = setUpDataModelResult(); resultManager.write(dataModelResult); // Put some stuff in a combined file List<DataModelResult> modelResults = new ArrayList<>(); modelResults.add(dataModelResult); modelResults.add(dataModelResult); resultManager.write(modelResults); resultManager.flush(); // read result from file List<Result> resultList = xmlResultHandler.read(); ResultValue<DataModelResult> resultValue = resultList.get(0).getResultValues().get(0); DataModelResult dataModelResultFromFile = resultValue.getResultValue(); ScenarioResult scenarioResultFromFile = dataModelResultFromFile.getScenarioResult().get(0); QuerySetResult querySetResultFromFile = scenarioResultFromFile.getQuerySetResult().get(0); QueryResult queryResultFromFile = querySetResultFromFile.getQueryResults().get(0); ThreadTime ttFromFile = queryResultFromFile.getThreadTimes().get(0); // thread level verification assertEquals(new Long(10), ttFromFile.getMinTimeInMs().getElapsedDurationInMs()); 
assertEquals(new Long(30), ttFromFile.getMaxTimeInMs().getElapsedDurationInMs()); assertEquals(20, (int) ttFromFile.getAvgTimeInMs()); // 3rd runtime has the earliest start time, therefore that's what's expected. QueryResult qr = dataModelResult.getScenarioResult().get(0).getQuerySetResult().get(0) .getQueryResults().get(0); List<RunTime> runTimes = qr.getThreadTimes().get(0).getRunTimesInMs(); assertEquals(runTimes.get(2).getStartTime(), ttFromFile.getStartTime()); assertEquals(runTimes.get(0).getResultRowCount(), ttFromFile.getRunTimesInMs().get(0).getResultRowCount()); assertEquals(runTimes.get(1).getResultRowCount(), ttFromFile.getRunTimesInMs().get(1).getResultRowCount()); assertEquals(runTimes.get(2).getResultRowCount(), ttFromFile.getRunTimesInMs().get(2).getResultRowCount()); // query result level verification assertEquals(10, queryResultFromFile.getAvgMinRunTimeInMs()); assertEquals(30, queryResultFromFile.getAvgMaxRunTimeInMs()); assertEquals(20, queryResultFromFile.getAvgRunTimeInMs()); } private DataModelResult setUpDataModelResult() { DataModelResult dataModelResult = new DataModelResult(); dataModelResult.setZookeeper("mytestzk"); ScenarioResult scenarioResult = new ScenarioResult(); scenarioResult.setTableName("MY_TABLE_NAME"); scenarioResult.setName("MY_TEST_SCENARIO"); dataModelResult.getScenarioResult().add(scenarioResult); scenarioResult.setRowCount(999); QuerySetResult querySetResult = new QuerySetResult(); querySetResult.setConcurrency("50"); scenarioResult.getQuerySetResult().add(querySetResult); Query query = new Query(); Query query2 = new Query(); // add some spaces so we test query gets normalized query.setQueryGroup("g123"); query.setTenantId("tennantID123"); query.setStatement("Select * \n" + "from FHA"); query2.setStatement("Select a, b, c * \n" + "from FHA2"); assertEquals("Expected consecutive spaces to be normalized", "Select * from FHA", query.getStatement()); QueryResult queryResult = new QueryResult(query); QueryResult 
queryResult2 = new QueryResult(query2); querySetResult.getQueryResults().add(queryResult); querySetResult.getQueryResults().add(queryResult2); ThreadTime tt = new ThreadTime(); tt.setThreadName("thread1"); Calendar calendar = Calendar.getInstance(); Date startTime1 = calendar.getTime(); RunTime runtime1 = new RunTime(startTime1, 1000L, new Long(10)); tt.getRunTimesInMs().add(runtime1); calendar.add(Calendar.MINUTE, -1); RunTime runtime2 = new RunTime(calendar.getTime(), 2000L, new Long(20)); tt.getRunTimesInMs().add(runtime2); calendar.add(Calendar.MINUTE, -1); RunTime runtime3 = new RunTime(calendar.getTime(), 3000L, new Long(30)); tt.getRunTimesInMs().add(runtime3); queryResult.getThreadTimes().add(tt); queryResult2.getThreadTimes().add(tt); return dataModelResult; } }
apache-2.0
aureagle/cassandra
test/unit/org/apache/cassandra/cql3/PstmtPersistenceTest.java
8105
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cql3;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.junit.Before;
import org.junit.Test;

import junit.framework.Assert;
import org.apache.cassandra.cql3.statements.ParsedStatement;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.schema.SchemaConstants;
import org.apache.cassandra.schema.SchemaKeyspace;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.MD5Digest;

import static org.junit.Assert.*;

/**
 * Tests persistence of prepared statements: statements prepared through
 * {@link QueryProcessor} are written to the
 * {@code system.prepared_statements} table, survive a cache clear via
 * {@link QueryProcessor#preloadPreparedStatement()}, and are removed from both
 * cache and table when their keyspace is dropped or when they are evicted.
 */
public class PstmtPersistenceTest extends CQLTester
{
    /** Start each test from an empty prepared-statement cache. */
    @Before
    public void setUp()
    {
        QueryProcessor.clearPreparedStatements(false);
    }

    @Test
    public void testCachedPreparedStatements() throws Throwable
    {
        // need this for pstmt execution/validation tests
        requireNetwork();

        assertEquals(0, numberOfStatementsOnDisk());

        execute("CREATE KEYSPACE IF NOT EXISTS foo WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}");
        execute("CREATE TABLE foo.bar (key text PRIMARY KEY, val int)");

        ClientState clientState = ClientState.forExternalCalls(InetSocketAddress.createUnresolved("127.0.0.1", 1234));

        createTable("CREATE TABLE %s (pk int PRIMARY KEY, val text)");

        // Prepare 5 statements against three different tables; #1/#3 differ
        // only in whether the client state has a keyspace set, and #4 uses
        // %S (capital) so its formatted query text differs from #2.
        List<MD5Digest> stmtIds = new ArrayList<>();
        // #0
        stmtIds.add(prepareStatement("SELECT * FROM %s WHERE keyspace_name = ?", SchemaConstants.SCHEMA_KEYSPACE_NAME, SchemaKeyspace.TABLES, clientState));
        // #1
        stmtIds.add(prepareStatement("SELECT * FROM %s WHERE pk = ?", clientState));
        // #2
        stmtIds.add(prepareStatement("SELECT * FROM %s WHERE key = ?", "foo", "bar", clientState));
        clientState.setKeyspace("foo");
        // #3
        stmtIds.add(prepareStatement("SELECT * FROM %s WHERE pk = ?", clientState));
        // #4
        stmtIds.add(prepareStatement("SELECT * FROM %S WHERE key = ?", "foo", "bar", clientState));

        assertEquals(5, stmtIds.size());
        assertEquals(5, QueryProcessor.preparedStatementsCount());
        assertEquals(5, numberOfStatementsOnDisk());

        QueryHandler handler = ClientState.getCQLQueryHandler();

        validatePstmts(stmtIds, handler);

        // clear prepared statements cache
        QueryProcessor.clearPreparedStatements(true);
        assertEquals(0, QueryProcessor.preparedStatementsCount());
        for (MD5Digest stmtId : stmtIds)
            Assert.assertNull(handler.getPrepared(stmtId));

        // load prepared statements and validate that these still execute fine
        QueryProcessor.preloadPreparedStatement();
        validatePstmts(stmtIds, handler);

        // validate that the prepared statements are in the system table
        String queryAll = "SELECT * FROM " + SchemaConstants.SYSTEM_KEYSPACE_NAME + '.' + SystemKeyspace.PREPARED_STATEMENTS;
        for (UntypedResultSet.Row row : QueryProcessor.executeOnceInternal(queryAll))
        {
            MD5Digest digest = MD5Digest.wrap(ByteBufferUtil.getArray(row.getBytes("prepared_id")));
            ParsedStatement.Prepared prepared = QueryProcessor.instance.getPrepared(digest);
            Assert.assertNotNull(prepared);
        }

        // add another prepared statement and sync it to the table
        prepareStatement("SELECT * FROM %s WHERE key = ?", "foo", "bar", clientState);
        assertEquals(6, numberOfStatementsInMemory());
        assertEquals(6, numberOfStatementsOnDisk());

        // drop a keyspace (prepared statements are removed - syncPreparedStatements() should remove the rows, too)
        execute("DROP KEYSPACE foo");
        assertEquals(3, numberOfStatementsInMemory());
        assertEquals(3, numberOfStatementsOnDisk());
    }

    /**
     * Executes all 5 prepared statements (string-bound for #0/#2/#4,
     * int-bound for #1/#3) to prove they are present and runnable.
     */
    private void validatePstmts(List<MD5Digest> stmtIds, QueryHandler handler)
    {
        assertEquals(5, QueryProcessor.preparedStatementsCount());
        QueryOptions optionsStr = QueryOptions.forInternalCalls(Collections.singletonList(UTF8Type.instance.fromString("foobar")));
        QueryOptions optionsInt = QueryOptions.forInternalCalls(Collections.singletonList(Int32Type.instance.decompose(42)));
        validatePstmt(handler, stmtIds.get(0), optionsStr);
        validatePstmt(handler, stmtIds.get(1), optionsInt);
        validatePstmt(handler, stmtIds.get(2), optionsStr);
        validatePstmt(handler, stmtIds.get(3), optionsInt);
        validatePstmt(handler, stmtIds.get(4), optionsStr);
    }

    /** Looks up one prepared statement by id and executes it. */
    private static void validatePstmt(QueryHandler handler, MD5Digest stmtId, QueryOptions options)
    {
        ParsedStatement.Prepared prepared = handler.getPrepared(stmtId);
        assertNotNull(prepared);
        handler.processPrepared(prepared.statement, QueryState.forInternalCalls(), options, Collections.emptyMap(), System.nanoTime());
    }

    @Test
    public void testPstmtInvalidation() throws Throwable
    {
        ClientState clientState = ClientState.forInternalCalls();

        createTable("CREATE TABLE %s (key int primary key, val int)");

        // Prepare distinct statements (the TIMESTAMP literal makes each
        // unique) until the cache starts evicting, then verify cache and
        // table stay in sync.
        for (int cnt = 1; cnt < 10000; cnt++)
        {
            prepareStatement("INSERT INTO %s (key, val) VALUES (?, ?) USING TIMESTAMP " + cnt, clientState);

            if (numberOfEvictedStatements() > 0)
            {
                assertEquals("Number of statements in table and in cache don't match", numberOfStatementsInMemory(), numberOfStatementsOnDisk());

                // prepare more statements to trigger more evictions
                for (int cnt2 = 1; cnt2 < 10; cnt2++)
                    prepareStatement("INSERT INTO %s (key, val) VALUES (?, ?) USING TIMESTAMP " + cnt2, clientState);

                // each new prepared statement should have caused an eviction
                assertEquals("eviction count didn't increase by the expected number", numberOfEvictedStatements(), 10);
                assertEquals("Number of statements in table and in cache don't match", numberOfStatementsInMemory(), numberOfStatementsOnDisk());

                return;
            }
        }

        fail("Prepared statement eviction does not work");
    }

    /** Counts rows in system.prepared_statements. */
    private long numberOfStatementsOnDisk() throws Throwable
    {
        UntypedResultSet.Row row = execute("SELECT COUNT(*) FROM " + SchemaConstants.SYSTEM_KEYSPACE_NAME + '.' + SystemKeyspace.PREPARED_STATEMENTS).one();
        return row.getLong("count");
    }

    /** Counts statements held in the in-memory prepared-statement cache. */
    private long numberOfStatementsInMemory()
    {
        return QueryProcessor.preparedStatementsCount();
    }

    /** Reads the cumulative eviction counter from QueryProcessor metrics. */
    private long numberOfEvictedStatements()
    {
        return QueryProcessor.metrics.preparedStatementsEvicted.getCount();
    }

    /** Prepares against the keyspace/table created by createTable(). */
    private MD5Digest prepareStatement(String stmt, ClientState clientState)
    {
        return prepareStatement(stmt, keyspace(), currentTable(), clientState);
    }

    /** Formats "keyspace.table" into stmt's %s placeholder and prepares it. */
    private MD5Digest prepareStatement(String stmt, String keyspace, String table, ClientState clientState)
    {
        return QueryProcessor.prepare(String.format(stmt, keyspace + "." + table), clientState).statementId;
    }
}
apache-2.0
philipwhiuk/q-mail
qmail-library/src/main/java/com/fsck/k9/mail/store/pop3/Pop3Settings.java
361
package com.fsck.k9.mail.store.pop3;

import com.fsck.k9.mail.AuthType;
import com.fsck.k9.mail.ConnectionSecurity;

/**
 * Read-only accessors for the settings needed to establish a POP3 connection:
 * endpoint (host/port), transport security, authentication method and
 * credentials (password or client certificate alias).
 */
interface Pop3Settings {
    /** Server host name or IP address. */
    String getHost();

    /** Server TCP port. */
    int getPort();

    /** Transport security to use (e.g. none, STARTTLS, SSL/TLS). */
    ConnectionSecurity getConnectionSecurity();

    /** Authentication mechanism to use when logging in. */
    AuthType getAuthType();

    /** Login user name. */
    String getUsername();

    /** Login password; used with password-based auth types. */
    String getPassword();

    /** Alias of the client certificate; used with certificate-based auth. */
    String getClientCertificateAlias();
}
apache-2.0
facebook/litho
lib/yoga/src/main/java/com/facebook/yoga/YogaJustify.java
867
/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.yoga; public enum YogaJustify { FLEX_START(0), CENTER(1), FLEX_END(2), SPACE_BETWEEN(3), SPACE_AROUND(4), SPACE_EVENLY(5); private final int mIntValue; YogaJustify(int intValue) { mIntValue = intValue; } public int intValue() { return mIntValue; } public static YogaJustify fromInt(int value) { switch (value) { case 0: return FLEX_START; case 1: return CENTER; case 2: return FLEX_END; case 3: return SPACE_BETWEEN; case 4: return SPACE_AROUND; case 5: return SPACE_EVENLY; default: throw new IllegalArgumentException("Unknown enum value: " + value); } } }
apache-2.0
rmetzger/flink
flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutorMemoryConfiguration.java
10479
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.taskexecutor;

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;

import org.apache.flink.shaded.guava18.com.google.common.base.MoreObjects;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonInclude;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty;

import java.io.Serializable;
import java.util.Objects;

import static org.apache.flink.configuration.TaskManagerOptions.FRAMEWORK_HEAP_MEMORY;
import static org.apache.flink.configuration.TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY;
import static org.apache.flink.configuration.TaskManagerOptions.JVM_METASPACE;
import static org.apache.flink.configuration.TaskManagerOptions.JVM_OVERHEAD_MAX;
import static org.apache.flink.configuration.TaskManagerOptions.MANAGED_MEMORY_SIZE;
import static org.apache.flink.configuration.TaskManagerOptions.NETWORK_MEMORY_MAX;
import static org.apache.flink.configuration.TaskManagerOptions.TASK_HEAP_MEMORY;
import static org.apache.flink.configuration.TaskManagerOptions.TASK_OFF_HEAP_MEMORY;
import static org.apache.flink.configuration.TaskManagerOptions.TOTAL_FLINK_MEMORY;
import static org.apache.flink.configuration.TaskManagerOptions.TOTAL_PROCESS_MEMORY;

/**
 * TaskExecutorConfiguration collects the configuration of a TaskExecutor instance.
 *
 * <p>Immutable value object, Jackson-serializable via the {@code FIELD_NAME_*}
 * property names. All memory values are in bytes (the factory unwraps each
 * configured {@link MemorySize} via {@code getBytes()}); a field is
 * {@code null} when the corresponding option is not set.
 */
public class TaskExecutorMemoryConfiguration implements Serializable {

    // JSON property names used by the Jackson annotations below.
    public static final String FIELD_NAME_FRAMEWORK_HEAP = "frameworkHeap";
    public static final String FIELD_NAME_TASK_HEAP = "taskHeap";
    public static final String FIELD_NAME_FRAMEWORK_OFFHEAP = "frameworkOffHeap";
    public static final String FIELD_NAME_TASK_OFFHEAP = "taskOffHeap";
    public static final String FIELD_NAME_NETWORK_MEMORY = "networkMemory";
    public static final String FIELD_NAME_MANAGED_MEMORY = "managedMemory";
    public static final String FIELD_NAME_JVM_METASPACE = "jvmMetaspace";
    public static final String FIELD_NAME_JVM_OVERHEAD = "jvmOverhead";
    public static final String FIELD_NAME_TOTAL_FLINK_MEMORY = "totalFlinkMemory";
    public static final String FIELD_NAME_TOTAL_PROCESS_MEMORY = "totalProcessMemory";

    @JsonProperty(FIELD_NAME_FRAMEWORK_HEAP)
    // NOTE(review): @JsonInclude appears only on this one field, with no
    // arguments — looks like an unintentional asymmetry; confirm intent.
    @JsonInclude
    private final Long frameworkHeap;

    @JsonProperty(FIELD_NAME_TASK_HEAP)
    private final Long taskHeap;

    @JsonProperty(FIELD_NAME_FRAMEWORK_OFFHEAP)
    private final Long frameworkOffHeap;

    @JsonProperty(FIELD_NAME_TASK_OFFHEAP)
    private final Long taskOffHeap;

    @JsonProperty(FIELD_NAME_NETWORK_MEMORY)
    private final Long networkMemory;

    @JsonProperty(FIELD_NAME_MANAGED_MEMORY)
    private final Long managedMemoryTotal;

    @JsonProperty(FIELD_NAME_JVM_METASPACE)
    private final Long jvmMetaspace;

    @JsonProperty(FIELD_NAME_JVM_OVERHEAD)
    private final Long jvmOverhead;

    @JsonProperty(FIELD_NAME_TOTAL_FLINK_MEMORY)
    private final Long totalFlinkMemory;

    @JsonProperty(FIELD_NAME_TOTAL_PROCESS_MEMORY)
    private final Long totalProcessMemory;

    /** Reads one memory option as bytes, or null if it is unset. */
    private static Long getConfigurationValue(
            Configuration config, ConfigOption<? extends MemorySize> option) {
        MemorySize memorySize = config.get(option);
        return memorySize != null ? memorySize.getBytes() : null;
    }

    /**
     * Factory method for initializing a TaskExecutorMemoryConfiguration based on the passed
     * Configuration.
     *
     * @param config The Configuration used for initializing the TaskExecutorMemoryConfiguration.
     * @return The newly instantiated TaskExecutorMemoryConfiguration.
     */
    public static TaskExecutorMemoryConfiguration create(Configuration config) {
        return new TaskExecutorMemoryConfiguration(
                getConfigurationValue(config, FRAMEWORK_HEAP_MEMORY),
                getConfigurationValue(config, TASK_HEAP_MEMORY),
                getConfigurationValue(config, FRAMEWORK_OFF_HEAP_MEMORY),
                getConfigurationValue(config, TASK_OFF_HEAP_MEMORY),
                getConfigurationValue(config, NETWORK_MEMORY_MAX),
                getConfigurationValue(config, MANAGED_MEMORY_SIZE),
                getConfigurationValue(config, JVM_METASPACE),
                getConfigurationValue(config, JVM_OVERHEAD_MAX),
                getConfigurationValue(config, TOTAL_FLINK_MEMORY),
                getConfigurationValue(config, TOTAL_PROCESS_MEMORY));
    }

    /** All-args constructor; also the Jackson deserialization entry point. */
    @JsonCreator
    public TaskExecutorMemoryConfiguration(
            @JsonProperty(FIELD_NAME_FRAMEWORK_HEAP) Long frameworkHeap,
            @JsonProperty(FIELD_NAME_TASK_HEAP) Long taskHeap,
            @JsonProperty(FIELD_NAME_FRAMEWORK_OFFHEAP) Long frameworkOffHeap,
            @JsonProperty(FIELD_NAME_TASK_OFFHEAP) Long taskOffHeap,
            @JsonProperty(FIELD_NAME_NETWORK_MEMORY) Long networkMemory,
            @JsonProperty(FIELD_NAME_MANAGED_MEMORY) Long managedMemoryTotal,
            @JsonProperty(FIELD_NAME_JVM_METASPACE) Long jvmMetaspace,
            @JsonProperty(FIELD_NAME_JVM_OVERHEAD) Long jvmOverhead,
            @JsonProperty(FIELD_NAME_TOTAL_FLINK_MEMORY) Long totalFlinkMemory,
            @JsonProperty(FIELD_NAME_TOTAL_PROCESS_MEMORY) Long totalProcessMemory) {
        this.frameworkHeap = frameworkHeap;
        this.taskHeap = taskHeap;
        this.frameworkOffHeap = frameworkOffHeap;
        this.taskOffHeap = taskOffHeap;
        this.networkMemory = networkMemory;
        this.managedMemoryTotal = managedMemoryTotal;
        this.jvmMetaspace = jvmMetaspace;
        this.jvmOverhead = jvmOverhead;
        this.totalFlinkMemory = totalFlinkMemory;
        this.totalProcessMemory = totalProcessMemory;
    }

    /** Returns the configured heap size used by the framework. */
    public Long getFrameworkHeap() {
        return frameworkHeap;
    }

    /** Returns the configured heap size used by the tasks. */
    public Long getTaskHeap() {
        return taskHeap;
    }

    /** Returns the configured off-heap size used by the framework. */
    public Long getFrameworkOffHeap() {
        return frameworkOffHeap;
    }

    /** Returns the configured off-heap size used by the tasks. */
    public Long getTaskOffHeap() {
        return taskOffHeap;
    }

    /** Returns the configured maximum network memory. */
    public Long getNetworkMemory() {
        return networkMemory;
    }

    /** Returns the total amount of memory reserved for by the MemoryManager. */
    public Long getManagedMemoryTotal() {
        return managedMemoryTotal;
    }

    /** Returns the maximum Metaspace size allowed for the task manager. */
    public Long getJvmMetaspace() {
        return jvmMetaspace;
    }

    /**
     * Returns the threshold for defining the maximum amount of memory used for the JVM overhead.
     */
    public Long getJvmOverhead() {
        return jvmOverhead;
    }

    /**
     * Returns the amount of memory configured to be used by Flink excluding things like JVM
     * Metaspace and other JVM overhead.
     */
    public Long getTotalFlinkMemory() {
        return totalFlinkMemory;
    }

    /**
     * Returns the total amount of memory configured to be used by the JVM including all the
     * different memory pools.
     */
    public Long getTotalProcessMemory() {
        return totalProcessMemory;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TaskExecutorMemoryConfiguration that = (TaskExecutorMemoryConfiguration) o;
        // Field-wise comparison; Objects.equals handles null fields.
        return Objects.equals(frameworkHeap, that.frameworkHeap)
                && Objects.equals(taskHeap, that.taskHeap)
                && Objects.equals(frameworkOffHeap, that.frameworkOffHeap)
                && Objects.equals(taskOffHeap, that.taskOffHeap)
                && Objects.equals(networkMemory, that.networkMemory)
                && Objects.equals(managedMemoryTotal, that.managedMemoryTotal)
                && Objects.equals(jvmMetaspace, that.jvmMetaspace)
                && Objects.equals(jvmOverhead, that.jvmOverhead)
                && Objects.equals(totalFlinkMemory, that.totalFlinkMemory)
                && Objects.equals(totalProcessMemory, that.totalProcessMemory);
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                frameworkHeap,
                taskHeap,
                frameworkOffHeap,
                taskOffHeap,
                networkMemory,
                managedMemoryTotal,
                jvmMetaspace,
                jvmOverhead,
                totalFlinkMemory,
                totalProcessMemory);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add(FIELD_NAME_FRAMEWORK_HEAP, frameworkHeap)
                .add(FIELD_NAME_TASK_HEAP, taskHeap)
                .add(FIELD_NAME_FRAMEWORK_OFFHEAP, frameworkOffHeap)
                .add(FIELD_NAME_TASK_OFFHEAP, taskOffHeap)
                .add(FIELD_NAME_NETWORK_MEMORY, networkMemory)
                .add(FIELD_NAME_MANAGED_MEMORY, managedMemoryTotal)
                .add(FIELD_NAME_JVM_METASPACE, jvmMetaspace)
                .add(FIELD_NAME_JVM_OVERHEAD, jvmOverhead)
                .add(FIELD_NAME_TOTAL_FLINK_MEMORY, totalFlinkMemory)
                .add(FIELD_NAME_TOTAL_PROCESS_MEMORY, totalProcessMemory)
                .toString();
    }
}
apache-2.0
TommesDee/cpachecker
test/programs/java/Statements/pack/SubSubType1.java
62
package pack;

// Test fixture: intentionally empty subclass extending SubType1, giving the
// type-hierarchy tests a two-level-deep subtype to resolve.
public class SubSubType1 extends SubType1 {

}
apache-2.0
camunda/camunda-bpmn-model
src/test/java/org/camunda/bpm/model/bpmn/instance/DataObjectTest.java
1513
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.model.bpmn.instance;

import java.util.Arrays;
import java.util.Collection;

/**
 * Declares the model assumptions for the BPMN {@code dataObject} element:
 * its parent type, child elements and attributes. The assertions themselves
 * are driven by the {@code BpmnModelElementInstanceTest} base class.
 *
 * @author Dario Campagna
 */
public class DataObjectTest extends BpmnModelElementInstanceTest {

  // NOTE(review): these three methods presumably override/implement hooks of
  // BpmnModelElementInstanceTest — confirm and consider adding @Override.

  // dataObject extends FlowElement and is not abstract.
  public TypeAssumption getTypeAssumption() {
    return new TypeAssumption(FlowElement.class, false);
  }

  // At most one dataState child element.
  public Collection<ChildElementAssumption> getChildElementAssumptions() {
    return Arrays.asList(
      new ChildElementAssumption(DataState.class, 0, 1)
    );
  }

  public Collection<AttributeAssumption> getAttributesAssumptions() {
    return Arrays.asList(
      new AttributeAssumption("itemSubjectRef"),
      new AttributeAssumption("isCollection", false, false, false)
    );
  }
}
apache-2.0
jhrcek/kie-wb-common
kie-wb-common-forms/kie-wb-common-dynamic-forms/kie-wb-common-dynamic-forms-client/src/main/java/org/kie/workbench/common/forms/dynamic/client/rendering/formGroups/impl/slider/SliderFormGroupViewImpl.java
2646
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.forms.dynamic.client.rendering.formGroups.impl.slider;

import javax.inject.Inject;

import com.google.gwt.user.client.ui.Widget;
import org.gwtbootstrap3.client.ui.constants.ValidationState;
import org.jboss.errai.common.client.dom.DOMUtil;
import org.jboss.errai.common.client.dom.Div;
import org.jboss.errai.ui.shared.api.annotations.DataField;
import org.jboss.errai.ui.shared.api.annotations.Templated;
import org.kie.workbench.common.forms.dynamic.client.rendering.formGroups.labels.label.FieldLabel;
import org.kie.workbench.common.forms.model.FieldDefinition;

/**
 * Errai-templated form group view for slider fields: renders a label plus the
 * field widget and toggles Bootstrap validation styling / help-block text for
 * error display.
 */
@Templated
public class SliderFormGroupViewImpl implements SliderFormGroupView {

    // Label element bound to the template; rendered from the FieldDefinition.
    @Inject
    @DataField
    private FieldLabel fieldLabel;

    // Container the field widget is appended into.
    @Inject
    @DataField
    protected Div fieldContainer;

    // Text element used for validation error messages.
    @Inject
    @DataField
    protected Div helpBlock;

    /** Renders with an empty input id (label not tied to a specific input). */
    @Override
    public void render(Widget widget, FieldDefinition fieldDefinition) {
        render("", widget, fieldDefinition);
    }

    // NOTE(review): appends without clearing fieldContainer first — repeated
    // render() calls would accumulate widgets; confirm callers render once.
    public void render(String inputId, Widget widget, FieldDefinition fieldDefinition) {
        fieldLabel.renderForInputId(inputId, fieldDefinition);
        DOMUtil.appendWidgetToElement(fieldContainer, widget);
    }

    /** Swaps ERROR styling for NONE and empties the help-block text. */
    @Override
    public void clearError() {
        DOMUtil.removeEnumStyleName(getElement(), ValidationState.ERROR);
        DOMUtil.addEnumStyleName(getElement(), ValidationState.NONE);
        helpBlock.setTextContent("");
    }

    /** Swaps NONE styling for ERROR and shows the message in the help block. */
    @Override
    public void showError(String error) {
        DOMUtil.removeEnumStyleName(getElement(), ValidationState.NONE);
        DOMUtil.addEnumStyleName(getElement(), ValidationState.ERROR);
        helpBlock.setTextContent(error);
    }
}
apache-2.0
tectronics/splinelibrary
2.3/src/org/drip/analytics/holset/ARSHoliday.java
37853
package org.drip.analytics.holset;

/*
 * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 */

/*
 * GENERATED on Fri Jan 11 19:54:06 EST 2013 ---- DO NOT DELETE
 */

/*!
 * Copyright (C) 2013 Lakshmi Krishnamurthy
 * Copyright (C) 2012 Lakshmi Krishnamurthy
 * Copyright (C) 2011 Lakshmi Krishnamurthy
 *
 * This file is part of CreditAnalytics, a free-software/open-source library for
 * fixed income analysts and developers - http://www.credit-trader.org
 *
 * CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus
 * towards the needs of the bonds and credit products community.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Machine-generated holiday calendar for the ARS (Argentina) location.
 * Emitted by the CreditAnalytics holiday-set generator on 11-Jan-2013
 * (see GENERATED banner above) — do not hand-edit the date table; regenerate
 * instead. Holiday descriptions degrade to the bare location code "ARS" for
 * dates from 2013 onward in the generator's source data.
 */
public class ARSHoliday implements org.drip.analytics.holset.LocationHoliday {

	// Generated calendars expose only a no-argument constructor.
	public ARSHoliday() {
	}

	/**
	 * @return the location/currency code ("ARS") this holiday set covers
	 */
	public java.lang.String getHolidayLoc() {
		return "ARS";
	}

	/**
	 * Builds the full ARS holiday locale: every generated static holiday from
	 * 1999 through 2061, plus the standard Saturday/Sunday weekend.
	 *
	 * @return the populated {@link org.drip.analytics.holiday.Locale}
	 */
	public org.drip.analytics.holiday.Locale getHolidaySet() {
		org.drip.analytics.holiday.Locale lh = new org.drip.analytics.holiday.Locale();

		// Generated date table — dates are "DD-MMM-YYYY"; second argument is a
		// human-readable description (or just "ARS" for 2013+ entries).
		lh.addStaticHoliday ("01-JAN-1999", "New Years Day"); lh.addStaticHoliday ("01-APR-1999", "Holy Thursday"); lh.addStaticHoliday ("02-APR-1999", "Good Friday"); lh.addStaticHoliday ("25-MAY-1999", "Liberation Day"); lh.addStaticHoliday ("14-JUN-1999", "Malvinas Islands Memorial Day Observed"); lh.addStaticHoliday ("21-JUN-1999", "Flag Day"); lh.addStaticHoliday ("09-JUL-1999", "Independence Day"); lh.addStaticHoliday ("16-AUG-1999", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("11-OCT-1999", "Day of the Race Observed"); lh.addStaticHoliday ("08-DEC-1999", "Immaculate Conception"); lh.addStaticHoliday ("31-DEC-1999", "Last Weekday of the Year"); lh.addStaticHoliday ("20-APR-2000", "Holy Thursday"); lh.addStaticHoliday ("21-APR-2000", "Good Friday"); lh.addStaticHoliday ("01-MAY-2000", "Labour Day"); lh.addStaticHoliday ("25-MAY-2000", "Liberation Day"); lh.addStaticHoliday ("19-JUN-2000", "Flag Day"); lh.addStaticHoliday ("21-AUG-2000", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("16-OCT-2000", "Day of the Race"); lh.addStaticHoliday ("06-NOV-2000", "Bank Holiday"); lh.addStaticHoliday ("08-DEC-2000", "Immaculate Conception"); lh.addStaticHoliday ("25-DEC-2000", "Christmas Day"); lh.addStaticHoliday ("29-DEC-2000", "Last Weekday of the Year"); lh.addStaticHoliday ("01-JAN-2001", "New Years Day"); lh.addStaticHoliday ("02-APR-2001", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("12-APR-2001", "Holy Thursday"); lh.addStaticHoliday ("13-APR-2001", "Good Friday"); lh.addStaticHoliday ("01-MAY-2001", "Labour Day"); lh.addStaticHoliday ("25-MAY-2001", "Liberation Day"); lh.addStaticHoliday ("18-JUN-2001", "Flag Day");
		lh.addStaticHoliday ("09-JUL-2001", "Independence Day"); lh.addStaticHoliday ("20-AUG-2001", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("08-OCT-2001", "Columbus Day"); lh.addStaticHoliday ("06-NOV-2001", "Bank Holiday"); lh.addStaticHoliday ("21-DEC-2001", "Special Holiday"); lh.addStaticHoliday ("24-DEC-2001", "Special Holiday"); lh.addStaticHoliday ("25-DEC-2001", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2001", "Special Holiday"); lh.addStaticHoliday ("31-DEC-2001", "Special Holiday"); lh.addStaticHoliday ("01-JAN-2002", "New Years Day"); lh.addStaticHoliday ("28-MAR-2002", "Holy Thursday"); lh.addStaticHoliday ("29-MAR-2002", "Good Friday"); lh.addStaticHoliday ("02-APR-2002", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("01-MAY-2002", "Labour Day"); lh.addStaticHoliday ("17-JUN-2002", "Flag Day"); lh.addStaticHoliday ("09-JUL-2002", "Independence Day"); lh.addStaticHoliday ("19-AUG-2002", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("25-DEC-2002", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2003", "New Years Day"); lh.addStaticHoliday ("31-MAR-2003", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("17-APR-2003", "Holy Thursday"); lh.addStaticHoliday ("18-APR-2003", "Good Friday"); lh.addStaticHoliday ("01-MAY-2003", "Labour Day"); lh.addStaticHoliday ("16-JUN-2003", "Flag Day"); lh.addStaticHoliday ("09-JUL-2003", "Independence Day"); lh.addStaticHoliday ("18-AUG-2003", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("13-OCT-2003", "Day of the Race"); lh.addStaticHoliday ("06-NOV-2003", "Bank Holiday"); lh.addStaticHoliday ("08-DEC-2003", "Immaculate Conception"); lh.addStaticHoliday ("24-DEC-2003", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2003", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2004", "New Years Day"); lh.addStaticHoliday ("05-APR-2004", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("08-APR-2004", "Holy Thursday");
		lh.addStaticHoliday ("09-APR-2004", "Good Friday"); lh.addStaticHoliday ("25-MAY-2004", "Liberation Day"); lh.addStaticHoliday ("21-JUN-2004", "Flag Day"); lh.addStaticHoliday ("02-JUL-2004", "Special Holiday"); lh.addStaticHoliday ("05-JUL-2004", "Special Holiday"); lh.addStaticHoliday ("09-JUL-2004", "Independence Day"); lh.addStaticHoliday ("16-AUG-2004", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("11-OCT-2004", "Day of the Race"); lh.addStaticHoliday ("08-DEC-2004", "Immaculate Conception"); lh.addStaticHoliday ("24-MAR-2005", "Holy Thursday"); lh.addStaticHoliday ("25-MAR-2005", "Good Friday"); lh.addStaticHoliday ("25-MAY-2005", "Liberation Day"); lh.addStaticHoliday ("20-JUN-2005", "Flag Day"); lh.addStaticHoliday ("15-AUG-2005", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("10-OCT-2005", "Day of the Race"); lh.addStaticHoliday ("08-DEC-2005", "Immaculate Conception"); lh.addStaticHoliday ("24-MAR-2006", "Memorial Day"); lh.addStaticHoliday ("13-APR-2006", "Holy Thursday"); lh.addStaticHoliday ("14-APR-2006", "Good Friday"); lh.addStaticHoliday ("01-MAY-2006", "Labour Day"); lh.addStaticHoliday ("25-MAY-2006", "Liberation Day"); lh.addStaticHoliday ("19-JUN-2006", "Flag Day"); lh.addStaticHoliday ("21-AUG-2006", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("16-OCT-2006", "Day of the Race"); lh.addStaticHoliday ("06-NOV-2006", "Bank Holiday"); lh.addStaticHoliday ("08-DEC-2006", "Immaculate Conception"); lh.addStaticHoliday ("25-DEC-2006", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2007", "New Years Day"); lh.addStaticHoliday ("02-APR-2007", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("05-APR-2007", "Holy Thursday"); lh.addStaticHoliday ("06-APR-2007", "Good Friday"); lh.addStaticHoliday ("01-MAY-2007", "Labour Day"); lh.addStaticHoliday ("25-MAY-2007", "Liberation Day"); lh.addStaticHoliday ("18-JUN-2007", "Flag Day"); lh.addStaticHoliday ("09-JUL-2007",
		"Independence Day"); lh.addStaticHoliday ("20-AUG-2007", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("15-OCT-2007", "Day of the Race"); lh.addStaticHoliday ("06-NOV-2007", "Bank Holiday"); lh.addStaticHoliday ("24-DEC-2007", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2007", "Christmas Day"); lh.addStaticHoliday ("31-DEC-2007", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2008", "New Years Day"); lh.addStaticHoliday ("20-MAR-2008", "Holy Thursday"); lh.addStaticHoliday ("21-MAR-2008", "Good Friday"); lh.addStaticHoliday ("24-MAR-2008", "Memorial Day"); lh.addStaticHoliday ("02-APR-2008", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("01-MAY-2008", "Labour Day"); lh.addStaticHoliday ("16-JUN-2008", "Flag Day"); lh.addStaticHoliday ("09-JUL-2008", "Independence Day"); lh.addStaticHoliday ("18-AUG-2008", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("06-NOV-2008", "Bank Holiday"); lh.addStaticHoliday ("08-DEC-2008", "Immaculate Conception"); lh.addStaticHoliday ("25-DEC-2008", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2009", "New Years Day"); lh.addStaticHoliday ("24-MAR-2009", "Memorial Day"); lh.addStaticHoliday ("02-APR-2009", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("09-APR-2009", "Holy Thursday"); lh.addStaticHoliday ("10-APR-2009", "Good Friday"); lh.addStaticHoliday ("01-MAY-2009", "Labour Day"); lh.addStaticHoliday ("25-MAY-2009", "Liberation Day"); lh.addStaticHoliday ("15-JUN-2009", "Flag Day"); lh.addStaticHoliday ("09-JUL-2009", "Independence Day"); lh.addStaticHoliday ("17-AUG-2009", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("12-OCT-2009", "Day of the Race"); lh.addStaticHoliday ("06-NOV-2009", "Bank Holiday"); lh.addStaticHoliday ("08-DEC-2009", "Immaculate Conception"); lh.addStaticHoliday ("25-DEC-2009", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2010", "New Years Day"); lh.addStaticHoliday ("24-MAR-2010", "Memorial Day");
		lh.addStaticHoliday ("01-APR-2010", "Holy Thursday"); lh.addStaticHoliday ("02-APR-2010", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("24-MAY-2010", "Bank Holiday"); lh.addStaticHoliday ("25-MAY-2010", "Liberation Day"); lh.addStaticHoliday ("21-JUN-2010", "Flag Day"); lh.addStaticHoliday ("09-JUL-2010", "Independence Day"); lh.addStaticHoliday ("16-AUG-2010", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("11-OCT-2010", "Day of the Race"); lh.addStaticHoliday ("27-OCT-2010", "Buenos Aires bank holidays"); lh.addStaticHoliday ("22-NOV-2010", "ARS holidays"); lh.addStaticHoliday ("08-DEC-2010", "Immaculate Conception"); lh.addStaticHoliday ("07-MAR-2011", "New Holiday"); lh.addStaticHoliday ("08-MAR-2011", "New Holiday"); lh.addStaticHoliday ("24-MAR-2011", "Memorial Day"); lh.addStaticHoliday ("25-MAR-2011", "Memorial Day"); lh.addStaticHoliday ("21-APR-2011", "Holy Thursday"); lh.addStaticHoliday ("22-APR-2011", "Good Friday"); lh.addStaticHoliday ("25-MAY-2011", "Liberation Day"); lh.addStaticHoliday ("20-JUN-2011", "Flag Day"); lh.addStaticHoliday ("15-AUG-2011", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("22-AUG-2011", "Holiday"); lh.addStaticHoliday ("10-OCT-2011", "Day of the Race"); lh.addStaticHoliday ("08-DEC-2011", "Immaculate Conception"); lh.addStaticHoliday ("09-DEC-2011", "New Holiday"); lh.addStaticHoliday ("02-APR-2012", "Malvinas Islands Memorial Day"); lh.addStaticHoliday ("05-APR-2012", "Holy Thursday"); lh.addStaticHoliday ("06-APR-2012", "Good Friday"); lh.addStaticHoliday ("01-MAY-2012", "Labour Day"); lh.addStaticHoliday ("25-MAY-2012", "Liberation Day"); lh.addStaticHoliday ("18-JUN-2012", "Flag Day"); lh.addStaticHoliday ("09-JUL-2012", "Independence Day"); lh.addStaticHoliday ("20-AUG-2012", "Anniversary of the Death of General San Martin"); lh.addStaticHoliday ("15-OCT-2012", "Day of the Race"); lh.addStaticHoliday ("06-NOV-2012", "Bank Holiday"); lh.addStaticHoliday
		("25-DEC-2012", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2013", "ARS"); lh.addStaticHoliday ("28-MAR-2013", "ARS"); lh.addStaticHoliday ("29-MAR-2013", "ARS"); lh.addStaticHoliday ("02-APR-2013", "ARS"); lh.addStaticHoliday ("01-MAY-2013", "ARS"); lh.addStaticHoliday ("17-JUN-2013", "ARS"); lh.addStaticHoliday ("09-JUL-2013", "ARS"); lh.addStaticHoliday ("19-AUG-2013", "ARS"); lh.addStaticHoliday ("14-OCT-2013", "ARS"); lh.addStaticHoliday ("06-NOV-2013", "ARS"); lh.addStaticHoliday ("25-DEC-2013", "ARS"); lh.addStaticHoliday ("01-JAN-2014", "ARS"); lh.addStaticHoliday ("24-MAR-2014", "ARS"); lh.addStaticHoliday ("02-APR-2014", "ARS"); lh.addStaticHoliday ("17-APR-2014", "ARS"); lh.addStaticHoliday ("18-APR-2014", "ARS"); lh.addStaticHoliday ("01-MAY-2014", "ARS"); lh.addStaticHoliday ("16-JUN-2014", "ARS"); lh.addStaticHoliday ("09-JUL-2014", "ARS"); lh.addStaticHoliday ("18-AUG-2014", "ARS"); lh.addStaticHoliday ("13-OCT-2014", "ARS"); lh.addStaticHoliday ("06-NOV-2014", "ARS"); lh.addStaticHoliday ("08-DEC-2014", "ARS"); lh.addStaticHoliday ("25-DEC-2014", "ARS"); lh.addStaticHoliday ("01-JAN-2015", "ARS"); lh.addStaticHoliday ("24-MAR-2015", "ARS"); lh.addStaticHoliday ("02-APR-2015", "ARS"); lh.addStaticHoliday ("03-APR-2015", "ARS"); lh.addStaticHoliday ("01-MAY-2015", "ARS"); lh.addStaticHoliday ("25-MAY-2015", "ARS"); lh.addStaticHoliday ("15-JUN-2015", "ARS"); lh.addStaticHoliday ("09-JUL-2015", "ARS"); lh.addStaticHoliday ("17-AUG-2015", "ARS"); lh.addStaticHoliday ("12-OCT-2015", "ARS"); lh.addStaticHoliday ("06-NOV-2015", "ARS"); lh.addStaticHoliday ("08-DEC-2015", "ARS"); lh.addStaticHoliday ("25-DEC-2015", "ARS"); lh.addStaticHoliday ("01-JAN-2016", "ARS"); lh.addStaticHoliday ("24-MAR-2016", "ARS"); lh.addStaticHoliday ("25-MAR-2016", "ARS"); lh.addStaticHoliday ("25-MAY-2016", "ARS"); lh.addStaticHoliday ("20-JUN-2016", "ARS"); lh.addStaticHoliday ("15-AUG-2016", "ARS"); lh.addStaticHoliday ("10-OCT-2016", "ARS"); lh.addStaticHoliday
		("08-DEC-2016", "ARS"); lh.addStaticHoliday ("24-MAR-2017", "ARS"); lh.addStaticHoliday ("13-APR-2017", "ARS"); lh.addStaticHoliday ("14-APR-2017", "ARS"); lh.addStaticHoliday ("01-MAY-2017", "ARS"); lh.addStaticHoliday ("25-MAY-2017", "ARS"); lh.addStaticHoliday ("19-JUN-2017", "ARS"); lh.addStaticHoliday ("21-AUG-2017", "ARS"); lh.addStaticHoliday ("16-OCT-2017", "ARS"); lh.addStaticHoliday ("06-NOV-2017", "ARS"); lh.addStaticHoliday ("08-DEC-2017", "ARS"); lh.addStaticHoliday ("25-DEC-2017", "ARS"); lh.addStaticHoliday ("01-JAN-2018", "ARS"); lh.addStaticHoliday ("29-MAR-2018", "ARS"); lh.addStaticHoliday ("30-MAR-2018", "ARS"); lh.addStaticHoliday ("02-APR-2018", "ARS"); lh.addStaticHoliday ("01-MAY-2018", "ARS"); lh.addStaticHoliday ("25-MAY-2018", "ARS"); lh.addStaticHoliday ("18-JUN-2018", "ARS"); lh.addStaticHoliday ("09-JUL-2018", "ARS"); lh.addStaticHoliday ("20-AUG-2018", "ARS"); lh.addStaticHoliday ("15-OCT-2018", "ARS"); lh.addStaticHoliday ("06-NOV-2018", "ARS"); lh.addStaticHoliday ("25-DEC-2018", "ARS"); lh.addStaticHoliday ("01-JAN-2019", "ARS"); lh.addStaticHoliday ("02-APR-2019", "ARS"); lh.addStaticHoliday ("18-APR-2019", "ARS"); lh.addStaticHoliday ("19-APR-2019", "ARS"); lh.addStaticHoliday ("01-MAY-2019", "ARS"); lh.addStaticHoliday ("17-JUN-2019", "ARS"); lh.addStaticHoliday ("09-JUL-2019", "ARS"); lh.addStaticHoliday ("19-AUG-2019", "ARS"); lh.addStaticHoliday ("14-OCT-2019", "ARS"); lh.addStaticHoliday ("06-NOV-2019", "ARS"); lh.addStaticHoliday ("25-DEC-2019", "ARS"); lh.addStaticHoliday ("01-JAN-2020", "ARS"); lh.addStaticHoliday ("24-MAR-2020", "ARS"); lh.addStaticHoliday ("02-APR-2020", "ARS"); lh.addStaticHoliday ("09-APR-2020", "ARS"); lh.addStaticHoliday ("10-APR-2020", "ARS"); lh.addStaticHoliday ("01-MAY-2020", "ARS"); lh.addStaticHoliday ("25-MAY-2020", "ARS"); lh.addStaticHoliday ("15-JUN-2020", "ARS"); lh.addStaticHoliday ("09-JUL-2020", "ARS"); lh.addStaticHoliday ("17-AUG-2020", "ARS"); lh.addStaticHoliday ("12-OCT-2020",
		"ARS"); lh.addStaticHoliday ("06-NOV-2020", "ARS"); lh.addStaticHoliday ("08-DEC-2020", "ARS"); lh.addStaticHoliday ("25-DEC-2020", "ARS"); lh.addStaticHoliday ("01-JAN-2021", "ARS"); lh.addStaticHoliday ("24-MAR-2021", "ARS"); lh.addStaticHoliday ("01-APR-2021", "ARS"); lh.addStaticHoliday ("02-APR-2021", "ARS"); lh.addStaticHoliday ("25-MAY-2021", "ARS"); lh.addStaticHoliday ("21-JUN-2021", "ARS"); lh.addStaticHoliday ("09-JUL-2021", "ARS"); lh.addStaticHoliday ("16-AUG-2021", "ARS"); lh.addStaticHoliday ("11-OCT-2021", "ARS"); lh.addStaticHoliday ("08-DEC-2021", "ARS"); lh.addStaticHoliday ("24-MAR-2022", "ARS"); lh.addStaticHoliday ("14-APR-2022", "ARS"); lh.addStaticHoliday ("15-APR-2022", "ARS"); lh.addStaticHoliday ("25-MAY-2022", "ARS"); lh.addStaticHoliday ("20-JUN-2022", "ARS"); lh.addStaticHoliday ("15-AUG-2022", "ARS"); lh.addStaticHoliday ("10-OCT-2022", "ARS"); lh.addStaticHoliday ("08-DEC-2022", "ARS"); lh.addStaticHoliday ("24-MAR-2023", "ARS"); lh.addStaticHoliday ("06-APR-2023", "ARS"); lh.addStaticHoliday ("07-APR-2023", "ARS"); lh.addStaticHoliday ("01-MAY-2023", "ARS"); lh.addStaticHoliday ("25-MAY-2023", "ARS"); lh.addStaticHoliday ("19-JUN-2023", "ARS"); lh.addStaticHoliday ("21-AUG-2023", "ARS"); lh.addStaticHoliday ("16-OCT-2023", "ARS"); lh.addStaticHoliday ("06-NOV-2023", "ARS"); lh.addStaticHoliday ("08-DEC-2023", "ARS"); lh.addStaticHoliday ("25-DEC-2023", "ARS"); lh.addStaticHoliday ("01-JAN-2024", "ARS"); lh.addStaticHoliday ("28-MAR-2024", "ARS"); lh.addStaticHoliday ("29-MAR-2024", "ARS"); lh.addStaticHoliday ("02-APR-2024", "ARS"); lh.addStaticHoliday ("01-MAY-2024", "ARS"); lh.addStaticHoliday ("17-JUN-2024", "ARS"); lh.addStaticHoliday ("09-JUL-2024", "ARS"); lh.addStaticHoliday ("19-AUG-2024", "ARS"); lh.addStaticHoliday ("14-OCT-2024", "ARS"); lh.addStaticHoliday ("06-NOV-2024", "ARS"); lh.addStaticHoliday ("25-DEC-2024", "ARS"); lh.addStaticHoliday ("01-JAN-2025", "ARS"); lh.addStaticHoliday ("24-MAR-2025", "ARS");
		lh.addStaticHoliday ("02-APR-2025", "ARS"); lh.addStaticHoliday ("17-APR-2025", "ARS"); lh.addStaticHoliday ("18-APR-2025", "ARS"); lh.addStaticHoliday ("01-MAY-2025", "ARS"); lh.addStaticHoliday ("16-JUN-2025", "ARS"); lh.addStaticHoliday ("09-JUL-2025", "ARS"); lh.addStaticHoliday ("18-AUG-2025", "ARS"); lh.addStaticHoliday ("13-OCT-2025", "ARS"); lh.addStaticHoliday ("06-NOV-2025", "ARS"); lh.addStaticHoliday ("08-DEC-2025", "ARS"); lh.addStaticHoliday ("25-DEC-2025", "ARS"); lh.addStaticHoliday ("01-JAN-2026", "ARS"); lh.addStaticHoliday ("24-MAR-2026", "ARS"); lh.addStaticHoliday ("02-APR-2026", "ARS"); lh.addStaticHoliday ("03-APR-2026", "ARS"); lh.addStaticHoliday ("01-MAY-2026", "ARS"); lh.addStaticHoliday ("25-MAY-2026", "ARS"); lh.addStaticHoliday ("15-JUN-2026", "ARS"); lh.addStaticHoliday ("09-JUL-2026", "ARS"); lh.addStaticHoliday ("17-AUG-2026", "ARS"); lh.addStaticHoliday ("12-OCT-2026", "ARS"); lh.addStaticHoliday ("06-NOV-2026", "ARS"); lh.addStaticHoliday ("08-DEC-2026", "ARS"); lh.addStaticHoliday ("25-DEC-2026", "ARS"); lh.addStaticHoliday ("01-JAN-2027", "ARS"); lh.addStaticHoliday ("24-MAR-2027", "ARS"); lh.addStaticHoliday ("25-MAR-2027", "ARS"); lh.addStaticHoliday ("26-MAR-2027", "ARS"); lh.addStaticHoliday ("02-APR-2027", "ARS"); lh.addStaticHoliday ("25-MAY-2027", "ARS"); lh.addStaticHoliday ("21-JUN-2027", "ARS"); lh.addStaticHoliday ("09-JUL-2027", "ARS"); lh.addStaticHoliday ("16-AUG-2027", "ARS"); lh.addStaticHoliday ("11-OCT-2027", "ARS"); lh.addStaticHoliday ("08-DEC-2027", "ARS"); lh.addStaticHoliday ("24-MAR-2028", "ARS"); lh.addStaticHoliday ("13-APR-2028", "ARS"); lh.addStaticHoliday ("14-APR-2028", "ARS"); lh.addStaticHoliday ("01-MAY-2028", "ARS"); lh.addStaticHoliday ("25-MAY-2028", "ARS"); lh.addStaticHoliday ("19-JUN-2028", "ARS"); lh.addStaticHoliday ("21-AUG-2028", "ARS"); lh.addStaticHoliday ("16-OCT-2028", "ARS"); lh.addStaticHoliday ("06-NOV-2028", "ARS"); lh.addStaticHoliday ("08-DEC-2028", "ARS"); lh.addStaticHoliday
		("25-DEC-2028", "ARS"); lh.addStaticHoliday ("01-JAN-2029", "ARS"); lh.addStaticHoliday ("29-MAR-2029", "ARS"); lh.addStaticHoliday ("30-MAR-2029", "ARS"); lh.addStaticHoliday ("02-APR-2029", "ARS"); lh.addStaticHoliday ("01-MAY-2029", "ARS"); lh.addStaticHoliday ("25-MAY-2029", "ARS"); lh.addStaticHoliday ("18-JUN-2029", "ARS"); lh.addStaticHoliday ("09-JUL-2029", "ARS"); lh.addStaticHoliday ("20-AUG-2029", "ARS"); lh.addStaticHoliday ("15-OCT-2029", "ARS"); lh.addStaticHoliday ("06-NOV-2029", "ARS"); lh.addStaticHoliday ("25-DEC-2029", "ARS"); lh.addStaticHoliday ("01-JAN-2030", "ARS"); lh.addStaticHoliday ("02-APR-2030", "ARS"); lh.addStaticHoliday ("18-APR-2030", "ARS"); lh.addStaticHoliday ("19-APR-2030", "ARS"); lh.addStaticHoliday ("01-MAY-2030", "ARS"); lh.addStaticHoliday ("17-JUN-2030", "ARS"); lh.addStaticHoliday ("09-JUL-2030", "ARS"); lh.addStaticHoliday ("19-AUG-2030", "ARS"); lh.addStaticHoliday ("14-OCT-2030", "ARS"); lh.addStaticHoliday ("06-NOV-2030", "ARS"); lh.addStaticHoliday ("25-DEC-2030", "ARS"); lh.addStaticHoliday ("01-JAN-2031", "ARS"); lh.addStaticHoliday ("24-MAR-2031", "ARS"); lh.addStaticHoliday ("02-APR-2031", "ARS"); lh.addStaticHoliday ("10-APR-2031", "ARS"); lh.addStaticHoliday ("11-APR-2031", "ARS"); lh.addStaticHoliday ("01-MAY-2031", "ARS"); lh.addStaticHoliday ("16-JUN-2031", "ARS"); lh.addStaticHoliday ("09-JUL-2031", "ARS"); lh.addStaticHoliday ("18-AUG-2031", "ARS"); lh.addStaticHoliday ("13-OCT-2031", "ARS"); lh.addStaticHoliday ("06-NOV-2031", "ARS"); lh.addStaticHoliday ("08-DEC-2031", "ARS"); lh.addStaticHoliday ("25-DEC-2031", "ARS"); lh.addStaticHoliday ("01-JAN-2032", "ARS"); lh.addStaticHoliday ("24-MAR-2032", "ARS"); lh.addStaticHoliday ("25-MAR-2032", "ARS"); lh.addStaticHoliday ("26-MAR-2032", "ARS"); lh.addStaticHoliday ("02-APR-2032", "ARS"); lh.addStaticHoliday ("25-MAY-2032", "ARS"); lh.addStaticHoliday ("21-JUN-2032", "ARS"); lh.addStaticHoliday ("09-JUL-2032", "ARS"); lh.addStaticHoliday ("16-AUG-2032",
		"ARS"); lh.addStaticHoliday ("11-OCT-2032", "ARS"); lh.addStaticHoliday ("08-DEC-2032", "ARS"); lh.addStaticHoliday ("24-MAR-2033", "ARS"); lh.addStaticHoliday ("14-APR-2033", "ARS"); lh.addStaticHoliday ("15-APR-2033", "ARS"); lh.addStaticHoliday ("25-MAY-2033", "ARS"); lh.addStaticHoliday ("20-JUN-2033", "ARS"); lh.addStaticHoliday ("15-AUG-2033", "ARS"); lh.addStaticHoliday ("10-OCT-2033", "ARS"); lh.addStaticHoliday ("08-DEC-2033", "ARS"); lh.addStaticHoliday ("24-MAR-2034", "ARS"); lh.addStaticHoliday ("06-APR-2034", "ARS"); lh.addStaticHoliday ("07-APR-2034", "ARS"); lh.addStaticHoliday ("01-MAY-2034", "ARS"); lh.addStaticHoliday ("25-MAY-2034", "ARS"); lh.addStaticHoliday ("19-JUN-2034", "ARS"); lh.addStaticHoliday ("21-AUG-2034", "ARS"); lh.addStaticHoliday ("16-OCT-2034", "ARS"); lh.addStaticHoliday ("06-NOV-2034", "ARS"); lh.addStaticHoliday ("08-DEC-2034", "ARS"); lh.addStaticHoliday ("25-DEC-2034", "ARS"); lh.addStaticHoliday ("01-JAN-2035", "ARS"); lh.addStaticHoliday ("22-MAR-2035", "ARS"); lh.addStaticHoliday ("23-MAR-2035", "ARS"); lh.addStaticHoliday ("02-APR-2035", "ARS"); lh.addStaticHoliday ("01-MAY-2035", "ARS"); lh.addStaticHoliday ("25-MAY-2035", "ARS"); lh.addStaticHoliday ("18-JUN-2035", "ARS"); lh.addStaticHoliday ("09-JUL-2035", "ARS"); lh.addStaticHoliday ("20-AUG-2035", "ARS"); lh.addStaticHoliday ("15-OCT-2035", "ARS"); lh.addStaticHoliday ("06-NOV-2035", "ARS"); lh.addStaticHoliday ("25-DEC-2035", "ARS"); lh.addStaticHoliday ("01-JAN-2036", "ARS"); lh.addStaticHoliday ("24-MAR-2036", "ARS"); lh.addStaticHoliday ("02-APR-2036", "ARS"); lh.addStaticHoliday ("10-APR-2036", "ARS"); lh.addStaticHoliday ("11-APR-2036", "ARS"); lh.addStaticHoliday ("01-MAY-2036", "ARS"); lh.addStaticHoliday ("16-JUN-2036", "ARS"); lh.addStaticHoliday ("09-JUL-2036", "ARS"); lh.addStaticHoliday ("18-AUG-2036", "ARS"); lh.addStaticHoliday ("13-OCT-2036", "ARS"); lh.addStaticHoliday ("06-NOV-2036", "ARS"); lh.addStaticHoliday ("08-DEC-2036", "ARS");
		lh.addStaticHoliday ("25-DEC-2036", "ARS"); lh.addStaticHoliday ("01-JAN-2037", "ARS"); lh.addStaticHoliday ("24-MAR-2037", "ARS"); lh.addStaticHoliday ("02-APR-2037", "ARS"); lh.addStaticHoliday ("03-APR-2037", "ARS"); lh.addStaticHoliday ("01-MAY-2037", "ARS"); lh.addStaticHoliday ("25-MAY-2037", "ARS"); lh.addStaticHoliday ("15-JUN-2037", "ARS"); lh.addStaticHoliday ("09-JUL-2037", "ARS"); lh.addStaticHoliday ("17-AUG-2037", "ARS"); lh.addStaticHoliday ("12-OCT-2037", "ARS"); lh.addStaticHoliday ("06-NOV-2037", "ARS"); lh.addStaticHoliday ("08-DEC-2037", "ARS"); lh.addStaticHoliday ("25-DEC-2037", "ARS"); lh.addStaticHoliday ("01-JAN-2038", "ARS"); lh.addStaticHoliday ("24-MAR-2038", "ARS"); lh.addStaticHoliday ("02-APR-2038", "ARS"); lh.addStaticHoliday ("22-APR-2038", "ARS"); lh.addStaticHoliday ("23-APR-2038", "ARS"); lh.addStaticHoliday ("25-MAY-2038", "ARS"); lh.addStaticHoliday ("21-JUN-2038", "ARS"); lh.addStaticHoliday ("09-JUL-2038", "ARS"); lh.addStaticHoliday ("16-AUG-2038", "ARS"); lh.addStaticHoliday ("11-OCT-2038", "ARS"); lh.addStaticHoliday ("08-DEC-2038", "ARS"); lh.addStaticHoliday ("24-MAR-2039", "ARS"); lh.addStaticHoliday ("07-APR-2039", "ARS"); lh.addStaticHoliday ("08-APR-2039", "ARS"); lh.addStaticHoliday ("25-MAY-2039", "ARS"); lh.addStaticHoliday ("20-JUN-2039", "ARS"); lh.addStaticHoliday ("15-AUG-2039", "ARS"); lh.addStaticHoliday ("10-OCT-2039", "ARS"); lh.addStaticHoliday ("08-DEC-2039", "ARS"); lh.addStaticHoliday ("29-MAR-2040", "ARS"); lh.addStaticHoliday ("30-MAR-2040", "ARS"); lh.addStaticHoliday ("02-APR-2040", "ARS"); lh.addStaticHoliday ("01-MAY-2040", "ARS"); lh.addStaticHoliday ("25-MAY-2040", "ARS"); lh.addStaticHoliday ("18-JUN-2040", "ARS"); lh.addStaticHoliday ("09-JUL-2040", "ARS"); lh.addStaticHoliday ("20-AUG-2040", "ARS"); lh.addStaticHoliday ("15-OCT-2040", "ARS"); lh.addStaticHoliday ("06-NOV-2040", "ARS"); lh.addStaticHoliday ("25-DEC-2040", "ARS"); lh.addStaticHoliday ("01-JAN-2041", "ARS"); lh.addStaticHoliday
		("02-APR-2041", "ARS"); lh.addStaticHoliday ("18-APR-2041", "ARS"); lh.addStaticHoliday ("19-APR-2041", "ARS"); lh.addStaticHoliday ("01-MAY-2041", "ARS"); lh.addStaticHoliday ("17-JUN-2041", "ARS"); lh.addStaticHoliday ("09-JUL-2041", "ARS"); lh.addStaticHoliday ("19-AUG-2041", "ARS"); lh.addStaticHoliday ("14-OCT-2041", "ARS"); lh.addStaticHoliday ("06-NOV-2041", "ARS"); lh.addStaticHoliday ("25-DEC-2041", "ARS"); lh.addStaticHoliday ("01-JAN-2042", "ARS"); lh.addStaticHoliday ("24-MAR-2042", "ARS"); lh.addStaticHoliday ("02-APR-2042", "ARS"); lh.addStaticHoliday ("03-APR-2042", "ARS"); lh.addStaticHoliday ("04-APR-2042", "ARS"); lh.addStaticHoliday ("01-MAY-2042", "ARS"); lh.addStaticHoliday ("16-JUN-2042", "ARS"); lh.addStaticHoliday ("09-JUL-2042", "ARS"); lh.addStaticHoliday ("18-AUG-2042", "ARS"); lh.addStaticHoliday ("13-OCT-2042", "ARS"); lh.addStaticHoliday ("06-NOV-2042", "ARS"); lh.addStaticHoliday ("08-DEC-2042", "ARS"); lh.addStaticHoliday ("25-DEC-2042", "ARS"); lh.addStaticHoliday ("01-JAN-2043", "ARS"); lh.addStaticHoliday ("24-MAR-2043", "ARS"); lh.addStaticHoliday ("26-MAR-2043", "ARS"); lh.addStaticHoliday ("27-MAR-2043", "ARS"); lh.addStaticHoliday ("02-APR-2043", "ARS"); lh.addStaticHoliday ("01-MAY-2043", "ARS"); lh.addStaticHoliday ("25-MAY-2043", "ARS"); lh.addStaticHoliday ("15-JUN-2043", "ARS"); lh.addStaticHoliday ("09-JUL-2043", "ARS"); lh.addStaticHoliday ("17-AUG-2043", "ARS"); lh.addStaticHoliday ("12-OCT-2043", "ARS"); lh.addStaticHoliday ("06-NOV-2043", "ARS"); lh.addStaticHoliday ("08-DEC-2043", "ARS"); lh.addStaticHoliday ("25-DEC-2043", "ARS"); lh.addStaticHoliday ("01-JAN-2044", "ARS"); lh.addStaticHoliday ("24-MAR-2044", "ARS"); lh.addStaticHoliday ("14-APR-2044", "ARS"); lh.addStaticHoliday ("15-APR-2044", "ARS"); lh.addStaticHoliday ("25-MAY-2044", "ARS"); lh.addStaticHoliday ("20-JUN-2044", "ARS"); lh.addStaticHoliday ("15-AUG-2044", "ARS"); lh.addStaticHoliday ("10-OCT-2044", "ARS"); lh.addStaticHoliday ("08-DEC-2044",
		"ARS"); lh.addStaticHoliday ("24-MAR-2045", "ARS"); lh.addStaticHoliday ("06-APR-2045", "ARS"); lh.addStaticHoliday ("07-APR-2045", "ARS"); lh.addStaticHoliday ("01-MAY-2045", "ARS"); lh.addStaticHoliday ("25-MAY-2045", "ARS"); lh.addStaticHoliday ("19-JUN-2045", "ARS"); lh.addStaticHoliday ("21-AUG-2045", "ARS"); lh.addStaticHoliday ("16-OCT-2045", "ARS"); lh.addStaticHoliday ("06-NOV-2045", "ARS"); lh.addStaticHoliday ("08-DEC-2045", "ARS"); lh.addStaticHoliday ("25-DEC-2045", "ARS"); lh.addStaticHoliday ("01-JAN-2046", "ARS"); lh.addStaticHoliday ("22-MAR-2046", "ARS"); lh.addStaticHoliday ("23-MAR-2046", "ARS"); lh.addStaticHoliday ("02-APR-2046", "ARS"); lh.addStaticHoliday ("01-MAY-2046", "ARS"); lh.addStaticHoliday ("25-MAY-2046", "ARS"); lh.addStaticHoliday ("18-JUN-2046", "ARS"); lh.addStaticHoliday ("09-JUL-2046", "ARS"); lh.addStaticHoliday ("20-AUG-2046", "ARS"); lh.addStaticHoliday ("15-OCT-2046", "ARS"); lh.addStaticHoliday ("06-NOV-2046", "ARS"); lh.addStaticHoliday ("25-DEC-2046", "ARS"); lh.addStaticHoliday ("01-JAN-2047", "ARS"); lh.addStaticHoliday ("02-APR-2047", "ARS"); lh.addStaticHoliday ("11-APR-2047", "ARS"); lh.addStaticHoliday ("12-APR-2047", "ARS"); lh.addStaticHoliday ("01-MAY-2047", "ARS"); lh.addStaticHoliday ("17-JUN-2047", "ARS"); lh.addStaticHoliday ("09-JUL-2047", "ARS"); lh.addStaticHoliday ("19-AUG-2047", "ARS"); lh.addStaticHoliday ("14-OCT-2047", "ARS"); lh.addStaticHoliday ("06-NOV-2047", "ARS"); lh.addStaticHoliday ("25-DEC-2047", "ARS"); lh.addStaticHoliday ("01-JAN-2048", "ARS"); lh.addStaticHoliday ("24-MAR-2048", "ARS"); lh.addStaticHoliday ("02-APR-2048", "ARS"); lh.addStaticHoliday ("03-APR-2048", "ARS"); lh.addStaticHoliday ("01-MAY-2048", "ARS"); lh.addStaticHoliday ("25-MAY-2048", "ARS"); lh.addStaticHoliday ("15-JUN-2048", "ARS"); lh.addStaticHoliday ("09-JUL-2048", "ARS"); lh.addStaticHoliday ("17-AUG-2048", "ARS"); lh.addStaticHoliday ("12-OCT-2048", "ARS"); lh.addStaticHoliday ("06-NOV-2048", "ARS");
		lh.addStaticHoliday ("08-DEC-2048", "ARS"); lh.addStaticHoliday ("25-DEC-2048", "ARS"); lh.addStaticHoliday ("01-JAN-2049", "ARS"); lh.addStaticHoliday ("24-MAR-2049", "ARS"); lh.addStaticHoliday ("02-APR-2049", "ARS"); lh.addStaticHoliday ("15-APR-2049", "ARS"); lh.addStaticHoliday ("16-APR-2049", "ARS"); lh.addStaticHoliday ("25-MAY-2049", "ARS"); lh.addStaticHoliday ("21-JUN-2049", "ARS"); lh.addStaticHoliday ("09-JUL-2049", "ARS"); lh.addStaticHoliday ("16-AUG-2049", "ARS"); lh.addStaticHoliday ("11-OCT-2049", "ARS"); lh.addStaticHoliday ("08-DEC-2049", "ARS"); lh.addStaticHoliday ("24-MAR-2050", "ARS"); lh.addStaticHoliday ("07-APR-2050", "ARS"); lh.addStaticHoliday ("08-APR-2050", "ARS"); lh.addStaticHoliday ("25-MAY-2050", "ARS"); lh.addStaticHoliday ("20-JUN-2050", "ARS"); lh.addStaticHoliday ("15-AUG-2050", "ARS"); lh.addStaticHoliday ("10-OCT-2050", "ARS"); lh.addStaticHoliday ("08-DEC-2050", "ARS"); lh.addStaticHoliday ("24-MAR-2051", "ARS"); lh.addStaticHoliday ("30-MAR-2051", "ARS"); lh.addStaticHoliday ("31-MAR-2051", "ARS"); lh.addStaticHoliday ("01-MAY-2051", "ARS"); lh.addStaticHoliday ("25-MAY-2051", "ARS"); lh.addStaticHoliday ("19-JUN-2051", "ARS"); lh.addStaticHoliday ("21-AUG-2051", "ARS"); lh.addStaticHoliday ("16-OCT-2051", "ARS"); lh.addStaticHoliday ("06-NOV-2051", "ARS"); lh.addStaticHoliday ("08-DEC-2051", "ARS"); lh.addStaticHoliday ("25-DEC-2051", "ARS"); lh.addStaticHoliday ("01-JAN-2052", "ARS"); lh.addStaticHoliday ("02-APR-2052", "ARS"); lh.addStaticHoliday ("18-APR-2052", "ARS"); lh.addStaticHoliday ("19-APR-2052", "ARS"); lh.addStaticHoliday ("01-MAY-2052", "ARS"); lh.addStaticHoliday ("17-JUN-2052", "ARS"); lh.addStaticHoliday ("09-JUL-2052", "ARS"); lh.addStaticHoliday ("19-AUG-2052", "ARS"); lh.addStaticHoliday ("14-OCT-2052", "ARS"); lh.addStaticHoliday ("06-NOV-2052", "ARS"); lh.addStaticHoliday ("25-DEC-2052", "ARS"); lh.addStaticHoliday ("01-JAN-2053", "ARS"); lh.addStaticHoliday ("24-MAR-2053", "ARS"); lh.addStaticHoliday
		("02-APR-2053", "ARS"); lh.addStaticHoliday ("03-APR-2053", "ARS"); lh.addStaticHoliday ("04-APR-2053", "ARS"); lh.addStaticHoliday ("01-MAY-2053", "ARS"); lh.addStaticHoliday ("16-JUN-2053", "ARS"); lh.addStaticHoliday ("09-JUL-2053", "ARS"); lh.addStaticHoliday ("18-AUG-2053", "ARS"); lh.addStaticHoliday ("13-OCT-2053", "ARS"); lh.addStaticHoliday ("06-NOV-2053", "ARS"); lh.addStaticHoliday ("08-DEC-2053", "ARS"); lh.addStaticHoliday ("25-DEC-2053", "ARS"); lh.addStaticHoliday ("01-JAN-2054", "ARS"); lh.addStaticHoliday ("24-MAR-2054", "ARS"); lh.addStaticHoliday ("26-MAR-2054", "ARS"); lh.addStaticHoliday ("27-MAR-2054", "ARS"); lh.addStaticHoliday ("02-APR-2054", "ARS"); lh.addStaticHoliday ("01-MAY-2054", "ARS"); lh.addStaticHoliday ("25-MAY-2054", "ARS"); lh.addStaticHoliday ("15-JUN-2054", "ARS"); lh.addStaticHoliday ("09-JUL-2054", "ARS"); lh.addStaticHoliday ("17-AUG-2054", "ARS"); lh.addStaticHoliday ("12-OCT-2054", "ARS"); lh.addStaticHoliday ("06-NOV-2054", "ARS"); lh.addStaticHoliday ("08-DEC-2054", "ARS"); lh.addStaticHoliday ("25-DEC-2054", "ARS"); lh.addStaticHoliday ("01-JAN-2055", "ARS"); lh.addStaticHoliday ("24-MAR-2055", "ARS"); lh.addStaticHoliday ("02-APR-2055", "ARS"); lh.addStaticHoliday ("15-APR-2055", "ARS"); lh.addStaticHoliday ("16-APR-2055", "ARS"); lh.addStaticHoliday ("25-MAY-2055", "ARS"); lh.addStaticHoliday ("21-JUN-2055", "ARS"); lh.addStaticHoliday ("09-JUL-2055", "ARS"); lh.addStaticHoliday ("16-AUG-2055", "ARS"); lh.addStaticHoliday ("11-OCT-2055", "ARS"); lh.addStaticHoliday ("08-DEC-2055", "ARS"); lh.addStaticHoliday ("24-MAR-2056", "ARS"); lh.addStaticHoliday ("30-MAR-2056", "ARS"); lh.addStaticHoliday ("31-MAR-2056", "ARS"); lh.addStaticHoliday ("01-MAY-2056", "ARS"); lh.addStaticHoliday ("25-MAY-2056", "ARS"); lh.addStaticHoliday ("19-JUN-2056", "ARS"); lh.addStaticHoliday ("21-AUG-2056", "ARS"); lh.addStaticHoliday ("16-OCT-2056", "ARS"); lh.addStaticHoliday ("06-NOV-2056", "ARS"); lh.addStaticHoliday ("08-DEC-2056",
		"ARS"); lh.addStaticHoliday ("25-DEC-2056", "ARS"); lh.addStaticHoliday ("01-JAN-2057", "ARS"); lh.addStaticHoliday ("02-APR-2057", "ARS"); lh.addStaticHoliday ("19-APR-2057", "ARS"); lh.addStaticHoliday ("20-APR-2057", "ARS"); lh.addStaticHoliday ("01-MAY-2057", "ARS"); lh.addStaticHoliday ("25-MAY-2057", "ARS"); lh.addStaticHoliday ("18-JUN-2057", "ARS"); lh.addStaticHoliday ("09-JUL-2057", "ARS"); lh.addStaticHoliday ("20-AUG-2057", "ARS"); lh.addStaticHoliday ("15-OCT-2057", "ARS"); lh.addStaticHoliday ("06-NOV-2057", "ARS"); lh.addStaticHoliday ("25-DEC-2057", "ARS"); lh.addStaticHoliday ("01-JAN-2058", "ARS"); lh.addStaticHoliday ("02-APR-2058", "ARS"); lh.addStaticHoliday ("11-APR-2058", "ARS"); lh.addStaticHoliday ("12-APR-2058", "ARS"); lh.addStaticHoliday ("01-MAY-2058", "ARS"); lh.addStaticHoliday ("17-JUN-2058", "ARS"); lh.addStaticHoliday ("09-JUL-2058", "ARS"); lh.addStaticHoliday ("19-AUG-2058", "ARS"); lh.addStaticHoliday ("14-OCT-2058", "ARS"); lh.addStaticHoliday ("06-NOV-2058", "ARS"); lh.addStaticHoliday ("25-DEC-2058", "ARS"); lh.addStaticHoliday ("01-JAN-2059", "ARS"); lh.addStaticHoliday ("24-MAR-2059", "ARS"); lh.addStaticHoliday ("27-MAR-2059", "ARS"); lh.addStaticHoliday ("28-MAR-2059", "ARS"); lh.addStaticHoliday ("02-APR-2059", "ARS"); lh.addStaticHoliday ("01-MAY-2059", "ARS"); lh.addStaticHoliday ("16-JUN-2059", "ARS"); lh.addStaticHoliday ("09-JUL-2059", "ARS"); lh.addStaticHoliday ("18-AUG-2059", "ARS"); lh.addStaticHoliday ("13-OCT-2059", "ARS"); lh.addStaticHoliday ("06-NOV-2059", "ARS"); lh.addStaticHoliday ("08-DEC-2059", "ARS"); lh.addStaticHoliday ("25-DEC-2059", "ARS"); lh.addStaticHoliday ("01-JAN-2060", "ARS"); lh.addStaticHoliday ("24-MAR-2060", "ARS"); lh.addStaticHoliday ("02-APR-2060", "ARS"); lh.addStaticHoliday ("15-APR-2060", "ARS"); lh.addStaticHoliday ("16-APR-2060", "ARS"); lh.addStaticHoliday ("25-MAY-2060", "ARS"); lh.addStaticHoliday ("21-JUN-2060", "ARS"); lh.addStaticHoliday ("09-JUL-2060", "ARS");
		lh.addStaticHoliday ("16-AUG-2060", "ARS"); lh.addStaticHoliday ("11-OCT-2060", "ARS"); lh.addStaticHoliday ("08-DEC-2060", "ARS"); lh.addStaticHoliday ("24-MAR-2061", "ARS"); lh.addStaticHoliday ("07-APR-2061", "ARS"); lh.addStaticHoliday ("08-APR-2061", "ARS"); lh.addStaticHoliday ("25-MAY-2061", "ARS"); lh.addStaticHoliday ("20-JUN-2061", "ARS"); lh.addStaticHoliday ("15-AUG-2061", "ARS"); lh.addStaticHoliday ("10-OCT-2061", "ARS"); lh.addStaticHoliday ("08-DEC-2061", "ARS");

		// Saturdays and Sundays are non-business days in every year.
		lh.addStandardWeekend();

		return lh;
	}
}
apache-2.0
sunpy1106/SpringBeanLifeCycle
src/main/java/org/springframework/scripting/package-info.java
106
/**
 * Core interfaces for Spring's scripting support.
 */
package org.springframework.scripting;
apache-2.0
sijie/bookkeeper
bookkeeper-server/src/main/java/org/apache/bookkeeper/zookeeper/package-info.java
921
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * A simple retryable zookeeper wrapper provided in bookkeeper. */ package org.apache.bookkeeper.zookeeper;
apache-2.0
andrewvc/elasticsearch
src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java
5977
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.aliases;

import org.apache.lucene.queries.FilterClause;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedFilter;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.AliasFilterParsingException;
import org.elasticsearch.indices.InvalidAliasNameException;

import java.io.IOException;
import java.util.Iterator;
import java.util.Map;

/**
 * Maintains the set of aliases defined for a single index, each with an optional
 * compressed filter source, and resolves lists of filtering aliases into a single
 * Lucene {@link Filter}.
 */
public class IndexAliasesService extends AbstractIndexComponent implements Iterable<IndexAlias> {

    private final IndexQueryParserService indexQueryParser;

    // alias name -> alias metadata (raw filter source + parsed filter); concurrent
    // because aliases may be added/removed while searches resolve filters.
    private final Map<String, IndexAlias> aliases = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();

    @Inject
    public IndexAliasesService(Index index, @IndexSettings Settings indexSettings, IndexQueryParserService indexQueryParser) {
        super(index, indexSettings);
        this.indexQueryParser = indexQueryParser;
    }

    /**
     * @param alias alias name
     * @return {@code true} if an alias with that name is registered
     */
    public boolean hasAlias(String alias) {
        return aliases.containsKey(alias);
    }

    /**
     * @param alias alias name
     * @return the registered alias, or {@code null} if none
     */
    public IndexAlias alias(String alias) {
        return aliases.get(alias);
    }

    /**
     * Creates (but does not register) an alias, parsing its filter source eagerly.
     *
     * @param alias  alias name
     * @param filter optional compressed filter source; may be {@code null}
     */
    public IndexAlias create(String alias, @Nullable CompressedString filter) {
        return new IndexAlias(alias, filter, parse(alias, filter));
    }

    /**
     * Registers an alias, parsing its filter source eagerly.
     *
     * @param alias  alias name
     * @param filter optional compressed filter source; may be {@code null}
     */
    public void add(String alias, @Nullable CompressedString filter) {
        add(new IndexAlias(alias, filter, parse(alias, filter)));
    }

    /**
     * Registers all given aliases (already-parsed) in bulk.
     */
    public void addAll(Map<String, IndexAlias> aliases) {
        this.aliases.putAll(aliases);
    }

    /**
     * Returns the filter associated with listed filtering aliases.
     * <p/>
     * <p>The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
     * Returns <tt>null</tt> if no filtering is required.</p>
     *
     * @throws InvalidAliasNameException if a named alias no longer exists (it may have been
     *                                   removed after the caller computed the filtering aliases)
     */
    public Filter aliasFilter(String... aliases) {
        if (aliases == null || aliases.length == 0) {
            return null;
        }
        if (aliases.length == 1) {
            IndexAlias indexAlias = alias(aliases[0]);
            if (indexAlias == null) {
                // This shouldn't happen unless alias disappeared after filteringAliases was called.
                throw new InvalidAliasNameException(index, aliases[0], "Unknown alias name was passed to alias Filter");
            }
            return indexAlias.parsedFilter();
        } else {
            // we need to bench here a bit, to see maybe it makes sense to use OrFilter
            XBooleanFilter combined = new XBooleanFilter();
            for (String alias : aliases) {
                IndexAlias indexAlias = alias(alias);
                if (indexAlias == null) {
                    // This shouldn't happen unless alias disappeared after filteringAliases was called.
                    // BUGFIX: report the alias that is actually missing, not always aliases[0].
                    throw new InvalidAliasNameException(index, alias, "Unknown alias name was passed to alias Filter");
                }
                if (indexAlias.parsedFilter() != null) {
                    combined.add(new FilterClause(indexAlias.parsedFilter(), BooleanClause.Occur.SHOULD));
                } else {
                    // The filter might be null only if filter was removed after filteringAliases was called
                    return null;
                }
            }
            if (combined.clauses().size() == 0) {
                return null;
            }
            if (combined.clauses().size() == 1) {
                return combined.clauses().get(0).getFilter();
            }
            return combined;
        }
    }

    private void add(IndexAlias indexAlias) {
        aliases.put(indexAlias.alias(), indexAlias);
    }

    /**
     * Removes the alias with the given name (no-op if absent).
     */
    public void remove(String alias) {
        aliases.remove(alias);
    }

    /**
     * Parses a compressed filter source into a Lucene filter.
     *
     * @return the parsed filter, or {@code null} when the source is {@code null} or parses to nothing
     * @throws AliasFilterParsingException if the source is not a valid filter
     */
    private Filter parse(String alias, CompressedString filter) {
        if (filter == null) {
            return null;
        }
        try {
            byte[] filterSource = filter.uncompressed();
            XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource);
            try {
                ParsedFilter parsedFilter = indexQueryParser.parseInnerFilter(parser);
                return parsedFilter == null ? null : parsedFilter.filter();
            } finally {
                parser.close();
            }
        } catch (IOException ex) {
            throw new AliasFilterParsingException(index, alias, "Invalid alias filter", ex);
        }
    }

    @Override
    public Iterator<IndexAlias> iterator() {
        return aliases.values().iterator();
    }
}
apache-2.0
meteorcloudy/bazel
src/main/java/com/google/devtools/build/lib/buildtool/CqueryBuildTool.java
2766
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildtool;

import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.query2.PostAnalysisQueryEnvironment.TopLevelConfigurations;
import com.google.devtools.build.lib.query2.cquery.ConfiguredTargetQueryEnvironment;
import com.google.devtools.build.lib.query2.cquery.CqueryOptions;
import com.google.devtools.build.lib.query2.cquery.KeyedConfiguredTarget;
import com.google.devtools.build.lib.query2.engine.QueryEnvironment.QueryFunction;
import com.google.devtools.build.lib.query2.engine.QueryExpression;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.WalkableGraph;
import java.util.Collection;

/** A version of {@link BuildTool} that handles all cquery work. */
public final class CqueryBuildTool extends PostAnalysisQueryBuildTool<KeyedConfiguredTarget> {

  public CqueryBuildTool(CommandEnvironment env, QueryExpression queryExpression) {
    super(env, queryExpression);
  }

  /**
   * Builds the cquery environment: the built-in cquery functions plus any functions
   * registered by the runtime, wired to the analysis-phase configurations and graph.
   */
  @Override
  protected ConfiguredTargetQueryEnvironment getQueryEnvironment(
      BuildRequest request,
      BuildConfiguration hostConfiguration,
      TopLevelConfigurations configurations,
      Collection<SkyKey> transitiveConfigurationKeys,
      WalkableGraph walkableGraph)
      throws InterruptedException {
    CqueryOptions options = request.getOptions(CqueryOptions.class);
    ImmutableList<QueryFunction> queryFunctions =
        ImmutableList.<QueryFunction>builder()
            .addAll(ConfiguredTargetQueryEnvironment.CQUERY_FUNCTIONS)
            .addAll(env.getRuntime().getQueryFunctions())
            .build();
    return new ConfiguredTargetQueryEnvironment(
        request.getKeepGoing(),
        env.getReporter(),
        queryFunctions,
        configurations,
        hostConfiguration,
        transitiveConfigurationKeys,
        env.getRelativeWorkingDirectory(),
        env.getPackageManager().getPackagePath(),
        () -> walkableGraph,
        options);
  }
}
apache-2.0
apache/olingo-odata2
odata2-lib/odata-core/src/test/java/org/apache/olingo/odata2/core/edm/provider/EdmNamedImplProvTest.java
4308
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. ******************************************************************************/ package org.apache.olingo.odata2.core.edm.provider; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import org.apache.olingo.odata2.api.edm.EdmException; import org.apache.olingo.odata2.api.edm.EdmSimpleTypeKind; import org.apache.olingo.odata2.api.edm.provider.EdmProvider; import org.apache.olingo.odata2.api.edm.provider.SimpleProperty; import org.apache.olingo.odata2.testutil.fit.BaseTest; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class EdmNamedImplProvTest extends BaseTest { @Rule public ExpectedException expectedEx = ExpectedException.none(); @Test(expected = EdmException.class) public void testPropertySimple() throws Exception { EdmProvider edmProvider = mock(EdmProvider.class); EdmImplProv edmImplProv = new EdmImplProv(edmProvider); SimpleProperty propertySimple = new SimpleProperty().setName("Prop;ertyName").setType(EdmSimpleTypeKind.String); new EdmSimplePropertyImplProv(edmImplProv, propertySimple); } @Test(expected = EdmException.class) public 
void testPropertyIllegalStartWithNumber() throws Exception { EdmProvider edmProvider = mock(EdmProvider.class); EdmImplProv edmImplProv = new EdmImplProv(edmProvider); SimpleProperty propertySimple = new SimpleProperty().setName("1_PropertyName").setType(EdmSimpleTypeKind.String); new EdmSimplePropertyImplProv(edmImplProv, propertySimple); expectedEx.expect(RuntimeException.class); expectedEx.expectMessage("'Prop;ertyName' name pattern not valid."); } @Test public void testPropertyWithNumber() throws Exception { EdmProvider edmProvider = mock(EdmProvider.class); EdmImplProv edmImplProv = new EdmImplProv(edmProvider); SimpleProperty propertySimple = new SimpleProperty().setName("Prop_1_Name").setType(EdmSimpleTypeKind .String); new EdmSimplePropertyImplProv(edmImplProv, propertySimple); assertEquals("Prop_1_Name", new EdmSimplePropertyImplProv(edmImplProv, propertySimple).getName()); } @Test public void testPropertyUmlaut() throws Exception { EdmProvider edmProvider = mock(EdmProvider.class); EdmImplProv edmImplProv = new EdmImplProv(edmProvider); SimpleProperty propertySimple = new SimpleProperty().setName("ÄropertyName").setType(EdmSimpleTypeKind.String); assertEquals("ÄropertyName", new EdmSimplePropertyImplProv(edmImplProv, propertySimple).getName()); } @Test public void testPropertyUnicode() throws Exception { EdmProvider edmProvider = mock(EdmProvider.class); EdmImplProv edmImplProv = new EdmImplProv(edmProvider); SimpleProperty propertySimple = new SimpleProperty().setName("\u00C0roperty\u00C1ame\u00C0\u00D5\u00D6") .setType(EdmSimpleTypeKind.String); assertEquals("ÀropertyÁameÀÕÖ", new EdmSimplePropertyImplProv(edmImplProv, propertySimple).getName()); } @Test public void testPropertyUnicodeTwo() throws Exception { EdmProvider edmProvider = mock(EdmProvider.class); EdmImplProv edmImplProv = new EdmImplProv(edmProvider); SimpleProperty propertySimple = new SimpleProperty().setName("Содержание") .setType(EdmSimpleTypeKind.String); assertEquals("Содержание", new 
EdmSimplePropertyImplProv(edmImplProv, propertySimple).getName()); } }
apache-2.0
shroman/ignite
modules/zookeeper/src/main/java/org/apache/ignite/spi/discovery/zk/internal/ZookeeperClient.java
38713
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spi.discovery.zk.internal; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteRunnable; import org.apache.zookeeper.AsyncCallback; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.Op; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Stat; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.IgniteSystemProperties.IGNITE_ZOOKEEPER_DISCOVERY_MAX_RETRY_COUNT; import static org.apache.ignite.IgniteSystemProperties.IGNITE_ZOOKEEPER_DISCOVERY_RETRY_TIMEOUT; /** * Zookeeper Client. 
*/ public class ZookeeperClient implements Watcher { /** */ private static final int DFLT_RETRY_TIMEOUT = 2000; /** */ private static final int DFLT_MAX_RETRY_COUNT = 10; /** */ private static final boolean PINGER_ENABLED = IgniteSystemProperties.getBoolean("IGNITE_ZOOKEEPER_DISCOVERY_PINGER_ENABLED", false); /** */ private final AtomicInteger retryCount = new AtomicInteger(); /** */ private static final int MAX_REQ_SIZE = 1048528; /** */ private static final List<ACL> ZK_ACL = ZooDefs.Ids.OPEN_ACL_UNSAFE; /** */ private static final byte[] EMPTY_BYTES = {}; /** */ private final ZooKeeper zk; /** */ private final IgniteLogger log; /** */ private ConnectionState state = ConnectionState.Disconnected; /** */ private long connLossTimeout; /** */ private volatile long connStartTime; /** */ private final Object stateMux = new Object(); /** */ private final IgniteRunnable connLostC; /** */ private final Timer connTimer; /** */ private final ArrayDeque<ZkAsyncOperation> retryQ = new ArrayDeque<>(); /** */ private volatile boolean closing; /** */ private volatile ZkPinger pinger; /** * @param log Logger. * @param connectString ZK connection string. * @param sesTimeout ZK session timeout. * @param connLostC Lost connection callback. * @throws Exception If failed. */ ZookeeperClient(IgniteLogger log, String connectString, int sesTimeout, IgniteRunnable connLostC) throws Exception { this(null, log, connectString, sesTimeout, connLostC); } /** * @param igniteInstanceName Ignite instance name. * @param log Logger. * @param connectString ZK connection string. * @param sesTimeout ZK session timeout. * @param connLostC Lost connection callback. * @throws Exception If failed. 
*/ ZookeeperClient(String igniteInstanceName, IgniteLogger log, String connectString, int sesTimeout, IgniteRunnable connLostC) throws Exception { this.log = log.getLogger(getClass()); this.connLostC = connLostC; connLossTimeout = sesTimeout; long connStartTime = this.connStartTime = System.currentTimeMillis(); connTimer = new Timer("zk-client-timer-" + igniteInstanceName); String threadName = Thread.currentThread().getName(); // ZK generates internal threads' names using current thread name. Thread.currentThread().setName("zk-" + igniteInstanceName); try { zk = new ZooKeeper(connectString, sesTimeout, this); } finally { Thread.currentThread().setName(threadName); } synchronized (stateMux) { if (connStartTime == this.connStartTime && state == ConnectionState.Disconnected) scheduleConnectionCheck(); } } /** * @return Zookeeper client. */ ZooKeeper zk() { return zk; } /** * @return {@code True} if connected to ZooKeeper. */ boolean connected() { synchronized (stateMux) { return state == ConnectionState.Connected; } } /** * @return {@code True} if pinger is enabled */ boolean pingerEnabled() { return PINGER_ENABLED; } /** */ String state() { synchronized (stateMux) { return state.toString(); } } /** {@inheritDoc} */ @Override public void process(WatchedEvent evt) { if (closing) return; if (evt.getType() == Event.EventType.None) { ConnectionState newState; synchronized (stateMux) { if (state == ConnectionState.Lost) { U.warn(log, "Received event after connection was lost [evtState=" + evt.getState() + "]"); return; } if (!zk.getState().isAlive()) return; Event.KeeperState zkState = evt.getState(); switch (zkState) { case SaslAuthenticated: return; // No-op. 
case AuthFailed: newState = state; break; case Disconnected: newState = ConnectionState.Disconnected; break; case SyncConnected: newState = ConnectionState.Connected; break; case Expired: U.warn(log, "Session expired, changing state to Lost"); newState = ConnectionState.Lost; break; default: U.error(log, "Unexpected state for ZooKeeper client, close connection: " + zkState); newState = ConnectionState.Lost; } if (newState != state) { if (log.isInfoEnabled()) log.info("ZooKeeper client state changed [prevState=" + state + ", newState=" + newState + ']'); state = newState; if (newState == ConnectionState.Disconnected) { connStartTime = System.currentTimeMillis(); scheduleConnectionCheck(); } else if (newState == ConnectionState.Connected) { retryCount.set(0); stateMux.notifyAll(); } else assert state == ConnectionState.Lost : state; } else return; } if (newState == ConnectionState.Lost) { closeClient(); notifyConnectionLost(); } else if (newState == ConnectionState.Connected) { for (ZkAsyncOperation op : retryQ) op.execute(); } } } /** * */ private void notifyConnectionLost() { if (!closing && state == ConnectionState.Lost && connLostC != null) connLostC.run(); connTimer.cancel(); } /** * @param path Path. * @return {@code True} if node exists. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. */ boolean exists(String path) throws ZookeeperClientFailedException, InterruptedException { for (;;) { long connStartTime = this.connStartTime; try { return zk.exists(path, false) != null; } catch (Exception e) { onZookeeperError(connStartTime, e); } } } /** * @param paths Paths to create. * @param createMode Create mode. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. 
*/ void createAll(List<String> paths, CreateMode createMode) throws ZookeeperClientFailedException, InterruptedException { if (paths.isEmpty()) return; List<List<Op>> batches = new LinkedList<>(); int batchSize = 0; List<Op> batch = new LinkedList<>(); for (String path : paths) { //TODO ZK: https://issues.apache.org/jira/browse/IGNITE-8187 int size = requestOverhead(path) + 48 /* overhead */; assert size <= MAX_REQ_SIZE; if (batchSize + size > MAX_REQ_SIZE) { batches.add(batch); batch = new LinkedList<>(); batchSize = 0; } batch.add(Op.create(path, EMPTY_BYTES, ZK_ACL, createMode)); batchSize += size; } batches.add(batch); for (List<Op> ops : batches) { for (;;) { long connStartTime = this.connStartTime; try { zk.multi(ops); break; } catch (KeeperException.NodeExistsException e) { if (log.isDebugEnabled()) log.debug("Failed to create nodes using bulk operation: " + e); for (Op op : ops) createIfNeeded(op.getPath(), null, createMode); break; } catch (Exception e) { onZookeeperError(connStartTime, e); } } } } /** * @param path Path. * @param data Data. * @param overhead Extra overhead. * @return {@code True} If data size exceeds max request size and should be splitted into multiple parts. */ boolean needSplitNodeData(String path, byte[] data, int overhead) { return requestOverhead(path) + data.length + overhead > MAX_REQ_SIZE; } /** * @param path Path. * @param data Data. * @param overhead Extra overhead. * @return Splitted data. 
*/ List<byte[]> splitNodeData(String path, byte[] data, int overhead) { int partSize = MAX_REQ_SIZE - requestOverhead(path) - overhead; int partCnt = data.length / partSize; if (data.length % partSize != 0) partCnt++; assert partCnt > 1 : "Do not need split"; List<byte[]> parts = new ArrayList<>(partCnt); int remaining = data.length; for (int i = 0; i < partCnt; i++) { int partSize0 = Math.min(remaining, partSize); byte[] part = new byte[partSize0]; System.arraycopy(data, i * partSize, part, 0, part.length); remaining -= partSize0; parts.add(part); } assert remaining == 0 : remaining; return parts; } /** * TODO ZK: https://issues.apache.org/jira/browse/IGNITE-8187 * @param path Request path. * @return Marshalled request overhead. */ private int requestOverhead(String path) { return path.length(); } /** * @param path Path. * @param data Data. * @param createMode Create mode. * @return Created path. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. */ String createIfNeeded(String path, byte[] data, CreateMode createMode) throws ZookeeperClientFailedException, InterruptedException { assert !createMode.isSequential() : createMode; if (data == null) data = EMPTY_BYTES; for (;;) { long connStartTime = this.connStartTime; try { return zk.create(path, data, ZK_ACL, createMode); } catch (KeeperException.NodeExistsException e) { if (log.isDebugEnabled()) log.debug("Node already exists: " + path); return path; } catch (Exception e) { onZookeeperError(connStartTime, e); } } } /** * @param checkPrefix Unique prefix to check in case of retry. * @param parentPath Parent node path. * @param path Node to create. * @param data Node data. * @param createMode Create mode. * @return Create path. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. 
*/ String createSequential(String checkPrefix, String parentPath, String path, byte[] data, CreateMode createMode) throws ZookeeperClientFailedException, InterruptedException { assert createMode.isSequential() : createMode; if (data == null) data = EMPTY_BYTES; boolean first = true; for (;;) { long connStartTime = this.connStartTime; try { if (!first) { List<String> children = zk.getChildren(parentPath, false); for (int i = 0; i < children.size(); i++) { String child = children.get(i); if (children.get(i).startsWith(checkPrefix)) { String resPath = parentPath + "/" + child; if (log.isDebugEnabled()) log.debug("Check before retry, node already created: " + resPath); return resPath; } } } return zk.create(path, data, ZK_ACL, createMode); } catch (KeeperException.NodeExistsException e) { assert !createMode.isSequential() : createMode; if (log.isDebugEnabled()) log.debug("Node already exists: " + path); return path; } catch (Exception e) { onZookeeperError(connStartTime, e); } first = false; } } /** * @param path Path. * @return Children nodes. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. */ List<String> getChildren(String path) throws ZookeeperClientFailedException, InterruptedException { for (;;) { long connStartTime = this.connStartTime; try { return zk.getChildren(path, false); } catch (Exception e) { onZookeeperError(connStartTime, e); } } } /** * Get children paths. * * @param path Path. * @return Children paths. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. */ List<String> getChildrenPaths(String path) throws ZookeeperClientFailedException, InterruptedException { List<String> children = getChildren(path); ArrayList<String> paths = new ArrayList(children.size()); for (String child : children) paths.add(path + "/" + child); return paths; } /** * @param path Path. * @throws InterruptedException If interrupted. 
* @throws KeeperException In case of error. * @return {@code True} if given path exists. */ boolean existsNoRetry(String path) throws InterruptedException, KeeperException { return zk.exists(path, false) != null; } /** * @param path Path. * @param ver Expected version. * @throws InterruptedException If interrupted. * @throws KeeperException In case of error. */ void deleteIfExistsNoRetry(String path, int ver) throws InterruptedException, KeeperException { try { zk.delete(path, ver); } catch (KeeperException.NoNodeException e) { // No-op if znode does not exist. } } /** * @param path Path. * @param ver Version. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. */ void deleteIfExists(String path, int ver) throws ZookeeperClientFailedException, InterruptedException { try { delete(path, ver); } catch (KeeperException.NoNodeException e) { // No-op if znode does not exist. } } /** * @param paths Children paths. * @param ver Version. * @throws ZookeeperClientFailedException If connection to zk was lost. * @throws InterruptedException If interrupted. 
 */
void deleteAll(List<String> paths, int ver)
    throws ZookeeperClientFailedException, InterruptedException {
    if (paths.isEmpty())
        return;

    // Group the deletes into multi() batches that stay under the ZooKeeper
    // request size limit; each batch is retried independently below.
    List<List<Op>> batches = new LinkedList<>();

    int batchSize = 0;

    List<Op> batch = new LinkedList<>();

    for (String path : paths) {
        //TODO ZK: https://issues.apache.org/jira/browse/IGNITE-8187
        int size = requestOverhead(path) + 17 /* overhead */;

        assert size <= MAX_REQ_SIZE;

        if (batchSize + size > MAX_REQ_SIZE) {
            batches.add(batch);

            batch = new LinkedList<>();

            batchSize = 0;
        }

        batch.add(Op.delete(path, ver));

        batchSize += size;
    }

    batches.add(batch);

    for (List<Op> ops : batches) {
        for (;;) {
            long connStartTime = this.connStartTime;

            try {
                zk.multi(ops);

                break;
            }
            catch (KeeperException.NoNodeException e) {
                // Some node in the batch is already gone; multi() is atomic, so
                // fall back to best-effort per-node deletes for this batch.
                if (log.isDebugEnabled())
                    log.debug("Failed to delete nodes using bulk operation: " + e);

                for (Op op : ops)
                    deleteIfExists(op.getPath(), ver);

                break;
            }
            catch (Exception e) {
                onZookeeperError(connStartTime, e);
            }
        }
    }
}

/**
 * Deletes the given znode, retrying through connection loss.
 *
 * @param path Path.
 * @param ver Version.
 * @throws KeeperException.NoNodeException If target node does not exist.
 * @throws ZookeeperClientFailedException If connection to zk was lost.
 * @throws InterruptedException If interrupted.
 */
private void delete(String path, int ver)
    throws KeeperException.NoNodeException, ZookeeperClientFailedException, InterruptedException {
    for (;;) {
        long connStartTime = this.connStartTime;

        try {
            zk.delete(path, ver);

            return;
        }
        catch (KeeperException.NoNodeException e) {
            throw e;
        }
        catch (Exception e) {
            onZookeeperError(connStartTime, e);
        }
    }
}

/**
 * Writes znode data, retrying through connection loss.
 *
 * @param path Path.
 * @param data Data.
 * @param ver Version.
 * @throws ZookeeperClientFailedException If connection to zk was lost.
 * @throws InterruptedException If interrupted.
 * @throws KeeperException.NoNodeException If node does not exist.
 * @throws KeeperException.BadVersionException If version does not match.
 */
void setData(String path, byte[] data, int ver)
    throws ZookeeperClientFailedException, InterruptedException, KeeperException.NoNodeException,
    KeeperException.BadVersionException {
    if (data == null)
        data = EMPTY_BYTES;

    for (;;) {
        long connStartTime = this.connStartTime;

        try {
            zk.setData(path, data, ver);

            return;
        }
        catch (KeeperException.BadVersionException | KeeperException.NoNodeException e) {
            throw e;
        }
        catch (Exception e) {
            onZookeeperError(connStartTime, e);
        }
    }
}

/**
 * Reads znode data, retrying through connection loss.
 *
 * @param path Path.
 * @param stat Optional {@link Stat} instance to return znode state.
 * @return Data.
 * @throws KeeperException.NoNodeException If target node does not exist.
 * @throws ZookeeperClientFailedException If connection to zk was lost.
 * @throws InterruptedException If interrupted.
 */
byte[] getData(String path, @Nullable Stat stat)
    throws KeeperException.NoNodeException, ZookeeperClientFailedException, InterruptedException {
    for (;;) {
        long connStartTime = this.connStartTime;

        try {
            return zk.getData(path, false, stat);
        }
        catch (KeeperException.NoNodeException e) {
            throw e;
        }
        catch (Exception e) {
            onZookeeperError(connStartTime, e);
        }
    }
}

/**
 * @param path Path.
 * @return Data.
 * @throws KeeperException.NoNodeException If target node does not exist.
 * @throws ZookeeperClientFailedException If connection to zk was lost.
 * @throws InterruptedException If interrupted.
 */
byte[] getData(String path)
    throws KeeperException.NoNodeException, ZookeeperClientFailedException, InterruptedException {
    return getData(path, null);
}

/**
 * Asynchronously deletes the znode ignoring a missing node.
 *
 * @param path Path.
 */
void deleteIfExistsAsync(String path) {
    new DeleteIfExistsOperation(path).execute();
}

/**
 * @param path Path.
 * @param watcher Watcher.
 * @param cb Callback.
 */
void existsAsync(String path, Watcher watcher, AsyncCallback.StatCallback cb) {
    ExistsOperation op = new ExistsOperation(path, watcher, cb);

    zk.exists(path, watcher, new StatCallbackWrapper(op), null);
}

/**
 * @param path Path.
 * @param watcher Watcher.
 * @param cb Callback.
 */
void getChildrenAsync(String path, Watcher watcher, AsyncCallback.Children2Callback cb) {
    GetChildrenOperation op = new GetChildrenOperation(path, watcher, cb);

    zk.getChildren(path, watcher, new ChildrenCallbackWrapper(op), null);
}

/**
 * @param path Path.
 * @param watcher Watcher.
 * @param cb Callback.
 */
void getDataAsync(String path, Watcher watcher, AsyncCallback.DataCallback cb) {
    GetDataOperation op = new GetDataOperation(path, watcher, cb);

    zk.getData(path, watcher, new DataCallbackWrapper(op), null);
}

/**
 * @param path Path.
 * @param data Data.
 * @param createMode Create mode.
 * @param cb Callback.
 */
private void createAsync(String path, byte[] data, CreateMode createMode, AsyncCallback.StringCallback cb) {
    if (data == null)
        data = EMPTY_BYTES;

    CreateOperation op = new CreateOperation(path, data, createMode, cb);

    zk.create(path, data, ZK_ACL, createMode, new CreateCallbackWrapper(op), null);
}

/**
 * Marks the client as closing and wakes up threads waiting in retry loops.
 */
void onCloseStart() {
    closing = true;

    synchronized (stateMux) {
        stateMux.notifyAll();
    }
}

/**
 * Stops the pinger (if enabled) and closes the underlying ZooKeeper client.
 */
public void close() {
    if (PINGER_ENABLED) {
        ZkPinger pinger0 = pinger;

        if (pinger0 != null)
            pinger0.stop();
    }

    closeClient();
}

/**
 * Decides what to do after a ZooKeeper operation failure: either blocks the
 * caller for a retry interval (retriable error within the connection-loss
 * timeout) or transitions to the Lost state, closes the client and throws.
 *
 * @param prevConnStartTime Time when connection was established.
 * @param e Error.
 * @throws ZookeeperClientFailedException If connection to zk was lost.
 * @throws InterruptedException If interrupted.
 */
private void onZookeeperError(long prevConnStartTime, Exception e)
    throws ZookeeperClientFailedException, InterruptedException {
    ZookeeperClientFailedException err = null;

    synchronized (stateMux) {
        if (closing)
            throw new ZookeeperClientFailedException("ZooKeeper client is closed.");

        U.warn(log, "Failed to execute ZooKeeper operation [err=" + e + ", state=" + state + ']');

        if (state == ConnectionState.Lost) {
            U.error(log, "Operation failed with unexpected error, connection lost: " + e, e);

            throw new ZookeeperClientFailedException(e);
        }

        boolean retry = (e instanceof KeeperException) && needRetry(((KeeperException)e).code().intValue());

        if (retry) {
            long remainingTime;

            // connStartTime == prevConnStartTime means no reconnect happened since
            // the failed operation started; begin a new disconnected period.
            if (state == ConnectionState.Connected && connStartTime == prevConnStartTime) {
                state = ConnectionState.Disconnected;

                connStartTime = System.currentTimeMillis();

                remainingTime = connLossTimeout;
            }
            else {
                assert connStartTime != 0;
                assert state == ConnectionState.Disconnected : state;

                remainingTime = connLossTimeout - (System.currentTimeMillis() - connStartTime);

                if (remainingTime <= 0) {
                    state = ConnectionState.Lost;

                    U.warn(log, "Failed to establish ZooKeeper connection, close client " +
                        "[timeout=" + connLossTimeout + ']');

                    err = new ZookeeperClientFailedException(e);
                }
            }

            if (err == null) {
                long retryTimeout = IgniteSystemProperties.getLong(IGNITE_ZOOKEEPER_DISCOVERY_RETRY_TIMEOUT,
                    DFLT_RETRY_TIMEOUT);

                U.warn(log, "ZooKeeper operation failed, will retry [err=" + e +
                    ", retryTimeout=" + retryTimeout +
                    ", connLossTimeout=" + connLossTimeout +
                    ", path=" + ((KeeperException)e).getPath() +
                    ", remainingWaitTime=" + remainingTime + ']');

                // Woken up early by onCloseStart() or simply after the retry interval.
                stateMux.wait(retryTimeout);

                if (closing)
                    throw new ZookeeperClientFailedException("ZooKeeper client is closed.");
            }
        }
        else {
            U.error(log, "Operation failed with unexpected error, close ZooKeeper client: " + e, e);

            state = ConnectionState.Lost;

            err = new ZookeeperClientFailedException(e);
        }
    }

    // Close and notify outside of the stateMux lock.
    if (err != null) {
        closeClient();

        notifyConnectionLost();

        throw err;
    }
}

/**
 * @param code Zookeeper error code.
 * @return {@code True} if can retry operation.
 */
private boolean needRetry(int code) {
    boolean retryByErrorCode = code == KeeperException.Code.CONNECTIONLOSS.intValue() ||
        code == KeeperException.Code.SESSIONMOVED.intValue() ||
        code == KeeperException.Code.OPERATIONTIMEOUT.intValue();

    if (retryByErrorCode) {
        // maxRetryCount <= 0 disables the retry limit; retryCount is shared by
        // all operations of this client instance.
        int maxRetryCount = IgniteSystemProperties.getInteger(IGNITE_ZOOKEEPER_DISCOVERY_MAX_RETRY_COUNT,
            DFLT_MAX_RETRY_COUNT);

        if (maxRetryCount <= 0 || retryCount.incrementAndGet() < maxRetryCount)
            return true;
        else
            return false;
    }
    else
        return false;
}

/**
 * Closes the underlying ZooKeeper handle and cancels the connection timer.
 */
private void closeClient() {
    try {
        zk.close();
    }
    catch (Exception closeErr) {
        U.warn(log, "Failed to close ZooKeeper client: " + closeErr, closeErr);
    }

    connTimer.cancel();
}

/**
 * Schedules a check that fails the client if the connection is not restored
 * within the connection-loss timeout.
 */
private void scheduleConnectionCheck() {
    assert state == ConnectionState.Disconnected : state;

    connTimer.schedule(new ConnectionTimeoutTask(connStartTime), connLossTimeout);
}

/**
 * @param pinger Pinger.
 */
void attachPinger(ZkPinger pinger) {
    if (PINGER_ENABLED)
        this.pinger = pinger;
}

/**
 * Async operation that can be re-executed after a connection restore.
 */
interface ZkAsyncOperation {
    /**
     *
     */
    void execute();
}

/**
 *
 */
class GetChildrenOperation implements ZkAsyncOperation {
    /** */
    private final String path;

    /** */
    private final Watcher watcher;

    /** */
    private final AsyncCallback.Children2Callback cb;

    /**
     * @param path Path.
     * @param watcher Watcher.
     * @param cb Callback.
     */
    GetChildrenOperation(String path, Watcher watcher, AsyncCallback.Children2Callback cb) {
        this.path = path;
        this.watcher = watcher;
        this.cb = cb;
    }

    /** {@inheritDoc} */
    @Override public void execute() {
        getChildrenAsync(path, watcher, cb);
    }
}

/**
 *
 */
class GetDataOperation implements ZkAsyncOperation {
    /** */
    private final String path;

    /** */
    private final Watcher watcher;

    /** */
    private final AsyncCallback.DataCallback cb;

    /**
     * @param path Path.
     * @param watcher Watcher.
     * @param cb Callback.
     */
    GetDataOperation(String path, Watcher watcher, AsyncCallback.DataCallback cb) {
        this.path = path;
        this.watcher = watcher;
        this.cb = cb;
    }

    /** {@inheritDoc} */
    @Override public void execute() {
        getDataAsync(path, watcher, cb);
    }
}

/**
 *
 */
class ExistsOperation implements ZkAsyncOperation {
    /** */
    private final String path;

    /** */
    private final Watcher watcher;

    /** */
    private final AsyncCallback.StatCallback cb;

    /**
     * @param path Path.
     * @param watcher Watcher.
     * @param cb Callback.
     */
    ExistsOperation(String path, Watcher watcher, AsyncCallback.StatCallback cb) {
        this.path = path;
        this.watcher = watcher;
        this.cb = cb;
    }

    /** {@inheritDoc} */
    @Override public void execute() {
        existsAsync(path, watcher, cb);
    }
}

/**
 *
 */
class CreateOperation implements ZkAsyncOperation {
    /** */
    private final String path;

    /** */
    private final byte[] data;

    /** */
    private final CreateMode createMode;

    /** */
    private final AsyncCallback.StringCallback cb;

    /**
     * @param path path.
     * @param data Data.
     * @param createMode Create mode.
     * @param cb Callback.
     */
    CreateOperation(String path, byte[] data, CreateMode createMode, AsyncCallback.StringCallback cb) {
        this.path = path;
        this.data = data;
        this.createMode = createMode;
        this.cb = cb;
    }

    /** {@inheritDoc} */
    @Override public void execute() {
        createAsync(path, data, createMode, cb);
    }
}

/**
 * Async delete that treats a missing node as success.
 */
class DeleteIfExistsOperation implements AsyncCallback.VoidCallback, ZkAsyncOperation {
    /** */
    private final String path;

    /**
     * @param path Path.
     */
    DeleteIfExistsOperation(String path) {
        this.path = path;
    }

    /** {@inheritDoc} */
    @Override public void execute() {
        zk.delete(path, -1, this, null);
    }

    /** {@inheritDoc} */
    @Override public void processResult(int rc, String path, Object ctx) {
        if (closing)
            return;

        if (rc == KeeperException.Code.NONODE.intValue())
            return;

        if (needRetry(rc)) {
            U.warn(log, "Failed to execute async operation, connection lost. Will retry after connection restore [" +
                "path=" + path + ']');

            retryQ.add(this);
        }
        else if (rc == KeeperException.Code.SESSIONEXPIRED.intValue())
            U.warn(log, "Failed to execute async operation, connection lost [path=" + path + ']');
        else
            assert rc == 0 : KeeperException.Code.get(rc);
    }
}

/**
 *
 */
class CreateCallbackWrapper implements AsyncCallback.StringCallback {
    /** */
    final CreateOperation op;

    /**
     * @param op Operation.
     */
    CreateCallbackWrapper(CreateOperation op) {
        this.op = op;
    }

    /** {@inheritDoc} */
    @Override public void processResult(int rc, String path, Object ctx, String name) {
        if (closing)
            return;

        // An already-existing node is not an error for async create.
        if (rc == KeeperException.Code.NODEEXISTS.intValue())
            return;

        if (needRetry(rc)) {
            U.warn(log, "Failed to execute async operation, connection lost. Will retry after connection restore [path=" + path + ']');

            retryQ.add(op);
        }
        else if (rc == KeeperException.Code.SESSIONEXPIRED.intValue())
            U.warn(log, "Failed to execute async operation, connection lost [path=" + path + ']');
        else {
            if (op.cb != null)
                op.cb.processResult(rc, path, ctx, name);
        }
    }
}

/**
 *
 */
class ChildrenCallbackWrapper implements AsyncCallback.Children2Callback {
    /** */
    private final GetChildrenOperation op;

    /**
     * @param op Operation.
     */
    private ChildrenCallbackWrapper(GetChildrenOperation op) {
        this.op = op;
    }

    /** {@inheritDoc} */
    @Override public void processResult(int rc, String path, Object ctx, List<String> children, Stat stat) {
        if (closing)
            return;

        if (needRetry(rc)) {
            U.warn(log, "Failed to execute async operation, connection lost. Will retry after connection restore [path=" + path + ']');

            retryQ.add(op);
        }
        else if (rc == KeeperException.Code.SESSIONEXPIRED.intValue())
            U.warn(log, "Failed to execute async operation, connection lost [path=" + path + ']');
        else
            op.cb.processResult(rc, path, ctx, children, stat);
    }
}

/**
 *
 */
class DataCallbackWrapper implements AsyncCallback.DataCallback {
    /** */
    private final GetDataOperation op;

    /**
     * @param op Operation.
     */
    private DataCallbackWrapper(GetDataOperation op) {
        this.op = op;
    }

    /** {@inheritDoc} */
    @Override public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
        if (closing)
            return;

        if (needRetry(rc)) {
            U.warn(log, "Failed to execute async operation, connection lost. Will retry after connection restore [path=" + path + ']');

            retryQ.add(op);
        }
        else if (rc == KeeperException.Code.SESSIONEXPIRED.intValue())
            U.warn(log, "Failed to execute async operation, connection lost [path=" + path + ']');
        else
            op.cb.processResult(rc, path, ctx, data, stat);
    }
}

/**
 *
 */
class StatCallbackWrapper implements AsyncCallback.StatCallback {
    /** */
    private final ExistsOperation op;

    /**
     * @param op Operation.
     */
    private StatCallbackWrapper(ExistsOperation op) {
        this.op = op;
    }

    /** {@inheritDoc} */
    @Override public void processResult(int rc, String path, Object ctx, Stat stat) {
        if (closing)
            return;

        if (needRetry(rc)) {
            U.warn(log, "Failed to execute async operation, connection lost. Will retry after connection restore [path=" + path + ']');

            retryQ.add(op);
        }
        else if (rc == KeeperException.Code.SESSIONEXPIRED.intValue())
            U.warn(log, "Failed to execute async operation, connection lost [path=" + path + ']');
        else
            op.cb.processResult(rc, path, ctx, stat);
    }
}

/**
 * Fails the client if the connection that was lost at construction time has
 * not been restored by the time this task fires.
 */
private class ConnectionTimeoutTask extends TimerTask {
    /** */
    private final long connectStartTime;

    /**
     * @param connectStartTime Time was connection started.
     */
    ConnectionTimeoutTask(long connectStartTime) {
        this.connectStartTime = connectStartTime;
    }

    /** {@inheritDoc} */
    @Override public void run() {
        boolean connLoss = false;

        synchronized (stateMux) {
            if (closing)
                return;

            // Only act if no reconnect happened since this task was scheduled.
            if (state == ConnectionState.Disconnected &&
                ZookeeperClient.this.connStartTime == connectStartTime) {
                state = ConnectionState.Lost;

                U.warn(log, "Failed to establish ZooKeeper connection, close client " +
                    "[timeout=" + connLossTimeout + ']');

                connLoss = true;
            }
        }

        if (connLoss) {
            closeClient();

            notifyConnectionLost();
        }
    }
}

/**
 * Client connection state.
 */
private enum ConnectionState {
    /** */
    Connected,
    /** */
    Disconnected,
    /** */
    Lost
}
}
apache-2.0
tombujok/hazelcast
hazelcast/src/test/java/com/hazelcast/internal/partition/impl/PartitionReplicaStateCheckerTest.java
12515
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.partition.impl;

import com.hazelcast.cluster.ClusterState;
import com.hazelcast.config.Config;
import com.hazelcast.config.ServiceConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.internal.partition.service.TestMigrationAwareService;
import com.hazelcast.internal.partition.service.TestPutOperation;
import com.hazelcast.nio.Address;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.Operation;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;

import static com.hazelcast.internal.cluster.impl.AdvancedClusterStateTest.changeClusterStateEventually;
import static com.hazelcast.internal.partition.AntiEntropyCorrectnessTest.setBackupPacketDropFilter;
import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link PartitionReplicaStateChecker#getPartitionServiceState()}:
 * verifies the checker reports {@code SAFE} only when all partition replicas
 * are owned, assigned to known members and in sync, and reports the matching
 * non-safe state otherwise.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class PartitionReplicaStateCheckerTest extends HazelcastTestSupport {

    /** Before first arrangement there is nothing to be out of sync. */
    @Test
    public void shouldBeSafe_whenNotInitialized() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;

        PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();
        PartitionServiceState state = replicaStateChecker.getPartitionServiceState();
        assertEquals(PartitionServiceState.SAFE, state);
    }

    /** A single master owning all partitions is trivially safe. */
    @Test
    public void shouldBeSafe_whenInitializedOnMaster() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;
        partitionService.firstArrangement();

        PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();
        PartitionServiceState state = replicaStateChecker.getPartitionServiceState();
        assertEquals(PartitionServiceState.SAFE, state);
    }

    /** Clearing a partition's replica addresses must be reported as REPLICA_NOT_OWNED. */
    @Test
    public void shouldNotBeSafe_whenMissingReplicasPresent() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;
        partitionService.firstArrangement();

        PartitionStateManager partitionStateManager = partitionService.getPartitionStateManager();
        InternalPartitionImpl partition = partitionStateManager.getPartitionImpl(0);

        // Replace the replica addresses with an all-null array of the same length.
        Address[] replicaAddresses = partition.getReplicaAddresses();
        partition.setReplicaAddresses(new Address[replicaAddresses.length]);

        PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();

        assertEquals(PartitionServiceState.REPLICA_NOT_OWNED, replicaStateChecker.getPartitionServiceState());

        // Restoring the original owners makes the service safe again.
        partition.setReplicaAddresses(replicaAddresses);
        assertEquals(PartitionServiceState.SAFE, replicaStateChecker.getPartitionServiceState());
    }

    /** A replica assigned to an address that is not a cluster member is not owned. */
    @Test
    public void shouldNotBeSafe_whenUnknownReplicaOwnerPresent() throws UnknownHostException {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;
        partitionService.firstArrangement();

        PartitionStateManager partitionStateManager = partitionService.getPartitionStateManager();
        InternalPartitionImpl partition = partitionStateManager.getPartitionImpl(0);

        // Point replica 0 at a port no member listens on.
        Address[] replicaAddresses = partition.getReplicaAddresses();
        Address[] illegalReplicaAddresses = Arrays.copyOf(replicaAddresses, replicaAddresses.length);
        Address address = new Address(replicaAddresses[0]);
        illegalReplicaAddresses[0] = new Address(address.getInetAddress(), address.getPort() + 1000);
        partition.setReplicaAddresses(illegalReplicaAddresses);

        PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();

        assertEquals(PartitionServiceState.REPLICA_NOT_OWNED, replicaStateChecker.getPartitionServiceState());

        partition.setReplicaAddresses(replicaAddresses);
        assertEquals(PartitionServiceState.SAFE, replicaStateChecker.getPartitionServiceState());
    }

    /**
     * In a non-ACTIVE cluster state a replica owned by a known-but-departed
     * member is still considered safe (FROZEN keeps the partition table).
     */
    @Test
    public void shouldBeSafe_whenKnownReplicaOwnerPresent_whileNotActive() throws UnknownHostException {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();
        HazelcastInstance hz2 = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;
        partitionService.firstArrangement();

        changeClusterStateEventually(hz2, ClusterState.FROZEN);
        hz2.shutdown();
        assertClusterSizeEventually(1, hz);

        PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();
        assertEquals(PartitionServiceState.SAFE, replicaStateChecker.getPartitionServiceState());
    }

    /** Even in FROZEN state, an address never seen as a member is not owned. */
    @Test
    public void shouldNotBeSafe_whenUnknownReplicaOwnerPresent_whileNotActive() throws UnknownHostException {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();
        HazelcastInstance hz2 = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;
        partitionService.firstArrangement();

        changeClusterStateEventually(hz2, ClusterState.FROZEN);
        hz2.shutdown();
        assertClusterSizeEventually(1, hz);

        PartitionStateManager partitionStateManager = partitionService.getPartitionStateManager();
        InternalPartitionImpl partition = partitionStateManager.getPartitionImpl(0);

        Address[] replicaAddresses = partition.getReplicaAddresses();
        Address[] illegalReplicaAddresses = Arrays.copyOf(replicaAddresses, replicaAddresses.length);
        Address address = new Address(replicaAddresses[0]);
        illegalReplicaAddresses[0] = new Address(address.getInetAddress(), address.getPort() + 1000);
        partition.setReplicaAddresses(illegalReplicaAddresses);

        PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();

        assertEquals(PartitionServiceState.REPLICA_NOT_OWNED, replicaStateChecker.getPartitionServiceState());

        partition.setReplicaAddresses(replicaAddresses);
        assertEquals(PartitionServiceState.SAFE, replicaStateChecker.getPartitionServiceState());
    }

    /** While a migration task occupies the migration thread, state is MIGRATION_LOCAL. */
    @Test
    public void shouldNotBeSafe_whenMigrationTasksScheduled() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance();

        InternalPartitionServiceImpl partitionService = getNode(hz).partitionService;

        final CountDownLatch latch = new CountDownLatch(1);

        // Block the migration thread until the latch is released.
        MigrationManager migrationManager = partitionService.getMigrationManager();
        migrationManager.schedule(new MigrationRunnable() {
            @Override
            public void run() {
                try {
                    latch.await();
                } catch (InterruptedException e) {
                    // Restore the interrupt status instead of swallowing it so the
                    // migration thread can observe the interruption and unblock.
                    Thread.currentThread().interrupt();
                }
            }
        });

        final PartitionReplicaStateChecker replicaStateChecker = partitionService.getPartitionReplicaStateChecker();

        assertEquals(PartitionServiceState.MIGRATION_LOCAL, replicaStateChecker.getPartitionServiceState());

        latch.countDown();

        assertEqualsEventually(new Callable<PartitionServiceState>() {
            @Override
            public PartitionServiceState call() throws Exception {
                return replicaStateChecker.getPartitionServiceState();
            }
        }, PartitionServiceState.SAFE);
    }

    /**
     * Drops backup packets and exhausts the replica-sync permits so backups
     * cannot catch up, then verifies REPLICA_NOT_SYNC until permits return.
     */
    @Test
    public void shouldNotBeSafe_whenReplicasAreNotSync() {
        Config config = new Config();
        ServiceConfig serviceConfig = TestMigrationAwareService.createServiceConfig(1);
        config.getServicesConfig().addServiceConfig(serviceConfig);

        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance hz = factory.newHazelcastInstance(config);
        HazelcastInstance hz2 = factory.newHazelcastInstance(config);

        InternalPartitionServiceImpl partitionService1 = getNode(hz).partitionService;
        InternalPartitionServiceImpl partitionService2 = getNode(hz2).partitionService;

        drainAllReplicaSyncPermits(partitionService1);
        drainAllReplicaSyncPermits(partitionService2);

        warmUpPartitions(hz, hz2);
        setBackupPacketDropFilter(hz, 100);
        setBackupPacketDropFilter(hz2, 100);

        NodeEngine nodeEngine = getNode(hz).nodeEngine;
        for (int i = 0; i < nodeEngine.getPartitionService().getPartitionCount(); i++) {
            Operation op = new TestPutOperationWithAsyncBackup(i);
            nodeEngine.getOperationService().invokeOnPartition(null, op, i).join();
        }

        final PartitionReplicaStateChecker replicaStateChecker1 = partitionService1.getPartitionReplicaStateChecker();
        final PartitionReplicaStateChecker replicaStateChecker2 = partitionService2.getPartitionReplicaStateChecker();

        assertEquals(PartitionServiceState.REPLICA_NOT_SYNC, replicaStateChecker1.getPartitionServiceState());
        assertEquals(PartitionServiceState.REPLICA_NOT_SYNC, replicaStateChecker2.getPartitionServiceState());

        addReplicaSyncPermits(partitionService1, 100);
        addReplicaSyncPermits(partitionService2, 100);

        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(PartitionServiceState.SAFE, replicaStateChecker1.getPartitionServiceState());
                assertEquals(PartitionServiceState.SAFE, replicaStateChecker2.getPartitionServiceState());
            }
        });
    }

    /** Releases {@code k} replica-sync permits back to the replica manager. */
    private void addReplicaSyncPermits(InternalPartitionServiceImpl partitionService, int k) {
        PartitionReplicaManager replicaManager = partitionService.getReplicaManager();
        for (int i = 0; i < k; i++) {
            replicaManager.releaseReplicaSyncPermit();
        }
    }

    /**
     * Acquires every available replica-sync permit so no sync can start.
     *
     * @return number of permits drained.
     */
    private int drainAllReplicaSyncPermits(InternalPartitionServiceImpl partitionService) {
        PartitionReplicaManager replicaManager = partitionService.getReplicaManager();
        int k = 0;
        while (replicaManager.tryToAcquireReplicaSyncPermit()) {
            k++;
        }
        return k;
    }

    /** {@link TestPutOperation} variant whose backup is async instead of sync. */
    private static class TestPutOperationWithAsyncBackup extends TestPutOperation {

        public TestPutOperationWithAsyncBackup() {
        }

        TestPutOperationWithAsyncBackup(int i) {
            super(i);
        }

        @Override
        public int getSyncBackupCount() {
            return 0;
        }

        /** Reuses the parent's sync backup count as the async count. */
        @Override
        public int getAsyncBackupCount() {
            return super.getSyncBackupCount();
        }
    }
}
apache-2.0
xindaya/bazel
src/main/java/com/google/devtools/build/lib/packages/NonconfigurableAttributeMapper.java
1959
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.packages; import com.google.common.base.Preconditions; /** * {@link AttributeMap} implementation that triggers an {@link IllegalStateException} if called * on any attribute that supports configurable values, as determined by * {@link Attribute#isConfigurable()}. * * <p>This is particularly useful for logic that doesn't have access to configurations - it * protects against undefined behavior in response to unexpected configuration-dependent inputs. */ public class NonconfigurableAttributeMapper extends AbstractAttributeMapper { private NonconfigurableAttributeMapper(Rule rule) { super(rule.getPackage(), rule.getRuleClassObject(), rule.getLabel(), rule.getAttributeContainer()); } /** * Example usage: * * <pre> * Label fooLabel = NonconfigurableAttributeMapper.of(rule).get("foo", Type.LABEL); * </pre> */ public static NonconfigurableAttributeMapper of (Rule rule) { return new NonconfigurableAttributeMapper(rule); } @Override public <T> T get(String attributeName, com.google.devtools.build.lib.syntax.Type<T> type) { Preconditions.checkState(!getAttributeDefinition(attributeName).isConfigurable(), "Attribute '%s' is potentially configurable - not allowed here", attributeName); return super.get(attributeName, type); } }
apache-2.0
mhurne/aws-sdk-java
aws-java-sdk-emr/src/main/java/com/amazonaws/services/elasticmapreduce/model/RemoveTagsRequest.java
7929
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticmapreduce.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * This input identifies a cluster and a list of tags to remove.
 * </p>
 */
public class RemoveTagsRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /** Amazon EMR resource (cluster) identifier the tags are removed from. */
    private String resourceId;

    /** Keys of the tags to remove; lazily created, see {@link #getTagKeys()}. */
    private com.amazonaws.internal.SdkInternalList<String> tagKeys;

    /**
     * Creates an empty request; populate it with the setters or the fluent
     * {@code with...} methods.
     */
    public RemoveTagsRequest() {
    }

    /**
     * Creates a request targeting the given resource.
     *
     * @param resourceId
     *        The Amazon EMR resource identifier from which tags will be
     *        removed. This value must be a cluster identifier.
     */
    public RemoveTagsRequest(String resourceId) {
        setResourceId(resourceId);
    }

    /**
     * Creates a request targeting the given resource and tag keys.
     *
     * @param resourceId
     *        The Amazon EMR resource identifier from which tags will be
     *        removed. This value must be a cluster identifier.
     * @param tagKeys
     *        A list of tag keys to remove from a resource.
     */
    public RemoveTagsRequest(String resourceId, java.util.List<String> tagKeys) {
        setResourceId(resourceId);
        setTagKeys(tagKeys);
    }

    /**
     * @param resourceId
     *        The Amazon EMR resource identifier from which tags will be
     *        removed. This value must be a cluster identifier.
     */
    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    /**
     * @return The Amazon EMR resource identifier from which tags will be
     *         removed. This value must be a cluster identifier.
     */
    public String getResourceId() {
        return this.resourceId;
    }

    /**
     * Fluent variant of {@link #setResourceId(String)}.
     *
     * @param resourceId
     *        The Amazon EMR resource identifier from which tags will be
     *        removed. This value must be a cluster identifier.
     * @return This request, for method chaining.
     */
    public RemoveTagsRequest withResourceId(String resourceId) {
        setResourceId(resourceId);
        return this;
    }

    /**
     * @return The tag keys to remove from the resource; never {@code null} —
     *         an empty list is created on first access.
     */
    public java.util.List<String> getTagKeys() {
        if (tagKeys == null) {
            tagKeys = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return tagKeys;
    }

    /**
     * Replaces the tag keys with a defensive copy of the given collection.
     *
     * @param tagKeys
     *        A list of tag keys to remove from a resource, or {@code null} to
     *        clear them.
     */
    public void setTagKeys(java.util.Collection<String> tagKeys) {
        this.tagKeys = (tagKeys == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(tagKeys);
    }

    /**
     * Appends the given keys to the existing list. Use
     * {@link #setTagKeys(java.util.Collection)} or
     * {@link #withTagKeys(java.util.Collection)} to replace them instead.
     *
     * @param tagKeys
     *        A list of tag keys to remove from a resource.
     * @return This request, for method chaining.
     */
    public RemoveTagsRequest withTagKeys(String... tagKeys) {
        if (this.tagKeys == null) {
            setTagKeys(new com.amazonaws.internal.SdkInternalList<String>(
                    tagKeys.length));
        }
        java.util.Collections.addAll(this.tagKeys, tagKeys);
        return this;
    }

    /**
     * Fluent variant of {@link #setTagKeys(java.util.Collection)}.
     *
     * @param tagKeys
     *        A list of tag keys to remove from a resource.
     * @return This request, for method chaining.
     */
    public RemoveTagsRequest withTagKeys(java.util.Collection<String> tagKeys) {
        setTagKeys(tagKeys);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder();
        buf.append("{");
        if (getResourceId() != null) {
            buf.append("ResourceId: " + getResourceId() + ",");
        }
        if (getTagKeys() != null) {
            buf.append("TagKeys: " + getTagKeys());
        }
        buf.append("}");
        return buf.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof RemoveTagsRequest)) {
            return false;
        }
        RemoveTagsRequest that = (RemoveTagsRequest) obj;
        if (that.getResourceId() == null ^ this.getResourceId() == null) {
            return false;
        }
        if (that.getResourceId() != null
                && !that.getResourceId().equals(this.getResourceId())) {
            return false;
        }
        if (that.getTagKeys() == null ^ this.getTagKeys() == null) {
            return false;
        }
        if (that.getTagKeys() != null
                && !that.getTagKeys().equals(this.getTagKeys())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((getResourceId() == null) ? 0 : getResourceId().hashCode());
        result = prime * result
                + ((getTagKeys() == null) ? 0 : getTagKeys().hashCode());
        return result;
    }

    @Override
    public RemoveTagsRequest clone() {
        return (RemoveTagsRequest) super.clone();
    }
}
apache-2.0
yuri0x7c1/ofbiz-explorer
src/test/resources/apache-ofbiz-16.11.03/applications/order/src/main/java/org/apache/ofbiz/order/test/PurchaseOrderTest.java
4793
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.apache.ofbiz.order.test;

import java.math.BigDecimal;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.util.EntityQuery;
import org.apache.ofbiz.service.testtools.OFBizTestCase;

/**
 * Integration test that creates a purchase order through the {@code storeOrder}
 * service using the OFBiz demo data (DemoCatalog / DemoSupplier / product GZ-1000).
 */
public class PurchaseOrderTest extends OFBizTestCase {

    // Login used to invoke the storeOrder service; loaded in setUp().
    protected GenericValue userLogin = null;

    // Identifiers returned by the storeOrder service, checked at the end of the test.
    protected String orderId = null;
    protected String statusId = null;

    public PurchaseOrderTest(String name) {
        super(name);
    }

    @Override
    protected void setUp() throws Exception {
        // Run the service as the built-in "system" user.
        userLogin = EntityQuery.use(delegator).from("UserLogin").where("userLoginId", "system").queryOne();
    }

    @Override
    protected void tearDown() throws Exception {
    }

    /**
     * Builds the full storeOrder service context (order header fields, one
     * order item, contact mechs, ship group, parties) and asserts that the
     * service returns both an order id and a status id.
     */
    public void testCreatePurchaseOrder() throws Exception {
        // Order header attributes.
        Map <String, Object> ctx = new HashMap<String, Object>();
        ctx.put("partyId", "Company");
        ctx.put("orderTypeId", "PURCHASE_ORDER");
        ctx.put("currencyUom", "USD");
        ctx.put("productStoreId","9000");

        // Single PO line item: 2 x GZ-1000 at a fixed unit price.
        GenericValue orderItem = delegator.makeValue("OrderItem",
            UtilMisc.toMap("orderItemSeqId", "00001", "orderItemTypeId", "PRODUCT_ORDER_ITEM",
                "prodCatalogId", "DemoCatalog", "productId", "GZ-1000",
                "quantity", new BigDecimal("2"), "isPromo", "N"));
        orderItem.set("unitPrice", new BigDecimal("1399.5"));
        orderItem.set("unitListPrice", BigDecimal.ZERO);
        orderItem.set("isModifiedPrice", "N");
        orderItem.set("statusId", "ITEM_CREATED");
        List <GenericValue> orderItems = new LinkedList<GenericValue>();
        orderItems.add(orderItem);
        ctx.put("orderItems", orderItems);

        // Order-level shipping location (contact mech 9000 from demo data).
        GenericValue orderContactMech = delegator.makeValue("OrderContactMech",
            UtilMisc.toMap("contactMechPurposeTypeId", "SHIPPING_LOCATION", "contactMechId", "9000"));
        List <GenericValue> orderContactMechs = new LinkedList<GenericValue>();
        orderContactMechs.add(orderContactMech);
        ctx.put("orderContactMechs", orderContactMechs);

        // Item-level shipping location for the single line item.
        GenericValue orderItemContactMech = delegator.makeValue("OrderItemContactMech",
            UtilMisc.toMap("contactMechPurposeTypeId", "SHIPPING_LOCATION",
                "contactMechId", "9000", "orderItemSeqId", "00001"));
        List <GenericValue> orderItemContactMechs = new LinkedList<GenericValue>();
        orderItemContactMechs.add(orderItemContactMech);
        ctx.put("orderItemContactMechs", orderItemContactMechs);

        // Ship group: UPS next-day, no split, not a gift.
        GenericValue orderItemShipGroup = delegator.makeValue("OrderItemShipGroup",
            UtilMisc.toMap("carrierPartyId", "UPS", "contactMechId", "9000",
                "isGift", "N", "maySplit", "N", "shipGroupSeqId", "00001",
                "shipmentMethodTypeId", "NEXT_DAY"));
        orderItemShipGroup.set("carrierRoleTypeId","CARRIER");
        List <GenericValue> orderItemShipGroupInfo = new LinkedList<GenericValue>();
        orderItemShipGroupInfo.add(orderItemShipGroup);
        ctx.put("orderItemShipGroupInfo", orderItemShipGroupInfo);

        // No order terms or adjustments for this test.
        List <GenericValue> orderTerms = new LinkedList<GenericValue>();
        ctx.put("orderTerms", orderTerms);
        List <GenericValue> orderAdjustments = new LinkedList<GenericValue>();
        ctx.put("orderAdjustments", orderAdjustments);

        // Parties involved in the purchase: the company buys from DemoSupplier.
        ctx.put("billToCustomerPartyId", "Company");
        ctx.put("billFromVendorPartyId", "DemoSupplier");
        ctx.put("shipFromVendorPartyId", "Company");
        ctx.put("supplierAgentPartyId", "DemoSupplier");
        ctx.put("userLogin", userLogin);

        Map <String, Object> resp = dispatcher.runSync("storeOrder", ctx);
        orderId = (String) resp.get("orderId");
        statusId = (String) resp.get("statusId");

        // The service must have created an order and assigned it a status.
        assertNotNull(orderId);
        assertNotNull(statusId);
    }
}
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-dlp/v2/1.29.2/com/google/api/services/dlp/v2/model/GooglePrivacyDlpV2CryptoKey.java
3421
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.dlp.v2.model; /** * This is a data encryption key (DEK) (as opposed to a key encryption key (KEK) stored by KMS). * When using KMS to wrap/unwrap DEKs, be sure to set an appropriate IAM policy on the KMS CryptoKey * (KEK) to ensure an attacker cannot unwrap the data crypto key. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud Data Loss Prevention (DLP) API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GooglePrivacyDlpV2CryptoKey extends com.google.api.client.json.GenericJson { /** * The value may be {@code null}. */ @com.google.api.client.util.Key private GooglePrivacyDlpV2KmsWrappedCryptoKey kmsWrapped; /** * The value may be {@code null}. */ @com.google.api.client.util.Key("transient") private GooglePrivacyDlpV2TransientCryptoKey transient__; /** * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private GooglePrivacyDlpV2UnwrappedCryptoKey unwrapped; /** * @return value or {@code null} for none */ public GooglePrivacyDlpV2KmsWrappedCryptoKey getKmsWrapped() { return kmsWrapped; } /** * @param kmsWrapped kmsWrapped or {@code null} for none */ public GooglePrivacyDlpV2CryptoKey setKmsWrapped(GooglePrivacyDlpV2KmsWrappedCryptoKey kmsWrapped) { this.kmsWrapped = kmsWrapped; return this; } /** * @return value or {@code null} for none */ public GooglePrivacyDlpV2TransientCryptoKey getTransient() { return transient__; } /** * @param transient__ transient__ or {@code null} for none */ public GooglePrivacyDlpV2CryptoKey setTransient(GooglePrivacyDlpV2TransientCryptoKey transient__) { this.transient__ = transient__; return this; } /** * @return value or {@code null} for none */ public GooglePrivacyDlpV2UnwrappedCryptoKey getUnwrapped() { return unwrapped; } /** * @param unwrapped unwrapped or {@code null} for none */ public GooglePrivacyDlpV2CryptoKey setUnwrapped(GooglePrivacyDlpV2UnwrappedCryptoKey unwrapped) { this.unwrapped = unwrapped; return this; } @Override public GooglePrivacyDlpV2CryptoKey set(String fieldName, Object value) { return (GooglePrivacyDlpV2CryptoKey) super.set(fieldName, value); } @Override public GooglePrivacyDlpV2CryptoKey clone() { return (GooglePrivacyDlpV2CryptoKey) super.clone(); } }
apache-2.0
Shashikanth-Huawei/bmp
web/gui/src/main/java/org/onosproject/ui/impl/topo/cli/ListDevices.java
1208
/* * Copyright 2016-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.ui.impl.topo.cli; import org.apache.karaf.shell.commands.Command; import org.onosproject.ui.impl.topo.model.UiSharedTopologyModel; /** * CLI command to list the UiDevices stored in the ModelCache. */ @Command(scope = "onos", name = "ui-cache-devices", description = "Lists UiDevices in the Model Cache") public class ListDevices extends AbstractElementCommand { @Override protected void execute() { UiSharedTopologyModel model = get(UiSharedTopologyModel.class); sorted(model.getDevices()).forEach(d -> print("%s", d)); } }
apache-2.0
naver/pinpoint
plugins-it/cassandra-it/src/test/java/com/navercorp/pinpoint/plugin/cassandra/CassandraDatastax_3_0_x_IT.java
1743
/* * Copyright 2018 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.cassandra; import com.navercorp.pinpoint.pluginit.utils.AgentPath; import com.navercorp.pinpoint.pluginit.utils.PluginITConstants; import com.navercorp.pinpoint.test.plugin.Dependency; import com.navercorp.pinpoint.test.plugin.ImportPlugin; import com.navercorp.pinpoint.test.plugin.PinpointAgent; import com.navercorp.pinpoint.test.plugin.PinpointPluginTestSuite; import org.junit.BeforeClass; import org.junit.runner.RunWith; /** * @author HyunGil Jeong */ @RunWith(PinpointPluginTestSuite.class) @PinpointAgent(AgentPath.PATH) @ImportPlugin({"com.navercorp.pinpoint:pinpoint-cassandra-driver-plugin", "com.navercorp.pinpoint:pinpoint-httpclient4-plugin"}) @Dependency({ // cassandra 4.x not supported "com.datastax.cassandra:cassandra-driver-core:[3.0.0,3.max)", PluginITConstants.VERSION, CassandraITConstants.COMMONS_PROFILER, CassandraITConstants.CASSANDRA_TESTCONTAINER}) public class CassandraDatastax_3_0_x_IT extends CassandraDatastaxITBase { @BeforeClass public static void beforeClass() { startCassandra(CassandraITConstants.CASSANDRA_3_X_IMAGE); } }
apache-2.0
coding0011/elasticsearch
server/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTests.java
10038
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations; import org.apache.lucene.util.BytesRef; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.test.ESTestCase; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; import static org.elasticsearch.search.aggregations.InternalMultiBucketAggregation.resolvePropertyFromPath; import static org.hamcrest.Matchers.equalTo; public class InternalMultiBucketAggregationTests extends ESTestCase { public void testResolveToAgg() { AggregationPath path = AggregationPath.parse("the_avg"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new 
InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms"); assertThat(value[0], equalTo(agg)); } public void testResolveToAggValue() { AggregationPath path = AggregationPath.parse("the_avg.value"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms"); assertThat(value[0], equalTo(2.0)); } public void testResolveToNothing() { AggregationPath path = AggregationPath.parse("foo.value"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); InvalidAggregationPathException e = expectThrows(InvalidAggregationPathException.class, () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")); assertThat(e.getMessage(), equalTo("Cannot find an aggregation named [foo] in [the_long_terms]")); } public void testResolveToUnknown() { AggregationPath path = AggregationPath.parse("the_avg.unknown"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 
1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")); assertThat(e.getMessage(), equalTo("path not supported for [the_avg]: [unknown]")); } public void testResolveToBucketCount() { AggregationPath path = AggregationPath.parse("_bucket_count"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); Object value = resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms"); assertThat(value, equalTo(1)); } public void testResolveToCount() { AggregationPath path = AggregationPath.parse("_count"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms"); assertThat(value[0], equalTo(1L)); } public void testResolveToKey() { AggregationPath path = AggregationPath.parse("_key"); List<LongTerms.Bucket> buckets = new ArrayList<>(); 
InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(agg)); LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms"); assertThat(value[0], equalTo(19L)); } public void testResolveToSpecificBucket() { AggregationPath path = AggregationPath.parse("string_terms['foo']>the_avg.value"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalStringAggs = new InternalAggregations(Collections.singletonList(agg)); List<StringTerms.Bucket> stringBuckets = Collections.singletonList(new StringTerms.Bucket( new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), 1, internalStringAggs, false, 0, DocValueFormat.RAW)); InternalTerms termsAgg = new StringTerms("string_terms", BucketOrder.count(false), 1, 0, Collections.emptyList(), Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(termsAgg)); LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); Object[] value = (Object[]) resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms"); assertThat(value[0], equalTo(2.0)); } public void testResolveToMissingSpecificBucket() { AggregationPath path = AggregationPath.parse("string_terms['bar']>the_avg.value"); List<LongTerms.Bucket> buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, 
DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()); InternalAggregations internalStringAggs = new InternalAggregations(Collections.singletonList(agg)); List<StringTerms.Bucket> stringBuckets = Collections.singletonList(new StringTerms.Bucket( new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), 1, internalStringAggs, false, 0, DocValueFormat.RAW)); InternalTerms termsAgg = new StringTerms("string_terms", BucketOrder.count(false), 1, 0, Collections.emptyList(), Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0); InternalAggregations internalAggregations = new InternalAggregations(Collections.singletonList(termsAgg)); LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); InvalidAggregationPathException e = expectThrows(InvalidAggregationPathException.class, () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")); assertThat(e.getMessage(), equalTo("Cannot find an key ['bar'] in [string_terms]")); } }
apache-2.0
shahankhatch/aurora
src/test/java/org/apache/aurora/scheduler/thrift/aop/FeatureToggleInterceptorTest.java
2870
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.aurora.scheduler.thrift.aop; import java.lang.reflect.Method; import com.google.common.base.Predicate; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.TypeLiteral; import com.google.inject.matcher.Matchers; import org.apache.aurora.common.testing.easymock.EasyMockTest; import org.apache.aurora.gen.Response; import org.apache.aurora.gen.ResponseCode; import org.apache.aurora.gen.TaskQuery; import org.apache.aurora.scheduler.thrift.auth.DecoratedThrift; import org.easymock.EasyMock; import org.junit.Before; import org.junit.Test; import static org.easymock.EasyMock.expect; import static org.junit.Assert.assertSame; public class FeatureToggleInterceptorTest extends EasyMockTest { private AnnotatedAuroraAdmin realThrift; private AnnotatedAuroraAdmin decoratedThrift; private Predicate<Method> predicate; @Before public void setUp() { realThrift = createMock(AnnotatedAuroraAdmin.class); predicate = createMock(new Clazz<Predicate<Method>>() { }); Injector injector = Guice.createInjector(new AbstractModule() { @Override protected void configure() { MockDecoratedThrift.bindForwardedMock(binder(), realThrift); bind(new TypeLiteral<Predicate<Method>>() { }).toInstance(predicate); AopModule.bindThriftDecorator( binder(), Matchers.annotatedWith(DecoratedThrift.class), new FeatureToggleInterceptor()); } }); decoratedThrift = 
injector.getInstance(AnnotatedAuroraAdmin.class); } @Test public void testPredicatePass() throws Exception { TaskQuery query = new TaskQuery(); Response response = new Response() .setResponseCode(ResponseCode.OK); expect(predicate.apply(EasyMock.anyObject())).andReturn(true); expect(realThrift.getTasksStatus(query)).andReturn(response); control.replay(); assertSame(response, decoratedThrift.getTasksStatus(query)); } @Test public void testPredicateDeny() throws Exception { TaskQuery query = new TaskQuery(); expect(predicate.apply(EasyMock.anyObject())).andReturn(false); control.replay(); assertSame(ResponseCode.ERROR, decoratedThrift.getTasksStatus(query).getResponseCode()); } }
apache-2.0
puppetlabs/aws-sdk-for-java
src/main/java/com/amazonaws/services/ec2/model/LaunchPermissionModifications.java
6993
/* * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ec2.model; /** * Launch Permission Modifications */ public class LaunchPermissionModifications { private java.util.List<LaunchPermission> add; private java.util.List<LaunchPermission> remove; /** * Returns the value of the Add property for this object. * * @return The value of the Add property for this object. */ public java.util.List<LaunchPermission> getAdd() { if (add == null) { add = new java.util.ArrayList<LaunchPermission>(); } return add; } /** * Sets the value of the Add property for this object. * * @param add The new value for the Add property for this object. */ public void setAdd(java.util.Collection<LaunchPermission> add) { if (add == null) { this.add = null; return; } java.util.List<LaunchPermission> addCopy = new java.util.ArrayList<LaunchPermission>(add.size()); addCopy.addAll(add); this.add = addCopy; } /** * Sets the value of the Add property for this object. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param add The new value for the Add property for this object. * * @return A reference to this updated object so that method calls can be chained * together. */ public LaunchPermissionModifications withAdd(LaunchPermission... 
add) { if (getAdd() == null) setAdd(new java.util.ArrayList<LaunchPermission>(add.length)); for (LaunchPermission value : add) { getAdd().add(value); } return this; } /** * Sets the value of the Add property for this object. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param add The new value for the Add property for this object. * * @return A reference to this updated object so that method calls can be chained * together. */ public LaunchPermissionModifications withAdd(java.util.Collection<LaunchPermission> add) { if (add == null) { this.add = null; } else { java.util.List<LaunchPermission> addCopy = new java.util.ArrayList<LaunchPermission>(add.size()); addCopy.addAll(add); this.add = addCopy; } return this; } /** * Returns the value of the Remove property for this object. * * @return The value of the Remove property for this object. */ public java.util.List<LaunchPermission> getRemove() { if (remove == null) { remove = new java.util.ArrayList<LaunchPermission>(); } return remove; } /** * Sets the value of the Remove property for this object. * * @param remove The new value for the Remove property for this object. */ public void setRemove(java.util.Collection<LaunchPermission> remove) { if (remove == null) { this.remove = null; return; } java.util.List<LaunchPermission> removeCopy = new java.util.ArrayList<LaunchPermission>(remove.size()); removeCopy.addAll(remove); this.remove = removeCopy; } /** * Sets the value of the Remove property for this object. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param remove The new value for the Remove property for this object. * * @return A reference to this updated object so that method calls can be chained * together. */ public LaunchPermissionModifications withRemove(LaunchPermission... 
remove) { if (getRemove() == null) setRemove(new java.util.ArrayList<LaunchPermission>(remove.length)); for (LaunchPermission value : remove) { getRemove().add(value); } return this; } /** * Sets the value of the Remove property for this object. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param remove The new value for the Remove property for this object. * * @return A reference to this updated object so that method calls can be chained * together. */ public LaunchPermissionModifications withRemove(java.util.Collection<LaunchPermission> remove) { if (remove == null) { this.remove = null; } else { java.util.List<LaunchPermission> removeCopy = new java.util.ArrayList<LaunchPermission>(remove.size()); removeCopy.addAll(remove); this.remove = removeCopy; } return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (add != null) sb.append("Add: " + add + ", "); if (remove != null) sb.append("Remove: " + remove + ", "); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getAdd() == null) ? 0 : getAdd().hashCode()); hashCode = prime * hashCode + ((getRemove() == null) ? 
0 : getRemove().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof LaunchPermissionModifications == false) return false; LaunchPermissionModifications other = (LaunchPermissionModifications)obj; if (other.getAdd() == null ^ this.getAdd() == null) return false; if (other.getAdd() != null && other.getAdd().equals(this.getAdd()) == false) return false; if (other.getRemove() == null ^ this.getRemove() == null) return false; if (other.getRemove() != null && other.getRemove().equals(this.getRemove()) == false) return false; return true; } }
apache-2.0
unisx/simplexml
test/src/org/simpleframework/xml/core/Test5Test.java
3015
package org.simpleframework.xml.core;

import junit.framework.TestCase;

import java.io.StringWriter;
import java.util.Arrays;

import org.simpleframework.xml.Element;
import org.simpleframework.xml.ElementList;
import org.simpleframework.xml.ElementListUnion;
import org.simpleframework.xml.ElementUnion;
import org.simpleframework.xml.Path;
import org.simpleframework.xml.Root;
import org.simpleframework.xml.Serializer;
import org.simpleframework.xml.core.Persister;

/**
 * Round-trip test for union annotations: a single-element {@code @ElementUnion}
 * field plus an {@code @ElementListUnion} exposed through a getter/setter pair
 * that is routed under the {@code <elements>} path element.
 */
public class Test5Test extends TestCase {

    /** Root document type serialized as {@code <test5>}. */
    @Root(name="test5")
    public static class Test5 {

        // The element name written depends on the runtime type: MyElementA ->
        // <elementA>, MyElementB -> <elementB>, plain MyElement -> <element>.
        @ElementUnion({
            @Element(name="elementA", type=MyElementA.class),
            @Element(name="elementB", type=MyElementB.class),
            @Element(name="element", type=MyElement.class)
        })
        MyElement element;

        // Backing storage for the union list; exposed only via the annotated
        // getter/setter below, not annotated itself.
        java.util.ArrayList<MyElement> elements;

        // Inline union list nested under <elements>; entry name again chosen by
        // the runtime type of each item.
        @Path(value="elements")
        @ElementListUnion({
            @ElementList(entry="elementA", type=MyElementA.class, inline=true),
            @ElementList(entry="elementB", type=MyElementB.class, inline=true),
            @ElementList(entry="element", type=MyElement.class, inline=true)
        })
        java.util.ArrayList<MyElement> getElements(){
            return this.elements;
        }

        // Setter carries the same @Path/@ElementListUnion configuration so the
        // framework pairs it with the getter above for deserialization.
        @Path(value="elements")
        @ElementListUnion({
            @ElementList(entry="elementA", type=MyElementA.class, inline=true),
            @ElementList(entry="elementB", type=MyElementB.class, inline=true),
            @ElementList(entry="element", type=MyElement.class, inline=true)
        })
        void setElements(final java.util.ArrayList<MyElement> elements){
            this.elements = elements;
        }

        // No-arg constructor required by the deserializer.
        Test5(){
        }

        // Convenience varargs constructor used by the test.
        public Test5(final MyElement element, final MyElement... elements){
            this(element, new java.util.ArrayList<MyElement>(Arrays.asList(elements)));
        }

        public Test5(final MyElement element, final java.util.ArrayList<MyElement> elements) {
            super();
            this.element = element;
            this.elements = elements;
        }
    }

    /** Base type of the union; also usable directly as {@code <element>}. */
    @Root
    public static class MyElement{
    }

    public static class MyElementA extends MyElement{
    }

    public static class MyElementB extends MyElement{
    }

    /**
     * Serializes a Test5 instance, reads it back, and serializes again,
     * printing each form for manual comparison.
     */
    public void testSerialize() throws Exception{
        Serializer s = new Persister();
        StringWriter sw = new StringWriter();
        //FIXME serialization is ok — the first write produces the expected XML
        s.write(new Test5(new MyElementA(), new MyElementA(), new MyElementB()), sw);
        String serializedForm = sw.toString();
        System.out.println(serializedForm);
        System.out.println();
        //FIXME but no idea what is happening — the read/re-write round trip below
        // is the behavior under investigation; note the test only prints, it does
        // not assert the two forms are equal.
        Test5 o = s.read(Test5.class, serializedForm);
        sw.getBuffer().setLength(0);
        s.write(o, sw);
        System.out.println(sw.toString());
        System.out.println();
        sw.getBuffer().setLength(0);
    }
}
apache-2.0
routexl/graphhopper
core/src/main/java/com/graphhopper/util/DistancePlaneProjection.java
2908
/*
 *  Licensed to GraphHopper GmbH under one or more contributor
 *  license agreements. See the NOTICE file distributed with this work for
 *  additional information regarding copyright ownership.
 *
 *  GraphHopper GmbH licenses this file to you under the Apache License,
 *  Version 2.0 (the "License"); you may not use this file except in
 *  compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.graphhopper.util;

import static java.lang.Math.*;

/**
 * Fast, approximate great-circle distance using the "spherical Earth projected
 * to a plane" formula: the latitude/longitude deltas are treated as planar
 * offsets, with the longitude delta scaled by the cosine of the mean latitude.
 * Accuracy is very good as long as the two points are close together (small
 * delta_lon); see DistanceCalcTest.
 * <p>
 * http://en.wikipedia.org/wiki/Geographical_distance#Spherical_Earth_projected_to_a_plane
 * <p>
 * http://stackoverflow.com/q/1006654
 * <p>
 * http://en.wikipedia.org/wiki/Mercator_projection#Mathematics_of_the_Mercator_projection
 * http://gis.stackexchange.com/questions/4906/why-is-law-of-cosines-more-preferable-than-haversine-when-calculating-distance-b
 * <p>
 *
 * @author Peter Karich
 */
public class DistancePlaneProjection extends DistanceCalcEarth {
    public static final DistancePlaneProjection DIST_PLANE = new DistancePlaneProjection();

    /**
     * Squared planar distance in "normalized" units; multiply its square root
     * by the Earth radius {@code R} to obtain meters.
     */
    @Override
    public double calcNormalizedDist(double fromLat, double fromLon, double toLat, double toLon) {
        double deltaLat = toRadians(toLat - fromLat);
        double deltaLon = toRadians(toLon - fromLon);
        // Shrink the longitude delta by the cosine of the mean latitude so that
        // east-west degrees are weighted correctly before squaring.
        double shrunkLon = cos(toRadians((fromLat + toLat) / 2)) * deltaLon;
        return deltaLat * deltaLat + shrunkLon * shrunkLon;
    }

    /** Converts a distance in meters into the squared normalized form. */
    @Override
    public double calcNormalizedDist(double dist) {
        double ratio = dist / R;
        return ratio * ratio;
    }

    /** Inverse of {@link #calcNormalizedDist(double)}: normalized back to meters. */
    @Override
    public double calcDenormalizedDist(double normedDist) {
        return sqrt(normedDist) * R;
    }

    /** Approximate distance in meters between two lat/lon points. */
    @Override
    public double calcDist(double fromLat, double fromLon, double toLat, double toLon) {
        return R * sqrt(calcNormalizedDist(fromLat, fromLon, toLat, toLon));
    }

    /**
     * As {@link #calcDist} but folds in the elevation difference as a third
     * planar axis when the two heights actually differ.
     */
    @Override
    public double calcDist3D(double fromLat, double fromLon, double fromHeight,
                             double toLat, double toLon, double toHeight) {
        double planarNorm = calcNormalizedDist(fromLat, fromLon, toLat, toLon);
        double verticalNorm = 0;
        if (hasElevationDiff(fromHeight, toHeight))
            verticalNorm = calcNormalizedDist(toHeight - fromHeight);
        return R * sqrt(planarNorm + verticalNorm);
    }

    @Override
    public String toString() {
        return "PLANE_PROJ";
    }
}
apache-2.0
cguzel/gora
gora-core/src/main/java/org/apache/gora/store/DataStoreFactory.java
18590
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.gora.store;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.Locale;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.store.impl.DataStoreBase;
import org.apache.gora.util.ClassLoadingUtils;
import org.apache.gora.util.GoraException;
import org.apache.gora.util.ReflectionUtils;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;

/**
 * A Factory for {@link DataStore}s. DataStoreFactory instances are thread-safe.
 */
public class DataStoreFactory{

  public static final Logger log = LoggerFactory.getLogger(DataStoreFactory.class);

  /** Classpath resource the default properties are loaded from. */
  public static final String GORA_DEFAULT_PROPERTIES_FILE = "gora.properties";

  /** Property key naming the default (non-caching) datastore implementation. */
  public static final String GORA_DEFAULT_DATASTORE_KEY = "gora.datastore.default";

  /*This selects the default caching dataStore which wraps any GORA persistency dataStore*/
  public static final String GORA_DEFAULT_CACHE_DATASTORE_KEY = "gora.cache.datastore.default";

  public static final String GORA = "gora";

  public static final String DATASTORE = "datastore";

  // Prefix "gora.datastore." used as a fallback namespace in findProperty().
  private static final String GORA_DATASTORE = GORA + "." + DATASTORE + ".";

  public static final String AUTO_CREATE_SCHEMA = "autocreateschema";

  public static final String INPUT_PATH  = "input.path";

  public static final String OUTPUT_PATH = "output.path";

  public static final String MAPPING_FILE = "mapping.file";

  public static final String SCHEMA_NAME = "schema.name";

  /**
   * Creates a new {@link Properties}. It adds the default gora configuration
   * resources. This properties object can be modified and used to instantiate
   * store instances. It is recommended to use a properties object for a single
   * store, because the properties object is passed on to store initialization
   * methods that are able to store the properties as a field.
   * @return The new properties object.
   */
  public static Properties createProps() {
    try {
      Properties properties = new Properties();
      InputStream stream = DataStoreFactory.class.getClassLoader()
        .getResourceAsStream(GORA_DEFAULT_PROPERTIES_FILE);
      if(stream != null) {
        try {
          properties.load(stream);
          return properties;
        } finally {
          stream.close();
        }
      } else {
        // Missing resource is tolerated: callers get an empty Properties.
        log.warn(GORA_DEFAULT_PROPERTIES_FILE + " not found, properties will be empty.");
      }
      return properties;
    } catch(Exception e) {
      throw new RuntimeException(e);
    }
  }

  // Static factory only; not instantiable.
  private DataStoreFactory() { }

  // Common tail of every create/get path: hands key/value classes and
  // properties to the store's own initialize().
  private static <K, T extends Persistent> void initializeDataStore(
      DataStore<K, T> dataStore, Class<K> keyClass, Class<T> persistent,
      Properties properties) throws IOException {
    dataStore.initialize(keyClass, persistent, properties);
  }

  /**
   * Instantiate a new {@link DataStore}. Uses default properties. Uses 'null' schema.
   *
   * @param dataStoreClass The datastore implementation class.
   * @param keyClass The key class.
   * @param persistent The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @return A new store instance.
   * @throws GoraException
   */
  public static <D extends DataStore<K,T>, K, T extends Persistent>
  D createDataStore(Class<D> dataStoreClass
      , Class<K> keyClass, Class<T> persistent, Configuration conf) throws GoraException {
    return createDataStore(dataStoreClass, keyClass, persistent, conf, createProps(), null);
  }

  /**
   * Instantiate a new {@link DataStore}. Uses default properties.
   *
   * @param dataStoreClass The datastore implementation class.
   * @param keyClass The key class.
   * @param persistent The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @param schemaName A default schemaname that will be put on the properties.
   * @return A new store instance.
   * @throws GoraException
   */
  public static <D extends DataStore<K,T>, K, T extends Persistent>
  D createDataStore(Class<D> dataStoreClass
      , Class<K> keyClass, Class<T> persistent, Configuration conf, String schemaName)
      throws GoraException {
    return createDataStore(dataStoreClass, keyClass, persistent, conf, createProps(), schemaName);
  }

  /**
   * Instantiate a new {@link DataStore}. This is the full variant every other
   * overload delegates to.
   *
   * @param dataStoreClass The datastore implementation class.
   * @param keyClass The key class.
   * @param persistent The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @param properties The properties to be used by the store.
   * @param schemaName A default schemaname that will be put on the properties.
   * @return A new store instance.
   * @throws GoraException
   */
  public static <D extends DataStore<K,T>, K, T extends Persistent>
  D createDataStore(Class<D> dataStoreClass, Class<K> keyClass
      , Class<T> persistent, Configuration conf, Properties properties, String schemaName)
  throws GoraException {
    try {
      setDefaultSchemaName(properties, schemaName);
      D dataStore = ReflectionUtils.newInstance(dataStoreClass);
      // Hadoop Configuration is injected before initialize() when supported.
      if ((dataStore instanceof Configurable) && conf != null) {
        ((Configurable)dataStore).setConf(conf);
      }
      initializeDataStore(dataStore, keyClass, persistent, properties);
      return dataStore;
    } catch (GoraException ex) {
      throw ex;            // already wrapped — rethrow untouched
    } catch(Exception ex) {
      throw new GoraException(ex);
    }
  }

  /**
   * Instantiate a new {@link DataStore}. Uses 'null' schema.
   *
   * @param dataStoreClass The datastore implementation class.
   * @param keyClass The key class.
   * @param persistent The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @param properties The properties to be used by the store.
   * @return A new store instance.
   * @throws GoraException
   */
  public static <D extends DataStore<K,T>, K, T extends Persistent>
  D createDataStore(Class<D> dataStoreClass
      , Class<K> keyClass, Class<T> persistent, Configuration conf, Properties properties)
      throws GoraException {
    return createDataStore(dataStoreClass, keyClass, persistent, conf, properties, null);
  }

  /**
   * Instantiate a new {@link DataStore}. Uses default properties. Uses 'null' schema.
   *
   * @param dataStoreClass The datastore implementation class.
   * @param keyClass The key class.
   * @param persistentClass The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @return A new store instance.
   * @throws GoraException
   */
  public static <D extends DataStore<K,T>, K, T extends Persistent> D getDataStore(
      Class<D> dataStoreClass, Class<K> keyClass, Class<T> persistentClass, Configuration conf)
          throws GoraException {
    return createDataStore(dataStoreClass, keyClass, persistentClass, conf, createProps(), null);
  }

  /**
   * Instantiate a new {@link DataStore}. Uses default properties. Uses 'null' schema.
   *
   * @param dataStoreClass The datastore implementation class <i>as string</i>.
   * @param keyClass The key class.
   * @param persistentClass The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @return A new store instance.
   * @throws GoraException
   */
  @SuppressWarnings("unchecked")
  public static <K, T extends Persistent> DataStore<K, T> getDataStore(
      String dataStoreClass, Class<K> keyClass, Class<T> persistentClass, Configuration conf)
          throws GoraException {
    try {
      Class<? extends DataStore<K,T>> c
        = (Class<? extends DataStore<K, T>>) ClassLoadingUtils.loadClass(dataStoreClass);
      return createDataStore(c, keyClass, persistentClass, conf, createProps(), null);
    } catch(GoraException ex) {
      throw ex;
    } catch (Exception ex) {
      throw new GoraException(ex);
    }
  }

  /**
   * Instantiate a new {@link DataStore}. Uses default properties. Uses 'null' schema.
   *
   * @param dataStoreClass The datastore implementation class <i>as string</i>.
   * @param keyClass The key class <i>as string</i>.
   * @param persistentClass The value class <i>as string</i>.
   * @param conf {@link Configuration} to be used by the store.
   * @return A new store instance.
   * @throws GoraException
   */
  @SuppressWarnings({ "unchecked" })
  public static <K, T extends Persistent> DataStore<K, T> getDataStore(
      String dataStoreClass, String keyClass, String persistentClass, Configuration conf)
          throws GoraException {
    try {
      // NOTE(review): the store class is resolved via Class.forName here while
      // key/value classes use ClassLoadingUtils — presumably intentional, but
      // worth confirming against the overload above which uses ClassLoadingUtils.
      Class<? extends DataStore<K,T>> c
        = (Class<? extends DataStore<K, T>>) Class.forName(dataStoreClass);
      Class<K> k = (Class<K>) ClassLoadingUtils.loadClass(keyClass);
      Class<T> p = (Class<T>) ClassLoadingUtils.loadClass(persistentClass);
      return createDataStore(c, k, p, conf, createProps(), null);
    } catch(GoraException ex) {
      throw ex;
    } catch (Exception ex) {
      throw new GoraException(ex);
    }
  }

  /**
   * Instantiate <i>the default</i> {@link DataStore}. Uses default properties. Uses 'null' schema.
   *
   * Note:
   * consider that default dataStore is always visible
   *
   * @param keyClass The key class.
   * @param persistent The value class.
   * @param conf {@link Configuration} to be used by the store.
   * @return A new store instance.
   * @throws GoraException
   */
  @SuppressWarnings("unchecked")
  public static <K, T extends Persistent> DataStore<K, T> getDataStore(
      Class<K> keyClass, Class<T> persistent, Configuration conf) throws GoraException {
    Properties createProps = createProps();
    Class<? extends DataStore<K, T>> c;
    try {
      // The implementation class name comes from gora.datastore.default.
      c = (Class<? extends DataStore<K, T>>) Class.forName(getDefaultDataStore(createProps));
    } catch (Exception ex) {
      throw new GoraException(ex);
    }
    return createDataStore(c, keyClass, persistent, conf, createProps, null);
  }

  /**
   * Instantiate <i>the default</i> {@link DataStore} wrapped over caching dataStore which provides caching
   * abstraction over the GORA persistence dataStore.
   * Uses default properties. Uses 'null' schema.
   *
   * Note:
   * consider that default dataStore is always visible
   *
   * @param keyClass The key class.
   * @param persistent The value class.
   * @param conf {@link Configuration} To be used by the store.
   * @param isCacheEnabled Caching enable or not.
   * @return A new store instance.
   * @throws GoraException If cache or persistency dataStore initialization interrupted.
   */
  @SuppressWarnings("unchecked")
  public static <K, T extends Persistent> DataStore<K, T> getDataStore(
      Class<K> keyClass, Class<T> persistent, Configuration conf, boolean isCacheEnabled)
          throws GoraException {
    Properties createProps = createProps();
    Class<? extends DataStore<K, T>> c;
    try {
      if (isCacheEnabled) {
        c = (Class<? extends DataStore<K, T>>) Class.forName(getDefaultCacheDataStore(createProps));
      } else {
        c = (Class<? extends DataStore<K, T>>) Class.forName(getDefaultDataStore(createProps));
      }
    } catch (Exception ex) {
      throw new GoraException(ex);
    }
    return createDataStore(c, keyClass, persistent, conf, createProps, null);
  }

  /**
   * Tries to find a property with the given baseKey. First the property
   * key constructed as "gora.&lt;classname&gt;.&lt;baseKey&gt;" is searched.
   * If not found, the property keys for all superclasses is recursively
   * tested. Lastly, the property key constructed as
   * "gora.datastore.&lt;baseKey&gt;" is searched.
   * @return the first found value, or defaultValue
   */
  public static String findProperty(Properties properties
      , DataStore<?, ?> store, String baseKey, String defaultValue) {

    //recursively try the class names until the base class
    Class<?> clazz = store.getClass();
    while(true) {
      String fullKey = GORA + "." + org.apache.gora.util.StringUtils.getClassname(clazz).toLowerCase(Locale.getDefault()) + "." + baseKey;
      String value = getProperty(properties, fullKey);
      if(value != null) {
        return value;
      }
      // Stop climbing once the common Gora base class has been tried.
      if(clazz.equals(DataStoreBase.class)) {
        break;
      }
      clazz = clazz.getSuperclass();
      if(clazz == null) {
        break;
      }
    }
    //try with "datastore"
    String fullKey = GORA + "." + DATASTORE + "." + baseKey;
    String value = getProperty(properties, fullKey);
    if(value != null) {
      return value;
    }
    return defaultValue;
  }

  /**
   * Tries to find a property with the given baseKey. First the property
   * key constructed as "gora.&lt;classname&gt;.&lt;baseKey&gt;" is searched.
   * If not found, the property keys for all superclasses is recursively
   * tested. Lastly, the property key constructed as
   * "gora.datastore.&lt;baseKey&gt;" is searched.
   * @return the first found value, or throws IOException
   */
  public static String findPropertyOrDie(Properties properties
      , DataStore<?, ?> store, String baseKey) throws IOException {
    String val = findProperty(properties, store, baseKey, null);
    if(val == null) {
      throw new IOException("Property with base name \""+baseKey+"\" could not be found, make " +
          "sure to include this property in gora.properties file");
    }
    return val;
  }

  /** Boolean-typed convenience wrapper around {@link #findProperty}. */
  public static boolean findBooleanProperty(Properties properties
      , DataStore<?, ?> store, String baseKey, String defaultValue) {
    return Boolean.parseBoolean(findProperty(properties, store, baseKey, defaultValue));
  }

  /** Whether the store should auto-create its schema; defaults to true. */
  public static boolean getAutoCreateSchema(Properties properties
      , DataStore<?,?> store) {
    return findBooleanProperty(properties, store, AUTO_CREATE_SCHEMA, "true");
  }

  /**
   * Returns the input path as read from the properties for file-backed data stores.
   */
  public static String getInputPath(Properties properties, DataStore<?,?> store) {
    return findProperty(properties, store, INPUT_PATH, null);
  }

  /**
   * Returns the output path as read from the properties for file-backed data stores.
   */
  public static String getOutputPath(Properties properties, DataStore<?,?> store) {
    return findProperty(properties, store, OUTPUT_PATH, null);
  }

  /**
   * Looks for the <code>gora-&lt;classname&gt;-mapping.xml</code> as a resource
   * on the classpath. This can however also be specified within the
   * <code>gora.properties</code> file with the key
   * <code>gora.&lt;classname&gt;.mapping.file=</code>.
   * @param properties which hold keys from which we can obtain values for datastore mappings.
   * @param store {@link org.apache.gora.store.DataStore} object to get the mapping for.
   * @param defaultValue default value for the <code>gora-&lt;classname&gt;-mapping.xml</code>
   * @return mappingFilename if one is located.
   * @throws IOException if there is a problem reading or obtaining the mapping file.
   */
  public static String getMappingFile(Properties properties, DataStore<?,?> store
      , String defaultValue) throws IOException {
    String mappingFilename = findProperty(properties, store, MAPPING_FILE, defaultValue);
    // Opened only to verify the resource exists; the name, not the stream, is returned.
    InputStream mappingFile = store.getClass().getClassLoader().getResourceAsStream(mappingFilename);
    if (mappingFile == null)
      throw new IOException("Unable to open mapping file: "+mappingFilename);
    mappingFile.close();
    return mappingFilename;
  }

  // Reads the configured default (non-caching) datastore class name.
  private static String getDefaultDataStore(Properties properties) {
    return getProperty(properties, GORA_DEFAULT_DATASTORE_KEY);
  }

  // Reads the configured default caching datastore class name.
  private static String getDefaultCacheDataStore(Properties properties) {
    return getProperty(properties, GORA_DEFAULT_CACHE_DATASTORE_KEY);
  }

  private static String getProperty(Properties properties, String key) {
    return getProperty(properties, key, null);
  }

  // Null-safe property lookup; keys that are not all-lowercase are folded to
  // lowercase with a warning before the lookup.
  private static String getProperty(Properties properties, String key, String defaultValue) {
    // NOTE(review): this regex also rejects keys containing digits or dashes,
    // not just uppercase — confirm that is intended. The second log message
    // contains the typo "lowecase"; left untouched here since it is runtime text.
    String regex = "[a-z_\\.]*";
    if (properties == null) {
      return defaultValue;
    }
    if (!key.matches(regex)) {
      log.warn("Keys should be LOWERCASE. Please change that!");
      log.warn("Using lowecase for key " + key);
      key = key.toLowerCase(Locale.getDefault());
    }
    String result = properties.getProperty(key);
    if (result == null) {
      return defaultValue;
    }
    return result;
  }

  /**
   * Set a property
   */
  private static void setProperty(Properties properties, String baseKey, String value) {
    if(value != null) {
      properties.setProperty(GORA_DATASTORE + baseKey, value);
    }
  }

  /**
   * Sets a property for the datastore of the given class
   */
  private static<D extends DataStore<K,T>, K, T extends Persistent> void setProperty(
      Properties properties, Class<D> dataStoreClass, String baseKey, String value) {
    properties.setProperty(GORA+"."+org.apache.gora.util.StringUtils.getClassname(dataStoreClass)+"."+baseKey, value);
  }

  /**
   * Gets the default schema name of a given store class
   */
  public static String getDefaultSchemaName(Properties properties, DataStore<?,?> store) {
    return findProperty(properties, store, SCHEMA_NAME, null);
  }

  /**
   * Sets the default schema name.
   */
  public static void setDefaultSchemaName(Properties properties, String schemaName) {
    if (schemaName != null) {
      setProperty(properties, SCHEMA_NAME, schemaName);
    }
  }

  /**
   * Sets the default schema name to be used by the datastore of the given class
   */
  public static<D extends DataStore<K,T>, K, T extends Persistent>
  void setDefaultSchemaName(Properties properties, Class<D> dataStoreClass, String schemaName) {
    setProperty(properties, dataStoreClass, SCHEMA_NAME, schemaName);
  }
}
apache-2.0
sn0wolf/spring-boot-shiro-orientdb
src/main/java/com/github/pires/example/ApplicationExceptionHandler.java
951
package com.github.pires.example;

import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.IncorrectCredentialsException;
import org.apache.shiro.authc.UnknownAccountException;
import org.apache.shiro.authz.UnauthenticatedException;
import org.apache.shiro.authz.UnauthorizedException;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;

/**
 * Application-wide translation of Apache Shiro security exceptions into HTTP
 * responses. Any of the listed authentication/authorization exceptions thrown
 * by a controller is converted into a 401 (Unauthorized) with an empty body.
 * <p>
 * NOTE(review): {@link UnauthorizedException} signals an <i>authorization</i>
 * failure, for which HTTP 403 (Forbidden) is the more conventional status —
 * confirm the uniform 401 mapping is intentional.
 */
@ControllerAdvice
public class ApplicationExceptionHandler {

  // Empty body: @ResponseStatus alone produces the 401 response.
  @ResponseStatus(HttpStatus.UNAUTHORIZED)
  @ExceptionHandler(
      {AuthenticationException.class, UnknownAccountException.class,
          UnauthenticatedException.class, IncorrectCredentialsException.class,
          UnauthorizedException.class})
  public void unauthorized() {
  }

}
apache-2.0
djechelon/spring-security
config/src/test/java/org/springframework/security/htmlunit/server/MockWebResponseBuilder.java
2639
/*
 * Copyright 2002-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.security.htmlunit.server;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.gargoylesoftware.htmlunit.WebRequest;
import com.gargoylesoftware.htmlunit.WebResponse;
import com.gargoylesoftware.htmlunit.WebResponseData;
import com.gargoylesoftware.htmlunit.util.NameValuePair;

import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.test.web.reactive.server.FluxExchangeResult;
import org.springframework.util.Assert;

/**
 * Adapts a WebTestClient {@link FluxExchangeResult} into the HtmlUnit
 * {@link WebResponse} that drove the original {@link WebRequest}, stamping the
 * response with the elapsed time since {@code startTime}.
 *
 * @author Rob Winch
 * @since 5.0
 */
final class MockWebResponseBuilder {

	private final long startTime;

	private final WebRequest webRequest;

	private final FluxExchangeResult<String> exchangeResult;

	MockWebResponseBuilder(long startTime, WebRequest webRequest, FluxExchangeResult<String> exchangeResult) {
		Assert.notNull(webRequest, "WebRequest must not be null");
		Assert.notNull(exchangeResult, "FluxExchangeResult must not be null");
		this.startTime = startTime;
		this.webRequest = webRequest;
		this.exchangeResult = exchangeResult;
	}

	/**
	 * Builds the HtmlUnit response: body bytes, status line and headers come
	 * from the exchange result; load time is now minus the recorded start.
	 */
	WebResponse build() throws IOException {
		HttpStatus status = this.exchangeResult.getStatus();
		WebResponseData responseData = new WebResponseData(this.exchangeResult.getResponseBodyContent(),
				status.value(), status.getReasonPhrase(), collectHeaders());
		return new WebResponse(responseData, this.webRequest, System.currentTimeMillis() - this.startTime);
	}

	// Flattens the multi-valued Spring HttpHeaders into HtmlUnit name/value pairs,
	// one pair per header value.
	private List<NameValuePair> collectHeaders() {
		HttpHeaders springHeaders = this.exchangeResult.getResponseHeaders();
		List<NameValuePair> pairs = new ArrayList<>(springHeaders.size());
		springHeaders.forEach((name, values) -> {
			for (String value : values) {
				pairs.add(new NameValuePair(name, value));
			}
		});
		return pairs;
	}

}
apache-2.0
mposolda/keycloak
testsuite/integration-arquillian/tests/base/src/main/java/org/keycloak/testsuite/arquillian/KeycloakArquillianExtension.java
4834
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.testsuite.arquillian;

import org.jboss.arquillian.container.osgi.OSGiApplicationArchiveProcessor;
import org.jboss.arquillian.container.test.impl.enricher.resource.URLResourceProvider;
import org.jboss.arquillian.container.test.spi.client.deployment.ApplicationArchiveProcessor;
import org.jboss.arquillian.container.test.spi.client.deployment.DeploymentScenarioGenerator;
import org.jboss.arquillian.core.spi.LoadableExtension;
import org.jboss.arquillian.graphene.location.ContainerCustomizableURLResourceProvider;
import org.jboss.arquillian.graphene.location.CustomizableURLResourceProvider;
import org.jboss.arquillian.test.spi.TestEnricher;
import org.jboss.arquillian.test.spi.enricher.resource.ResourceProvider;
import org.jboss.arquillian.test.spi.execution.TestExecutionDecider;
import org.keycloak.testsuite.arquillian.h2.H2TestEnricher;
import org.keycloak.testsuite.arquillian.jmx.JmxConnectorRegistryCreator;
import org.keycloak.testsuite.arquillian.decider.AdapterTestExecutionDecider;
import org.keycloak.testsuite.arquillian.decider.MigrationTestExecutionDecider;
import org.keycloak.testsuite.arquillian.decider.AuthServerExcludeExecutionDecider;
import org.keycloak.testsuite.arquillian.provider.AdminClientProvider;
import org.keycloak.testsuite.arquillian.provider.LoadBalancerControllerProvider;
import org.keycloak.testsuite.arquillian.provider.OAuthClientProvider;
import org.keycloak.testsuite.arquillian.provider.SuiteContextProvider;
import org.keycloak.testsuite.arquillian.provider.TestContextProvider;
import org.keycloak.testsuite.arquillian.provider.URLProvider;
import org.keycloak.testsuite.drone.HtmlUnitScreenshots;
import org.keycloak.testsuite.drone.KeycloakDronePostSetup;
import org.keycloak.testsuite.drone.KeycloakWebDriverConfigurator;
import org.keycloak.testsuite.utils.arquillian.fuse.KeycloakOSGiApplicationArchiveProcessor;

/**
 * Arquillian {@link LoadableExtension} that wires the Keycloak test suite into
 * the Arquillian SPI: resource providers, deployment processors, test-execution
 * deciders, lifecycle observers, and overrides of stock Arquillian/Graphene
 * services. The registration order below is preserved as-is; do not reorder
 * without verifying the override/observer semantics.
 *
 * @author tkyjovsk
 */
public class KeycloakArquillianExtension implements LoadableExtension {

    @Override
    public void register(ExtensionBuilder builder) {
        // Injectable test resources (@ArquillianResource-style providers).
        builder
                .service(ResourceProvider.class, SuiteContextProvider.class)
                .service(ResourceProvider.class, TestContextProvider.class)
                .service(ResourceProvider.class, AdminClientProvider.class)
                .service(ResourceProvider.class, OAuthClientProvider.class)
                .service(ResourceProvider.class, LoadBalancerControllerProvider.class);
        // Deployment processing plus suite lifecycle observers.
        builder
                .service(DeploymentScenarioGenerator.class, DeploymentTargetModifier.class)
                .service(ApplicationArchiveProcessor.class, DeploymentArchiveProcessor.class)
                .service(TestEnricher.class, CacheStatisticsControllerEnricher.class)
                .observer(JmxConnectorRegistryCreator.class)
                .observer(AuthServerTestEnricher.class)
                .observer(AppServerTestEnricher.class)
                .observer(CrossDCTestEnricher.class)
                .observer(HotRodStoreTestEnricher.class)
                .observer(H2TestEnricher.class);
        // Deciders that can skip tests (migration, adapter, vault, auth-server excludes).
        builder
                .service(TestExecutionDecider.class, MigrationTestExecutionDecider.class)
                .service(TestExecutionDecider.class, AdapterTestExecutionDecider.class)
                .service(TestExecutionDecider.class, VaultTestExecutionDecider.class)
                .service(TestExecutionDecider.class, AuthServerExcludeExecutionDecider.class);
        // Replacements for stock Arquillian/Graphene services with Keycloak-aware ones.
        builder
                .override(ResourceProvider.class, URLResourceProvider.class, URLProvider.class)
                .override(ResourceProvider.class, CustomizableURLResourceProvider.class, URLProvider.class)
                .override(ApplicationArchiveProcessor.class, OSGiApplicationArchiveProcessor.class,
                        KeycloakOSGiApplicationArchiveProcessor.class)
                .override(ResourceProvider.class, ContainerCustomizableURLResourceProvider.class, URLProvider.class);
        // Drone/WebDriver customization and screenshot capture.
        builder
                .observer(KeycloakWebDriverConfigurator.class)
                .observer(HtmlUnitScreenshots.class)
                .observer(KeycloakDronePostSetup.class);
    }

}
apache-2.0
britter/commons-lang
src/test/java/org/apache/commons/lang3/ArrayUtilsInsertTest.java
13102
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertNotSame; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import org.junit.jupiter.api.Test; /** * Tests ArrayUtils insert methods. 
*/ public class ArrayUtilsInsertTest { @Test public void testInsertBooleans() { final boolean[] array = {true, false, true}; final boolean[] values = {false, true, false}; final boolean[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new boolean[0], ArrayUtils.insert(0, new boolean[0], null)); assertNull(ArrayUtils.insert(42, (boolean[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new boolean[]{false, true, false, true}, ArrayUtils.insert(0, array, false)); assertArrayEquals(new boolean[]{true, false, false, true}, ArrayUtils.insert(1, array, false)); assertArrayEquals(new boolean[]{true, false, true, false}, ArrayUtils.insert(array.length, array, false)); assertArrayEquals(new boolean[]{false, true, false, true, false, true}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new boolean[]{true, false, true, false, false, true}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new boolean[]{true, false, true, false, true, false}, ArrayUtils.insert(array.length, array, values)); } @Test public void testInsertBytes() { final byte[] array = {1, 2, 3}; final byte[] values = {4, 5, 6}; final byte[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new byte[0], ArrayUtils.insert(0, new byte[0], null)); assertNull(ArrayUtils.insert(42, (byte[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new byte[]{0, 1, 2, 3}, ArrayUtils.insert(0, array, (byte) 0)); 
assertArrayEquals(new byte[]{1, 0, 2, 3}, ArrayUtils.insert(1, array, (byte) 0)); assertArrayEquals(new byte[]{1, 2, 3, 0}, ArrayUtils.insert(array.length, array, (byte) 0)); assertArrayEquals(new byte[]{4, 5, 6, 1, 2, 3}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new byte[]{1, 4, 5, 6, 2, 3}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new byte[]{1, 2, 3, 4, 5, 6}, ArrayUtils.insert(array.length, array, values)); } @Test public void testInsertChars() { final char[] array = {'a', 'b', 'c'}; final char[] values = {'d', 'e', 'f'}; final char[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new char[0], ArrayUtils.insert(0, new char[0], null)); assertNull(ArrayUtils.insert(42, (char[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new char[]{'z', 'a', 'b', 'c'}, ArrayUtils.insert(0, array, 'z')); assertArrayEquals(new char[]{'a', 'z', 'b', 'c'}, ArrayUtils.insert(1, array, 'z')); assertArrayEquals(new char[]{'a', 'b', 'c', 'z'}, ArrayUtils.insert(array.length, array, 'z')); assertArrayEquals(new char[]{'d', 'e', 'f', 'a', 'b', 'c'}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new char[]{'a', 'd', 'e', 'f', 'b', 'c'}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new char[]{'a', 'b', 'c', 'd', 'e', 'f'}, ArrayUtils.insert(array.length, array, values)); } @Test public void testInsertDoubles() { final double[] array = {1, 2, 3}; final double[] values = {4, 5, 6}; final double delta = 0.000001; final double[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result, delta); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new double[0], ArrayUtils.insert(0, 
new double[0], null), delta); assertNull(ArrayUtils.insert(42, (double[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new double[]{0, 1, 2, 3}, ArrayUtils.insert(0, array, 0), delta); assertArrayEquals(new double[]{1, 0, 2, 3}, ArrayUtils.insert(1, array, 0), delta); assertArrayEquals(new double[]{1, 2, 3, 0}, ArrayUtils.insert(array.length, array, 0), delta); assertArrayEquals(new double[]{4, 5, 6, 1, 2, 3}, ArrayUtils.insert(0, array, values), delta); assertArrayEquals(new double[]{1, 4, 5, 6, 2, 3}, ArrayUtils.insert(1, array, values), delta); assertArrayEquals(new double[]{1, 2, 3, 4, 5, 6}, ArrayUtils.insert(array.length, array, values), delta); } @Test public void testInsertFloats() { final float[] array = {1, 2, 3}; final float[] values = {4, 5, 6}; final float delta = 0.000001f; final float[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result, delta); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new float[0], ArrayUtils.insert(0, new float[0], null), delta); assertNull(ArrayUtils.insert(42, (float[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new float[]{0, 1, 2, 3}, ArrayUtils.insert(0, array, 0), delta); assertArrayEquals(new float[]{1, 0, 2, 3}, ArrayUtils.insert(1, array, 0), delta); assertArrayEquals(new float[]{1, 2, 3, 0}, ArrayUtils.insert(array.length, array, 0), delta); assertArrayEquals(new float[]{4, 5, 6, 1, 2, 3}, ArrayUtils.insert(0, array, values), delta); assertArrayEquals(new float[]{1, 4, 5, 6, 2, 3}, ArrayUtils.insert(1, array, values), delta); assertArrayEquals(new float[]{1, 2, 3, 4, 5, 6}, 
ArrayUtils.insert(array.length, array, values), delta); } @Test public void testInsertGenericArray() { final String[] array = {"a", "b", "c"}; final String[] values = {"d", "e", "f"}; final String[] result = ArrayUtils.insert(42, array, (String[]) null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new String[0], ArrayUtils.insert(0, new String[0], (String[]) null)); assertNull(ArrayUtils.insert(42, null, (String[]) null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new String[]{"z", "a", "b", "c"}, ArrayUtils.insert(0, array, "z")); assertArrayEquals(new String[]{"a", "z", "b", "c"}, ArrayUtils.insert(1, array, "z")); assertArrayEquals(new String[]{"a", "b", "c", "z"}, ArrayUtils.insert(array.length, array, "z")); assertArrayEquals(new String[]{"d", "e", "f", "a", "b", "c"}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new String[]{"a", "d", "e", "f", "b", "c"}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new String[]{"a", "b", "c", "d", "e", "f"}, ArrayUtils.insert(array.length, array, values)); } @Test public void testInsertInts() { final int[] array = {1, 2, 3}; final int[] values = {4, 5, 6}; final int[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new int[0], ArrayUtils.insert(0, new int[0], null)); assertNull(ArrayUtils.insert(42, (int[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new int[]{0, 1, 2, 3}, ArrayUtils.insert(0, array, 0)); assertArrayEquals(new int[]{1, 0, 2, 3}, 
ArrayUtils.insert(1, array, 0)); assertArrayEquals(new int[]{1, 2, 3, 0}, ArrayUtils.insert(array.length, array, 0)); assertArrayEquals(new int[]{4, 5, 6, 1, 2, 3}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new int[]{1, 4, 5, 6, 2, 3}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new int[]{1, 2, 3, 4, 5, 6}, ArrayUtils.insert(array.length, array, values)); } @Test public void testInsertLongs() { final long[] array = {1, 2, 3}; final long[] values = {4, 5, 6}; final long[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new long[0], ArrayUtils.insert(0, new long[0], null)); assertNull(ArrayUtils.insert(42, (long[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new long[]{0, 1, 2, 3}, ArrayUtils.insert(0, array, 0)); assertArrayEquals(new long[]{1, 0, 2, 3}, ArrayUtils.insert(1, array, 0)); assertArrayEquals(new long[]{1, 2, 3, 0}, ArrayUtils.insert(array.length, array, 0)); assertArrayEquals(new long[]{4, 5, 6, 1, 2, 3}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new long[]{1, 4, 5, 6, 2, 3}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new long[]{1, 2, 3, 4, 5, 6}, ArrayUtils.insert(array.length, array, values)); } @Test public void testInsertShorts() { final short[] array = {1, 2, 3}; final short[] values = {4, 5, 6}; final short[] result = ArrayUtils.insert(42, array, null); assertArrayEquals(array, result); assertNotSame(array, result); assertNull(ArrayUtils.insert(42, null, array)); assertArrayEquals(new short[0], ArrayUtils.insert(0, new short[0], null)); assertNull(ArrayUtils.insert(42, (short[]) null, null)); assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(-1, array, array)); 
assertThrows(IndexOutOfBoundsException.class, () -> ArrayUtils.insert(array.length + 1, array, array)); assertArrayEquals(new short[]{0, 1, 2, 3}, ArrayUtils.insert(0, array, (short) 0)); assertArrayEquals(new short[]{1, 0, 2, 3}, ArrayUtils.insert(1, array, (short) 0)); assertArrayEquals(new short[]{1, 2, 3, 0}, ArrayUtils.insert(array.length, array, (short) 0)); assertArrayEquals(new short[]{4, 5, 6, 1, 2, 3}, ArrayUtils.insert(0, array, values)); assertArrayEquals(new short[]{1, 4, 5, 6, 2, 3}, ArrayUtils.insert(1, array, values)); assertArrayEquals(new short[]{1, 2, 3, 4, 5, 6}, ArrayUtils.insert(array.length, array, values)); } }
apache-2.0
zhangdian/solr4.6.0
lucene/expressions/src/test/org/apache/lucene/expressions/js/TestJavascriptFunction.java
10853
package org.apache.lucene.expressions.js; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.expressions.Expression; import org.apache.lucene.util.LuceneTestCase; public class TestJavascriptFunction extends LuceneTestCase { private static double DELTA = 0.0000001; private void assertEvaluatesTo(String expression, double expected) throws Exception { Expression evaluator = JavascriptCompiler.compile(expression); double actual = evaluator.evaluate(0, null); assertEquals(expected, actual, DELTA); } public void testAbsMethod() throws Exception { assertEvaluatesTo("abs(0)", 0); assertEvaluatesTo("abs(119)", 119); assertEvaluatesTo("abs(119)", 119); assertEvaluatesTo("abs(1)", 1); assertEvaluatesTo("abs(-1)", 1); } public void testAcosMethod() throws Exception { assertEvaluatesTo("acos(-1)", Math.PI); assertEvaluatesTo("acos(-0.8660254)", Math.PI*5/6); assertEvaluatesTo("acos(-0.7071068)", Math.PI*3/4); assertEvaluatesTo("acos(-0.5)", Math.PI*2/3); assertEvaluatesTo("acos(0)", Math.PI/2); assertEvaluatesTo("acos(0.5)", Math.PI/3); assertEvaluatesTo("acos(0.7071068)", Math.PI/4); assertEvaluatesTo("acos(0.8660254)", Math.PI/6); assertEvaluatesTo("acos(1)", 0); } public void testAcoshMethod() throws Exception { 
assertEvaluatesTo("acosh(1)", 0); assertEvaluatesTo("acosh(2.5)", 1.5667992369724109); assertEvaluatesTo("acosh(1234567.89)", 14.719378760739708); } public void testAsinMethod() throws Exception { assertEvaluatesTo("asin(-1)", -Math.PI/2); assertEvaluatesTo("asin(-0.8660254)", -Math.PI/3); assertEvaluatesTo("asin(-0.7071068)", -Math.PI/4); assertEvaluatesTo("asin(-0.5)", -Math.PI/6); assertEvaluatesTo("asin(0)", 0); assertEvaluatesTo("asin(0.5)", Math.PI/6); assertEvaluatesTo("asin(0.7071068)", Math.PI/4); assertEvaluatesTo("asin(0.8660254)", Math.PI/3); assertEvaluatesTo("asin(1)", Math.PI/2); } public void testAsinhMethod() throws Exception { assertEvaluatesTo("asinh(-1234567.89)", -14.719378760740035); assertEvaluatesTo("asinh(-2.5)", -1.6472311463710958); assertEvaluatesTo("asinh(-1)", -0.8813735870195429); assertEvaluatesTo("asinh(0)", 0); assertEvaluatesTo("asinh(1)", 0.8813735870195429); assertEvaluatesTo("asinh(2.5)", 1.6472311463710958); assertEvaluatesTo("asinh(1234567.89)", 14.719378760740035); } public void testAtanMethod() throws Exception { assertEvaluatesTo("atan(-1.732050808)", -Math.PI/3); assertEvaluatesTo("atan(-1)", -Math.PI/4); assertEvaluatesTo("atan(-0.577350269)", -Math.PI/6); assertEvaluatesTo("atan(0)", 0); assertEvaluatesTo("atan(0.577350269)", Math.PI/6); assertEvaluatesTo("atan(1)", Math.PI/4); assertEvaluatesTo("atan(1.732050808)", Math.PI/3); } public void testAtan2Method() throws Exception { assertEvaluatesTo("atan2(+0,+0)", +0.0); assertEvaluatesTo("atan2(+0,-0)", +Math.PI); assertEvaluatesTo("atan2(-0,+0)", -0.0); assertEvaluatesTo("atan2(-0,-0)", -Math.PI); assertEvaluatesTo("atan2(2,2)", Math.PI/4); assertEvaluatesTo("atan2(-2,2)", -Math.PI/4); assertEvaluatesTo("atan2(2,-2)", Math.PI*3/4); assertEvaluatesTo("atan2(-2,-2)", -Math.PI*3/4); } public void testAtanhMethod() throws Exception { assertEvaluatesTo("atanh(-1)", Double.NEGATIVE_INFINITY); assertEvaluatesTo("atanh(-0.5)", -0.5493061443340549); assertEvaluatesTo("atanh(0)", 
0); assertEvaluatesTo("atanh(0.5)", 0.5493061443340549); assertEvaluatesTo("atanh(1)", Double.POSITIVE_INFINITY); } public void testCeilMethod() throws Exception { assertEvaluatesTo("ceil(0)", 0); assertEvaluatesTo("ceil(0.1)", 1); assertEvaluatesTo("ceil(0.9)", 1); assertEvaluatesTo("ceil(25.2)", 26); assertEvaluatesTo("ceil(-0.1)", 0); assertEvaluatesTo("ceil(-0.9)", 0); assertEvaluatesTo("ceil(-1.1)", -1); } public void testCosMethod() throws Exception { assertEvaluatesTo("cos(0)", 1); assertEvaluatesTo("cos(" + Math.PI/2 + ")", 0); assertEvaluatesTo("cos(" + -Math.PI/2 + ")", 0); assertEvaluatesTo("cos(" + Math.PI/4 + ")", 0.7071068); assertEvaluatesTo("cos(" + -Math.PI/4 + ")", 0.7071068); assertEvaluatesTo("cos(" + Math.PI*2/3 + ")",-0.5); assertEvaluatesTo("cos(" + -Math.PI*2/3 + ")", -0.5); assertEvaluatesTo("cos(" + Math.PI/6 + ")", 0.8660254); assertEvaluatesTo("cos(" + -Math.PI/6 + ")", 0.8660254); } public void testCoshMethod() throws Exception { assertEvaluatesTo("cosh(0)", 1); assertEvaluatesTo("cosh(-1)", 1.5430806348152437); assertEvaluatesTo("cosh(1)", 1.5430806348152437); assertEvaluatesTo("cosh(-0.5)", 1.1276259652063807); assertEvaluatesTo("cosh(0.5)", 1.1276259652063807); assertEvaluatesTo("cosh(-12.3456789)", 114982.09728671524); assertEvaluatesTo("cosh(12.3456789)", 114982.09728671524); } public void testExpMethod() throws Exception { assertEvaluatesTo("exp(0)", 1); assertEvaluatesTo("exp(-1)", 0.36787944117); assertEvaluatesTo("exp(1)", 2.71828182846); assertEvaluatesTo("exp(-0.5)", 0.60653065971); assertEvaluatesTo("exp(0.5)", 1.6487212707); assertEvaluatesTo("exp(-12.3456789)", 0.0000043485); assertEvaluatesTo("exp(12.3456789)", 229964.194569); } public void testFloorMethod() throws Exception { assertEvaluatesTo("floor(0)", 0); assertEvaluatesTo("floor(0.1)", 0); assertEvaluatesTo("floor(0.9)", 0); assertEvaluatesTo("floor(25.2)", 25); assertEvaluatesTo("floor(-0.1)", -1); assertEvaluatesTo("floor(-0.9)", -1); 
assertEvaluatesTo("floor(-1.1)", -2); } public void testHaversinMethod() throws Exception { assertEvaluatesTo("haversin(40.7143528,-74.0059731,40.759011,-73.9844722)", 5.285885589128); } public void testLnMethod() throws Exception { assertEvaluatesTo("ln(0)", Double.NEGATIVE_INFINITY); assertEvaluatesTo("ln(" + Math.E + ")", 1); assertEvaluatesTo("ln(-1)", Double.NaN); assertEvaluatesTo("ln(1)", 0); assertEvaluatesTo("ln(0.5)", -0.69314718056); assertEvaluatesTo("ln(12.3456789)", 2.51330611521); } public void testLog10Method() throws Exception { assertEvaluatesTo("log10(0)", Double.NEGATIVE_INFINITY); assertEvaluatesTo("log10(1)", 0); assertEvaluatesTo("log10(-1)", Double.NaN); assertEvaluatesTo("log10(0.5)", -0.3010299956639812); assertEvaluatesTo("log10(12.3456789)", 1.0915149771692705); } public void testLognMethod() throws Exception { assertEvaluatesTo("logn(2, 0)", Double.NEGATIVE_INFINITY); assertEvaluatesTo("logn(2, 1)", 0); assertEvaluatesTo("logn(2, -1)", Double.NaN); assertEvaluatesTo("logn(2, 0.5)", -1); assertEvaluatesTo("logn(2, 12.3456789)", 3.6259342686489378); assertEvaluatesTo("logn(2.5, 0)", Double.NEGATIVE_INFINITY); assertEvaluatesTo("logn(2.5, 1)", 0); assertEvaluatesTo("logn(2.5, -1)", Double.NaN); assertEvaluatesTo("logn(2.5, 0.5)", -0.75647079736603); assertEvaluatesTo("logn(2.5, 12.3456789)", 2.7429133874016745); } public void testMaxMethod() throws Exception { assertEvaluatesTo("max(0, 0)", 0); assertEvaluatesTo("max(1, 0)", 1); assertEvaluatesTo("max(0, -1)", 0); assertEvaluatesTo("max(-1, 0)", 0); assertEvaluatesTo("max(25, 23)", 25); } public void testMinMethod() throws Exception { assertEvaluatesTo("min(0, 0)", 0); assertEvaluatesTo("min(1, 0)", 0); assertEvaluatesTo("min(0, -1)", -1); assertEvaluatesTo("min(-1, 0)", -1); assertEvaluatesTo("min(25, 23)", 23); } public void testPowMethod() throws Exception { assertEvaluatesTo("pow(0, 0)", 1); assertEvaluatesTo("pow(0.1, 2)", 0.01); assertEvaluatesTo("pow(0.9, -1)", 1.1111111111111112); 
assertEvaluatesTo("pow(2.2, -2.5)", 0.13929749224447147); assertEvaluatesTo("pow(5, 3)", 125); assertEvaluatesTo("pow(-0.9, 5)", -0.59049); assertEvaluatesTo("pow(-1.1, 2)", 1.21); } public void testSinMethod() throws Exception { assertEvaluatesTo("sin(0)", 0); assertEvaluatesTo("sin(" + Math.PI/2 + ")", 1); assertEvaluatesTo("sin(" + -Math.PI/2 + ")", -1); assertEvaluatesTo("sin(" + Math.PI/4 + ")", 0.7071068); assertEvaluatesTo("sin(" + -Math.PI/4 + ")", -0.7071068); assertEvaluatesTo("sin(" + Math.PI*2/3 + ")", 0.8660254); assertEvaluatesTo("sin(" + -Math.PI*2/3 + ")", -0.8660254); assertEvaluatesTo("sin(" + Math.PI/6 + ")", 0.5); assertEvaluatesTo("sin(" + -Math.PI/6 + ")", -0.5); } public void testSinhMethod() throws Exception { assertEvaluatesTo("sinh(0)", 0); assertEvaluatesTo("sinh(-1)", -1.1752011936438014); assertEvaluatesTo("sinh(1)", 1.1752011936438014); assertEvaluatesTo("sinh(-0.5)", -0.52109530549); assertEvaluatesTo("sinh(0.5)", 0.52109530549); assertEvaluatesTo("sinh(-12.3456789)", -114982.09728236674); assertEvaluatesTo("sinh(12.3456789)", 114982.09728236674); } public void testSqrtMethod() throws Exception { assertEvaluatesTo("sqrt(0)", 0); assertEvaluatesTo("sqrt(-1)", Double.NaN); assertEvaluatesTo("sqrt(0.49)", 0.7); assertEvaluatesTo("sqrt(49)", 7); } public void testTanMethod() throws Exception { assertEvaluatesTo("tan(0)", 0); assertEvaluatesTo("tan(-1)", -1.55740772465); assertEvaluatesTo("tan(1)", 1.55740772465); assertEvaluatesTo("tan(-0.5)", -0.54630248984); assertEvaluatesTo("tan(0.5)", 0.54630248984); assertEvaluatesTo("tan(-1.3)", -3.60210244797); assertEvaluatesTo("tan(1.3)", 3.60210244797); } public void testTanhMethod() throws Exception { assertEvaluatesTo("tanh(0)", 0); assertEvaluatesTo("tanh(-1)", -0.76159415595); assertEvaluatesTo("tanh(1)", 0.76159415595); assertEvaluatesTo("tanh(-0.5)", -0.46211715726); assertEvaluatesTo("tanh(0.5)", 0.46211715726); assertEvaluatesTo("tanh(-12.3456789)", -0.99999999996); 
assertEvaluatesTo("tanh(12.3456789)", 0.99999999996); } }
apache-2.0
heavenlyhash/gitblit
src/main/java/com/gitblit/models/SubmoduleModel.java
1266
/* * Copyright 2012 gitblit.com. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gitblit.models; import java.io.Serializable; /** * SubmoduleModel is a serializable model class that represents a git submodule * definition. * * @author James Moger * */ public class SubmoduleModel implements Serializable { private static final long serialVersionUID = 1L; public final String name; public final String path; public final String url; public boolean hasSubmodule; public String gitblitPath; public SubmoduleModel(String name, String path, String url) { this.name = name; this.path = path; this.url = url; } public String toString() { return path + "=" + url; } }
apache-2.0
juanavelez/hazelcast
hazelcast/src/test/java/com/hazelcast/map/QueryListenerTest.java
6032
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map; import com.hazelcast.config.Config; import com.hazelcast.core.EntryAdapter; import com.hazelcast.core.EntryEvent; import com.hazelcast.core.EntryListener; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.IMap; import com.hazelcast.core.MapEvent; import com.hazelcast.query.Predicate; import com.hazelcast.query.SqlPredicate; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.TestHazelcastInstanceFactory; import com.hazelcast.test.annotation.ParallelTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.io.Serializable; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import static org.junit.Assert.assertEquals; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelTest.class}) public class QueryListenerTest extends HazelcastTestSupport { @Test public void testMapQueryListener() throws InterruptedException { Config config = getConfig(); TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config); HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config); HazelcastInstance 
instance3 = nodeFactory.newHazelcastInstance(config); final IMap<Object, Object> map = instance1.getMap("testMapQueryListener"); final Object[] addedKey = new Object[1]; final Object[] addedValue = new Object[1]; final Object[] updatedKey = new Object[1]; final Object[] oldValue = new Object[1]; final Object[] newValue = new Object[1]; final Object[] removedKey = new Object[1]; final Object[] removedValue = new Object[1]; EntryListener<Object, Object> listener = new EntryAdapter<Object, Object>() { public void entryAdded(EntryEvent<Object, Object> event) { addedKey[0] = event.getKey(); addedValue[0] = event.getValue(); } public void entryRemoved(EntryEvent<Object, Object> event) { removedKey[0] = event.getKey(); removedValue[0] = event.getOldValue(); } public void entryUpdated(EntryEvent<Object, Object> event) { updatedKey[0] = event.getKey(); oldValue[0] = event.getOldValue(); newValue[0] = event.getValue(); } public void entryEvicted(EntryEvent<Object, Object> event) { } @Override public void mapEvicted(MapEvent event) { } @Override public void mapCleared(MapEvent event) { } }; map.addEntryListener(listener, new StartsWithPredicate("a"), null, true); map.put("key1", "abc"); map.put("key2", "bcd"); map.put("key2", "axyz"); map.remove("key1"); Thread.sleep(1000); assertEquals(addedKey[0], "key1"); assertEquals(addedValue[0], "abc"); assertEquals(updatedKey[0], "key2"); assertEquals(oldValue[0], "bcd"); assertEquals(newValue[0], "axyz"); assertEquals(removedKey[0], "key1"); assertEquals(removedValue[0], "abc"); } @Test public void testMapQueryListener2() throws InterruptedException { Config cfg = getConfig(); TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(3); HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(cfg); HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(cfg); HazelcastInstance instance3 = nodeFactory.newHazelcastInstance(cfg); final IMap<Object, Object> map = instance1.getMap("testMapQueryListener2"); 
final AtomicInteger addCount = new AtomicInteger(0); EntryListener<Object, Object> listener = new EntryAdapter<Object, Object>() { public void entryAdded(EntryEvent<Object, Object> event) { addCount.incrementAndGet(); } }; Predicate<Object, Object> predicate = new SqlPredicate("age >= 50"); map.addEntryListener(listener, predicate, null, false); int size = 100; for (int i = 0; i < size; i++) { Person person = new Person("name", i); map.put(i, person); } Thread.sleep(1000); assertEquals(50, addCount.get()); } static class StartsWithPredicate implements Predicate<Object, Object>, Serializable { String pref; StartsWithPredicate(String pref) { this.pref = pref; } @Override public boolean apply(Map.Entry<Object, Object> mapEntry) { String val = (String) mapEntry.getValue(); if (val == null) { return false; } if (val.startsWith(pref)) { return true; } return false; } } static class Person implements Serializable { String name; int age; Person() { } Person(String name, int age) { this.name = name; this.age = age; } } }
apache-2.0
paplorinc/intellij-community
java/java-impl/src/com/intellij/refactoring/replaceConstructorWithBuilder/ReplaceConstructorWithBuilderViewDescriptor.java
1084
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.replaceConstructorWithBuilder; import com.intellij.psi.PsiElement; import com.intellij.refactoring.ui.UsageViewDescriptorAdapter; import org.jetbrains.annotations.NotNull; public class ReplaceConstructorWithBuilderViewDescriptor extends UsageViewDescriptorAdapter{ @Override @NotNull public PsiElement[] getElements() { return PsiElement.EMPTY_ARRAY; } @Override public String getProcessedElementsHeader() { return ""; } }
apache-2.0
rpiotti/Web-Karma
karma-common/src/main/java/edu/isi/karma/rep/sources/InvocationManager.java
11985
/******************************************************************************* * Copyright 2012 University of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * This code was developed by the Information Integration Group as part * of the Karma project at the Information Sciences Institute of the * University of Southern California. For more information, publications, * and related projects, please see: http://www.isi.edu/integration ******************************************************************************/ package edu.isi.karma.rep.sources; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import edu.isi.karma.common.HttpMethods; import edu.isi.karma.util.RandomGUID; import edu.isi.karma.webserver.KarmaException; public class InvocationManager { static Logger logger = LoggerFactory.getLogger(InvocationManager.class); private List<URL> requestURLs; private List<String> idList; private List<Invocation> invocations; private Table serviceData; private String urlColumnName; private JsonArray json; private JsonArray jsonUrl; private JsonArray jsonInputs; private JsonArray jsonOutputs; private JsonArray jsonUrlAndInputs; private JsonArray jsonUrlAndOutputs; private JsonArray jsonInputsAndOutputs; private 
String encoding; public InvocationManager(String urlColumnName, List<String> idList, List<String> requestURLStrings, String encoding) throws MalformedURLException, KarmaException { this.urlColumnName = (urlColumnName == null || urlColumnName.trim().length() == 0) ? "url" : urlColumnName; this.idList = idList; this.encoding = encoding; requestURLs = URLManager.getURLsFromStrings(requestURLStrings); if (requestURLs == null || requestURLs.size() == 0) throw new KarmaException("Cannot model a service without any request example."); this.serviceData = null; this.invocations = new ArrayList<Invocation>(); json = new JsonArray(); jsonUrl = new JsonArray(); jsonInputs = new JsonArray(); jsonOutputs = new JsonArray(); jsonUrlAndInputs = new JsonArray(); jsonUrlAndOutputs = new JsonArray(); jsonInputsAndOutputs = new JsonArray(); invokeAndGetResponse(); } public InvocationManager(String urlColumnName, String requestURLString) throws MalformedURLException, KarmaException { this.urlColumnName = (urlColumnName == null || urlColumnName.trim().length() == 0) ? 
"url" : urlColumnName; this.idList = new ArrayList<String>(); this.idList.add("1"); List<String> requestURLList = new ArrayList<String>(); requestURLList.add(requestURLString); requestURLs = URLManager.getURLsFromStrings(requestURLList); if (requestURLs == null || requestURLs.size() == 0) throw new KarmaException("Cannot model a service without any request example."); this.serviceData = null; this.invocations = new ArrayList<Invocation>(); json = new JsonArray(); jsonUrl = new JsonArray(); jsonInputs = new JsonArray(); jsonOutputs = new JsonArray(); jsonUrlAndInputs = new JsonArray(); jsonUrlAndOutputs = new JsonArray(); jsonInputsAndOutputs = new JsonArray(); invokeAndGetResponse(); } private void invokeAndGetResponse() { for (int i = 0; i < requestURLs.size(); i++) { URL url = requestURLs.get(i); String requestId = null; if (idList != null) requestId = idList.get(i); Request request = new Request(url); Invocation invocation = new Invocation(requestId, request, encoding); logger.info("Invoking the service " + request.getUrl().toString() + " ..."); invocation.invokeAPI(); invocations.add(invocation); } List<Table> invocationData = new ArrayList<Table>(); for (Invocation inv : this.invocations) { populateJsonArraysFromInvocation(inv); invocationData.add(inv.getJointInputAndOutput()); } logger.info("Integrating the results of all invocations ..."); Table result = Table.union(invocationData); logger.info("Integrating finished."); this.serviceData = result; } private void populateJsonArraysFromInvocation(Invocation inv) { try { JsonElement out = new JsonParser().parse(inv.getJsonResponse()); // JsonArray outArray = new JsonArray(); // outArray.add(out); this.jsonOutputs.add(out); JsonObject url = new JsonObject(); url.addProperty(this.urlColumnName, inv.getRequest().getUrl().toString()); // JsonArray urlArray = new JsonArray(); // urlArray.add(url); this.jsonUrl.add(url); JsonObject in = new JsonObject(); for (Attribute att : inv.getRequest().getAttributes()) 
in.addProperty(att.getName(), att.getValue()); // JsonArray inArray = new JsonArray(); // inArray.add(in); this.jsonInputs.add(in); JsonObject urlAndIn = new JsonObject(); urlAndIn.addProperty(this.urlColumnName, inv.getRequest().getUrl().toString()); for (Attribute att : inv.getRequest().getAttributes()) urlAndIn.addProperty(att.getName(), att.getValue()); this.jsonUrlAndInputs.add(urlAndIn); JsonArray urlAndOut = new JsonArray(); urlAndOut.add(url); urlAndOut.add(out); this.jsonUrlAndOutputs.add(urlAndOut); JsonArray inAndOut = new JsonArray(); inAndOut.add(in); inAndOut.add(out); this.jsonInputsAndOutputs.add(inAndOut); JsonArray all = new JsonArray(); all.add(urlAndIn); all.add(out); this.json.add(all); } catch (Exception e) { logger.debug("Error in parsing json returned by the invocation " + inv.getRequest().getUrl().toString()); } } public String getServiceJson(boolean includeURL, boolean includeInputAttributes, boolean includeOutputAttributes) { if (includeURL && includeInputAttributes && includeOutputAttributes) return this.json.toString(); else if (includeURL && includeInputAttributes) return this.jsonUrlAndInputs.toString(); else if (includeURL && includeOutputAttributes) return this.jsonUrlAndOutputs.toString(); else if (includeInputAttributes && includeOutputAttributes) return this.jsonInputsAndOutputs.toString(); else if (includeURL) return this.jsonUrl.toString(); else if (includeInputAttributes) return this.jsonInputs.toString(); else if (includeOutputAttributes) return this.jsonOutputs.toString(); else return ""; } public Table getServiceData(boolean includeURL, boolean includeInputAttributes, boolean includeOutputAttributes) { if (includeURL && includeInputAttributes && includeOutputAttributes) return this.serviceData; List<Attribute> headers = this.serviceData.getHeaders(); List<List<String>> values = this.serviceData.getValues(); Table newTable = new Table(); List<Attribute> newHeader = new ArrayList<Attribute>(); List<List<String>> newValues = 
new ArrayList<List<String>>(); List<String> newRowIds = new ArrayList<String>(this.serviceData.getRowIds()); List<Integer> includingColumns = new ArrayList<Integer>(); if (headers != null) { if (includeURL && headers.size() > 0) includingColumns.add(0); for (int i = 1; i < this.serviceData.getHeaders().size(); i++) { if (includeInputAttributes && headers.get(i).getIOType() == IOType.INPUT) includingColumns.add(i); if (includeOutputAttributes && headers.get(i).getIOType() == IOType.OUTPUT) includingColumns.add(i); } } for (Integer colIndex : includingColumns) { newHeader.add(headers.get(colIndex)); } for (List<String> vals : values) { List<String> rowVals = new ArrayList<String>(); for (Integer colIndex : includingColumns) rowVals.add(vals.get(colIndex)); newValues.add(rowVals); } newTable.setHeaders(newHeader); newTable.setValues(newValues); newTable.setRowIds(newRowIds); return newTable; } public Table getServiceData() { return getServiceData(true, true, true); } public String getServiceJson(boolean includeInputAttributes) { if (includeInputAttributes) return getServiceJson(true, true, true); return getServiceJson(false, false, true); } private List<Attribute> getInputAttributes() { List<Attribute> inAttributes = new ArrayList<Attribute>(); Table serviceTable = getServiceData(); for (Attribute p : serviceTable.getHeaders()) { if (p.getIOType().equalsIgnoreCase(IOType.INPUT)) { inAttributes.add(p); } } return inAttributes; } private List<Attribute> getOutputAttributes() { List<Attribute> outAttributes = new ArrayList<Attribute>(); Table serviceTable = getServiceData(); for (Attribute p : serviceTable.getHeaders()) { if (p.getIOType().equalsIgnoreCase(IOType.OUTPUT)) outAttributes.add(p); } return outAttributes; } /** * This method creates a new service model which includes only the * service endpoint, http method, input and output attributes * @return */ public WebService getInitialServiceModel(String serviceName) { String guid = new RandomGUID().toString(); // 
guid = "E9C3F8D3-F778-5C4B-E089-C1749D50AE1F"; URL sampleUrl = requestURLs.get(0); if (sampleUrl == null) return null; WebService service = null; if (serviceName == null || serviceName.trim().length() == 0) service = new WebService(guid, sampleUrl); else service = new WebService(guid, serviceName, sampleUrl); service.setMethod(HttpMethods.GET.name()); service.setInputAttributes(getInputAttributes()); service.setOutputAttributes(getOutputAttributes()); return service; } /*public static void main(String[] args) { // String s1 = "http://colo-vm10.isi.edu:8080/DovetailService/GetSampleData?sourceName=KDD-02-B-TOSIG"; String s1 = "http://api.geonames.org/neighbourhood?lat=40.78343&lng=-73.96625&username=karma"; // String s1 = "http://api.geonames.org/postalCodeCountryInfo?username=karma"; // String s2 = "http://api.geonames.org/neighbourhood?lat=40.7&lng=-73.9&username=karma"; // String s3 = "http://api.geonames.org/neighbourhood?lat=40.9&lng=-73.9&username=karma"; List<String> urls = new ArrayList<String>(); urls.add(s1); // urls.add(s2); // urls.add(s3); List<String> ids = new ArrayList<String>(); ids.add("1"); // ids.add("2"); // ids.add("3"); try { InvocationManager sb = new InvocationManager(null, ids, urls, "UTF-8"); Table tb = sb.getServiceData(false, false, true); // String str = tb.asCSV(); // File f = new File("csv"); // PrintWriter pw = new PrintWriter(f); // pw.write(str); // pw.close(); logger.debug(tb.getPrintInfo()); WebService service = sb.getInitialServiceModel(null); // just for test service.getInputAttributes().get(0).sethNodeId("HN1"); service.getInputAttributes().get(1).sethNodeId("HN2"); service.getOutputAttributes().get(4).sethNodeId("HN3"); service.getOutputAttributes().get(6).sethNodeId("HN4"); service.getOutputAttributes().get(5).sethNodeId("HN5"); service.getOutputAttributes().get(3).sethNodeId("HN6"); service.print(); service.updateModel(Test.getGeoNamesNeighbourhoodTree()); String dir = Repository.Instance().SOURCE_REPOSITORY_DIR; 
service.getInputModel().writeJenaModelToFile(dir + "model", "N3"); System.out.println(service.getInputModel().getSparql(null)); } catch (Exception e) { e.printStackTrace(); } }*/ }
apache-2.0
squidsolutions/bonecp
bonecp/src/test/java/com/jolbox/bonecp/TestCloseThreadMonitor.java
3341
/** * Copyright 2010 Wallace Wadge * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * */ package com.jolbox.bonecp; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import static org.easymock.EasyMock.*; /** Test for CloseThreadMonitor class * @author Wallace * */ public class TestCloseThreadMonitor { /** Mock handle. */ private ConnectionHandle mockConnection; /** Mock handle. */ private Logger mockLogger; /** Mock handle. */ private Thread mockThread; /** Class under test. */ private CloseThreadMonitor testClass; /** * Test setup * @throws NoSuchFieldException * @throws SecurityException * @throws IllegalAccessException * @throws IllegalArgumentException */ @Before public void before() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException{ mockConnection = createNiceMock(ConnectionHandle.class); mockThread = createNiceMock(Thread.class); testClass = new CloseThreadMonitor(mockThread, mockConnection, "fakeexception", 0); mockLogger = TestUtils.mockLogger(testClass.getClass()); } /** Tests the normal case. * @throws InterruptedException */ @Test public void testConnectionCorrectlyClosed() throws InterruptedException{ mockThread.join(); // expectLastCall().once(); expect(mockConnection.isClosed()).andReturn(true).once(); replay(mockConnection, mockLogger, mockThread); testClass.run(); verify(mockConnection, mockLogger, mockThread); } /** Test case where the connection is not closed. 
* @throws SecurityException * @throws NoSuchFieldException * @throws IllegalArgumentException * @throws IllegalAccessException */ @Test public void testConnectionNotClosed() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException{ mockLogger.error((String)anyObject()); expectLastCall().once(); expect(mockConnection.isClosed()).andReturn(false).once(); expect(mockConnection.getThreadUsingConnection()).andReturn(mockThread).once(); replay(mockConnection, mockLogger); testClass.run(); verify(mockConnection, mockLogger); } /** Code coverage. * @throws SecurityException * @throws NoSuchFieldException * @throws IllegalArgumentException * @throws IllegalAccessException * @throws InterruptedException */ @Test public void testConnectionInterrupted() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException, InterruptedException{ expect(mockConnection.isClosed()).andThrow(new RuntimeException()).once(); replay(mockConnection, mockLogger); testClass.run(); verify(mockConnection, mockLogger); } }
apache-2.0
monetate/druid
indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/distribution/ArrayOfStringsNullSafeSerde.java
3797
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.common.task.batch.parallel.distribution; import org.apache.datasketches.ArrayOfItemsSerDe; import org.apache.datasketches.ArrayOfStringsSerDe; import org.apache.datasketches.Util; import org.apache.datasketches.memory.Memory; import org.apache.datasketches.memory.WritableMemory; import org.apache.druid.data.input.StringTuple; import org.apache.druid.java.util.common.IAE; import java.nio.charset.StandardCharsets; /** * Serde for {@link StringTuple}. * <p> * The implementation is the same as {@link ArrayOfStringsSerDe}, except this * class handles null String values as well. 
*/ public class ArrayOfStringsNullSafeSerde extends ArrayOfItemsSerDe<String> { private static final int NULL_STRING_LENGTH = -1; @Override public byte[] serializeToByteArray(final String[] items) { // Determine the bytes for each String int length = 0; final byte[][] itemsBytes = new byte[items.length][]; for (int i = 0; i < items.length; i++) { length += Integer.BYTES; // Do not initialize the byte array for a null String if (items[i] != null) { itemsBytes[i] = items[i].getBytes(StandardCharsets.UTF_8); length += itemsBytes[i].length; } } // Create a single byte array for all the Strings final byte[] bytes = new byte[length]; final WritableMemory mem = WritableMemory.writableWrap(bytes); long offsetBytes = 0; for (int i = 0; i < items.length; i++) { if (itemsBytes[i] != null) { // Write the length of the array and the array itself mem.putInt(offsetBytes, itemsBytes[i].length); offsetBytes += Integer.BYTES; mem.putByteArray(offsetBytes, itemsBytes[i], 0, itemsBytes[i].length); offsetBytes += itemsBytes[i].length; } else { mem.putInt(offsetBytes, NULL_STRING_LENGTH); offsetBytes += Integer.BYTES; } } return bytes; } @Override public String[] deserializeFromMemory(final Memory mem, final int numItems) { final String[] array = new String[numItems]; long offsetBytes = 0; for (int i = 0; i < numItems; i++) { // Read the length of the ith String Util.checkBounds(offsetBytes, Integer.BYTES, mem.getCapacity()); final int strLength = mem.getInt(offsetBytes); offsetBytes += Integer.BYTES; if (strLength >= 0) { // Read the bytes for the String final byte[] bytes = new byte[strLength]; Util.checkBounds(offsetBytes, strLength, mem.getCapacity()); mem.getByteArray(offsetBytes, bytes, 0, strLength); offsetBytes += strLength; array[i] = new String(bytes, StandardCharsets.UTF_8); } else if (strLength != NULL_STRING_LENGTH) { throw new IAE( "Illegal strLength [%s] at offset [%s]. 
Must be %s, 0 or a positive integer.", strLength, offsetBytes, NULL_STRING_LENGTH ); } } return array; } }
apache-2.0
sandor-balazs/nosql-java
oracle/src/main/java/com/github/sandor_balazs/nosql_java/web/rest/dto/KeyAndPasswordDTO.java
487
package com.github.sandor_balazs.nosql_java.web.rest.dto; public class KeyAndPasswordDTO { private String key; private String newPassword; public KeyAndPasswordDTO() { } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getNewPassword() { return newPassword; } public void setNewPassword(String newPassword) { this.newPassword = newPassword; } }
bsd-2-clause
g-votte/eclipse-collections
eclipse-collections/src/main/java/org/eclipse/collections/impl/block/procedure/MaxComparatorProcedure.java
1290
/* * Copyright (c) 2016 Goldman Sachs. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Eclipse Distribution License v. 1.0 which accompany this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. */ package org.eclipse.collections.impl.block.procedure; import java.util.Comparator; import org.eclipse.collections.api.block.procedure.Procedure; /** * Implementation of {@link Procedure} that holds on to the maximum element seen so far, * determined by the {@link Comparator}. */ public class MaxComparatorProcedure<T> extends ComparatorProcedure<T> { private static final long serialVersionUID = 1L; public MaxComparatorProcedure(Comparator<? super T> comparator) { super(comparator); } @Override public void value(T each) { if (!this.visitedAtLeastOnce) { this.visitedAtLeastOnce = true; this.result = each; } else if (this.comparator.compare(each, this.result) > 0) { this.result = each; } } }
bsd-3-clause
webbit/webbit
src/main/java/org/webbitserver/stub/StubConnection.java
3190
package org.webbitserver.stub; import org.webbitserver.EventSourceConnection; import org.webbitserver.HttpRequest; import org.webbitserver.WebSocketConnection; import java.util.LinkedList; import java.util.List; import java.util.concurrent.Executor; /** * Implementation of {@link EventSourceConnection} and {@link WebSocketConnection} that is easy to construct and * makes it easy to inspect results. Useful for testing. */ public class StubConnection extends StubDataHolder implements EventSourceConnection, WebSocketConnection { private final List<String> sentMessages = new LinkedList<String>(); private final List<byte[]> sentBinaryMessages = new LinkedList<byte[]>(); private final List<byte[]> sentPings = new LinkedList<byte[]>(); private final List<byte[]> sentPongs = new LinkedList<byte[]>(); private boolean closed = false; private HttpRequest httpRequest; private String version = null; public StubConnection(HttpRequest httpRequest) { this.httpRequest = httpRequest; } public StubConnection() { this(new StubHttpRequest()); } @Override public HttpRequest httpRequest() { return httpRequest; } @Override public StubConnection send(org.webbitserver.EventSourceMessage message) { return send(message.build()); } public StubConnection httpRequest(HttpRequest httpRequest) { this.httpRequest = httpRequest; return this; } @Override public StubConnection send(String message) { sentMessages.add(message); return this; } @Override public StubConnection send(byte[] message) { return send(message, 0, message.length); } @Override public StubConnection send(byte[] message, int offset, int length) { byte[] subMessage = new byte[length]; System.arraycopy(message, offset, subMessage, 0, length); sentBinaryMessages.add(subMessage); return this; } @Override public StubConnection ping(byte[] message) { sentPings.add(message); return this; } @Override public StubConnection pong(byte[] message) { sentPongs.add(message); return this; } @Override public StubConnection close() { closed = true; 
return this; } public boolean closed() { return closed; } public List<String> sentMessages() { return sentMessages; } public List<byte[]> sentBinaryMessages() { return sentBinaryMessages; } public List<byte[]> sentPings() { return sentPings; } public List<byte[]> sentPongs() { return sentPongs; } @Override public StubConnection data(String key, Object value) { super.data(key, value); return this; } @Override public Executor handlerExecutor() { return this; } @Override public String version() { return version; } public StubConnection version(String version) { this.version = version; return this; } @Override public void execute(Runnable command) { command.run(); } }
bsd-3-clause
sebbrudzinski/motech
modules/tasks/tasks/src/test/java/org/motechproject/tasks/service/util/KeyEvaluatorTest.java
2767
package org.motechproject.tasks.service.util; import org.joda.time.DateTime; import org.junit.Test; import org.motechproject.commons.api.MotechException; import org.motechproject.commons.date.util.DateUtil; import org.motechproject.tasks.service.util.KeyEvaluator; import static junit.framework.Assert.assertEquals; public class KeyEvaluatorTest { @Test public void shouldProperlyApplyManipulations() throws Exception { String string = "ala-has-a-cat"; DateTime now = DateUtil.now(); String toString = now.toString(); String toStringWithPattern = now.toString("yyyy-MM-dd"); KeyEvaluator keyEvaluator = new KeyEvaluator(null); String pastDate = "2015-05-15"; String timeZone = new DateTime(pastDate).toString("Z"); //Figure out the correct time zone for the given date and locale assertEquals("lower_case", keyEvaluator.manipulate("tolower", "LOWER_CASE")); assertEquals("UPPER_CASE", keyEvaluator.manipulate("toupper", "upper_case")); assertEquals("Capitalize", keyEvaluator.manipulate("capitalize", "capitalize")); assertEquals("My+sample+message", keyEvaluator.manipulate("urlencode", "My sample message")); assertEquals("37%2365%4078%2490", keyEvaluator.manipulate("URLEncode", "37#65@78$90")); assertEquals("67890", keyEvaluator.manipulate("substring(5)", "1234567890")); assertEquals("67", keyEvaluator.manipulate("substring(5,7)", "1234567890")); assertEquals(string, keyEvaluator.manipulate("join(-)", "ala has a cat")); assertEquals("ala", keyEvaluator.manipulate("split(-,0)", string)); assertEquals("cat", keyEvaluator.manipulate("split(-,3)", string)); assertEquals(pastDate + " 11:32 " + timeZone, keyEvaluator.manipulate("parseDate(yyyy/dd/MM hh:mm)", "2015/15/05 11:32")); assertEquals(toStringWithPattern, keyEvaluator.manipulate("datetime(yyyy-MM-dd)", toString)); assertEquals(now.plusDays(1).toString(), keyEvaluator.manipulate("plusDays(1)", toString)); assertEquals(now.minusDays(1).toString(), keyEvaluator.manipulate("minusDays(1)", toString)); 
assertEquals(now.plusHours(2).toString(), keyEvaluator.manipulate("plusHours(2)", toString)); assertEquals(now.minusHours(2).toString(), keyEvaluator.manipulate("minusHours(2)", toString)); assertEquals(now.plusMinutes(20).toString(), keyEvaluator.manipulate("plusMinutes(20)", toString)); assertEquals(now.minusMinutes(20).toString(), keyEvaluator.manipulate("minusMinutes(20)", toString)); } @Test(expected = MotechException.class) public void shouldThrowExceptionWhenManipulationIsUnknown(){ new KeyEvaluator(null).manipulate("undefined", "something"); } }
bsd-3-clause
kakada/dhis2
dhis-web/dhis-web-commons/src/main/java/org/hisp/dhis/commons/action/GetCategoryOptionCombosAction.java
4992
package org.hisp.dhis.commons.action; /* * Copyright (c) 2004-2015, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import java.util.Set; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementCategoryCombo; import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo; import org.hisp.dhis.dataelement.DataElementCategoryService; import org.hisp.dhis.dataelement.DataElementService; import com.opensymphony.xwork2.Action; /** * @author Lars Helge Overland */ public class GetCategoryOptionCombosAction implements Action { // ------------------------------------------------------------------------- // Dependencies // ------------------------------------------------------------------------- private DataElementService dataElementService; public void setDataElementService( DataElementService dataElementService ) { this.dataElementService = dataElementService; } private DataElementCategoryService categoryService; public void setCategoryService( DataElementCategoryService categoryService ) { this.categoryService = categoryService; } // ------------------------------------------------------------------------- // Input // ------------------------------------------------------------------------- private Integer id; public void setId( Integer id ) { this.id = id; } private Integer categoryComboId; public void setCategoryComboId( Integer categoryComboId ) { this.categoryComboId = categoryComboId; } private String categoryComboUid; public void setCategoryComboUid( String categoryComboUid ) { this.categoryComboUid = categoryComboUid; } // ------------------------------------------------------------------------- // Output // ------------------------------------------------------------------------- private Set<DataElementCategoryOptionCombo> categoryOptionCombos; public Set<DataElementCategoryOptionCombo> getCategoryOptionCombos() { return categoryOptionCombos; } // ------------------------------------------------------------------------- // Action implementation // ------------------------------------------------------------------------- @Override public 
String execute() { if ( id != null ) { DataElement dataElement = dataElementService.getDataElement( id ); if ( dataElement != null ) { DataElementCategoryCombo categoryCombo = dataElement.getCategoryCombo(); if ( categoryCombo != null ) { categoryOptionCombos = categoryCombo.getOptionCombos(); } } } else if ( categoryComboId != null ) { DataElementCategoryCombo categoryCombo = categoryService.getDataElementCategoryCombo( categoryComboId ); if ( categoryCombo != null ) { categoryOptionCombos = categoryCombo.getOptionCombos(); } } else if ( categoryComboUid != null ) { DataElementCategoryCombo categoryCombo = categoryService.getDataElementCategoryCombo( categoryComboUid ); if ( categoryCombo != null ) { categoryOptionCombos = categoryCombo.getOptionCombos(); } } return SUCCESS; } }
bsd-3-clause
plumer/codana
tomcat_files/8.0.22/Foo (2).java
910
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package util.b; /** * Tester class used for {@link javax.el.TestImportHandler}. */ public class Foo { }
mit
stevenuray/XChange
xchange-btce/src/main/java/org/knowm/xchange/btce/v3/BTCEAuthenticated.java
6814
package org.knowm.xchange.btce.v3;

import java.io.IOException;
import java.math.BigDecimal;

import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.knowm.xchange.btce.v3.dto.account.BTCEAccountInfoReturn;
import org.knowm.xchange.btce.v3.dto.account.BTCEWithDrawInfoReturn;
import org.knowm.xchange.btce.v3.dto.trade.BTCECancelOrderReturn;
import org.knowm.xchange.btce.v3.dto.trade.BTCEOpenOrdersReturn;
import org.knowm.xchange.btce.v3.dto.trade.BTCEOrder;
import org.knowm.xchange.btce.v3.dto.trade.BTCEPlaceOrderReturn;
import org.knowm.xchange.btce.v3.dto.trade.BTCETradeHistoryReturn;
import org.knowm.xchange.btce.v3.dto.trade.BTCETransHistoryReturn;

import si.mazi.rescu.ParamsDigest;
import si.mazi.rescu.SynchronizedValueFactory;

/**
 * Authenticated (private) BTC-E Trade API, declared as a rescu proxy interface.
 *
 * All operations POST to the single {@code tapi} endpoint; the method-level
 * {@code @FormParam("method")} annotation makes rescu send the Java method
 * name as the {@code method} form field (a rescu convention — the BTC-E API
 * dispatches on that field). Every call is signed via the {@code Sign} header
 * ({@link ParamsDigest}) over the form body, which must include a strictly
 * increasing {@code nonce}.
 *
 * @author Matija Mazi
 */
@Path("/")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_JSON)
public interface BTCEAuthenticated extends BTCE {

  /**
   * Returns account funds, API-key rights and open-order counts.
   *
   * All filter parameters are nullable; defaults applied server-side.
   *
   * @param from the ID of the transaction to start displaying with; default 0
   * @param count the number of transactions to display; default 1000
   * @param fromId the ID of the transaction to start displaying with; default 0
   * @param endId the ID of the transaction to finish displaying with; default +inf
   * @param order sorting, ASC or DESC; default DESC
   * @param since when to start displaying; UNIX time, default 0
   * @param end when to finish displaying; UNIX time, default +inf
   * @return e.g. {success=1, return={funds={usd=0, rur=0, eur=0, btc=0.1, ltc=0, nmc=0},
   *         rights={info=1, trade=1, withdraw=1}, transaction_count=1,
   *         open_orders=0, server_time=1357678428}}
   */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCEAccountInfoReturn getInfo(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("from") Long from,
      @FormParam("count") Long count, @FormParam("from_id") Long fromId, @FormParam("end_id") Long endId,
      @FormParam("order") SortOrder order, @FormParam("since") Long since, @FormParam("end") Long end)
      throws IOException;

  /**
   * Lists currently open orders. None of the parameters are obligatory
   * (i.e. all are nullable). Use this method instead of OrderList, which is
   * deprecated.
   *
   * @param pair the pair to display the orders for, e.g. btc_usd (default: all pairs)
   */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCEOpenOrdersReturn ActiveOrders(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("pair") String pair) throws IOException;

  /**
   * Places a limit order. All parameters are obligatory (i.e. none may be null).
   *
   * @param pair pair, e.g. btc_usd
   * @param type the transaction type (buy or sell)
   * @param rate the price to buy/sell at
   * @param amount the amount to buy/sell
   */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCEPlaceOrderReturn Trade(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("pair") String pair,
      @FormParam("type") BTCEOrder.Type type, @FormParam("rate") BigDecimal rate,
      @FormParam("amount") BigDecimal amount) throws IOException;

  /** Cancels the open order with the given exchange-assigned id. */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCECancelOrderReturn CancelOrder(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("order_id") Long orderId)
      throws IOException;

  /**
   * Returns executed trades. All parameters are nullable.
   *
   * @param from the number of the transaction to start displaying with; default 0
   * @param count the number of transactions to display; default 1000
   * @param fromId the ID of the transaction to start displaying with; default 0
   * @param endId the ID of the transaction to finish displaying with; default +inf
   * @param order sorting, ASC or DESC; default DESC
   * @param since when to start displaying; UNIX time, default 0
   * @param end when to finish displaying; UNIX time, default +inf
   * @param pair the pair to show the transactions for, e.g. btc_usd; default all pairs
   * @return e.g. {success=1, return={tradeId={pair=btc_usd, type=sell, amount=1, rate=1,
   *         orderId=1234, timestamp=1234}}}
   */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCETradeHistoryReturn TradeHistory(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("from") Long from,
      @FormParam("count") Long count, @FormParam("from_id") Long fromId, @FormParam("end_id") Long endId,
      @FormParam("order") SortOrder order, @FormParam("since") Long since, @FormParam("end") Long end,
      @FormParam("pair") String pair) throws IOException;

  /**
   * Retrieves the funds-movement (transaction) history. All parameters are nullable.
   *
   * @param from the number of the transaction to start displaying with; default 0
   * @param count the number of transactions to display; default 1000
   * @param fromId the ID of the transaction to start displaying with; default 0
   * @param endId the ID of the transaction to finish displaying with; default +inf
   * @param order sorting, ASC or DESC; default DESC
   * @param since when to start displaying; UNIX time, default 0
   * @param end when to finish displaying; UNIX time, default +inf
   * @return JSON like {success=1, return={tradeId={type=sell, amount=1.00000000,
   *         currency="BTC", status=2, description="BTC Payment", timestamp=1234}}}
   */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCETransHistoryReturn TransHistory(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("from") Long from,
      @FormParam("count") Long count, @FormParam("from_id") Long fromId, @FormParam("end_id") Long endId,
      @FormParam("order") SortOrder order, @FormParam("since") Long since, @FormParam("end") Long end)
      throws IOException;

  /** Sort direction for the history queries; serialized as "ASC"/"DESC". */
  enum SortOrder {
    ASC, DESC
  }

  /**
   * Withdraws coins to an external address.
   *
   * Author: Ondřej Novtný
   *
   * NOTE(review): unlike every other operation here this method declares no
   * {@code throws IOException} — confirm that is intentional.
   *
   * @param coinName currency to withdraw (form field is "coinName", not "currency")
   * @param amount amount of the withdrawal
   * @param address withdrawal address
   * @return the withdrawal info envelope
   */
  @POST
  @Path("tapi")
  @FormParam("method")
  BTCEWithDrawInfoReturn WithdrawCoin(@HeaderParam("Key") String apiKey, @HeaderParam("Sign") ParamsDigest signer,
      @FormParam("nonce") SynchronizedValueFactory<Long> nonce, @FormParam("coinName") String coinName,
      @FormParam("amount") BigDecimal amount, @FormParam("address") String address);
}
mit
computergeek1507/openhab
bundles/binding/org.openhab.binding.zwave/src/main/java/org/openhab/binding/zwave/internal/protocol/serialmessage/RemoveFailedNodeMessageClass.java
5682
/** * Copyright (c) 2010-2019 Contributors to the openHAB project * * See the NOTICE file(s) distributed with this work for additional * information. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 */ package org.openhab.binding.zwave.internal.protocol.serialmessage; import org.openhab.binding.zwave.internal.protocol.SerialMessage; import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessageClass; import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessagePriority; import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessageType; import org.openhab.binding.zwave.internal.protocol.ZWaveController; import org.openhab.binding.zwave.internal.protocol.event.ZWaveNetworkEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveNetworkEvent.State; import org.openhab.binding.zwave.internal.protocol.event.ZWaveNetworkEvent.Type; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class processes a serial message from the zwave controller * * @author Chris Jackson * @since 1.5.0 */ public class RemoveFailedNodeMessageClass extends ZWaveCommandProcessor { private static final Logger logger = LoggerFactory.getLogger(RemoveFailedNodeMessageClass.class); private final int FAILED_NODE_REMOVE_STARTED = 0x00; private final int FAILED_NODE_NOT_PRIMARY_CONTROLLER = 0x02; private final int FAILED_NODE_NO_CALLBACK_FUNCTION = 0x04; private final int FAILED_NODE_NOT_FOUND = 0x08; private final int FAILED_NODE_REMOVE_PROCESS_BUSY = 0x10; private final int FAILED_NODE_REMOVE_FAIL = 0x20; private final int FAILED_NODE_OK = 0x00; private final int FAILED_NODE_REMOVED = 0x01; private final int FAILED_NODE_NOT_REMOVED = 0x02; public SerialMessage doRequest(int nodeId) { logger.debug("NODE {}: Marking node as having failed.", nodeId); // Queue the 
request SerialMessage newMessage = new SerialMessage(SerialMessageClass.RemoveFailedNodeID, SerialMessageType.Request, SerialMessageClass.RemoveFailedNodeID, SerialMessagePriority.High); byte[] newPayload = { (byte) nodeId, (byte) 0xfe }; newMessage.setMessagePayload(newPayload); return newMessage; } @Override public boolean handleResponse(ZWaveController zController, SerialMessage lastSentMessage, SerialMessage incomingMessage) { logger.debug("Got RemoveFailedNode response."); int nodeId = lastSentMessage.getMessagePayloadByte(0); switch (incomingMessage.getMessagePayloadByte(0)) { case FAILED_NODE_REMOVE_STARTED: logger.debug("NODE {}: Remove failed node successfully placed on stack.", nodeId); break; case FAILED_NODE_NOT_PRIMARY_CONTROLLER: logger.error("NODE {}: Remove failed node failed as not Primary Controller for node!", nodeId); transactionComplete = true; break; case FAILED_NODE_NO_CALLBACK_FUNCTION: logger.error("NODE {}: Remove failed node failed as no callback function!", nodeId); transactionComplete = true; break; case FAILED_NODE_NOT_FOUND: logger.error("NODE {}: Remove failed node failed as node not found!", nodeId); transactionComplete = true; break; case FAILED_NODE_REMOVE_PROCESS_BUSY: logger.error("NODE {}: Remove failed node failed as Controller Busy!", nodeId); transactionComplete = true; break; case FAILED_NODE_REMOVE_FAIL: logger.error("NODE {}: Remove failed node failed!", nodeId); transactionComplete = true; break; default: logger.error("NODE {}: Remove failed node not placed on stack due to error 0x{}.", nodeId, Integer.toHexString(incomingMessage.getMessagePayloadByte(0))); transactionComplete = true; break; } return true; } @Override public boolean handleRequest(ZWaveController zController, SerialMessage lastSentMessage, SerialMessage incomingMessage) { int nodeId = lastSentMessage.getMessagePayloadByte(0); logger.debug("NODE {}: Got RemoveFailedNode request.", nodeId); switch (incomingMessage.getMessagePayloadByte(0)) { case 
FAILED_NODE_OK: logger.error("NODE {}: Unable to remove failed node as it is not a failed node!", nodeId); transactionComplete = true; break; case FAILED_NODE_REMOVED: logger.debug("NODE {}: Successfully removed node from controller database!", nodeId); zController.notifyEventListeners(new ZWaveNetworkEvent(Type.DeleteNode, nodeId, State.Success)); transactionComplete = true; break; case FAILED_NODE_NOT_REMOVED: logger.error("NODE {}: Unable to remove failed node!", nodeId); transactionComplete = true; break; default: logger.error("NODE {}: Remove failed node failed with error 0x{}.", nodeId, Integer.toHexString(incomingMessage.getMessagePayloadByte(0))); transactionComplete = true; break; } return true; } }
epl-1.0
theoweiss/openhab2
bundles/org.openhab.binding.darksky/src/main/java/org/openhab/binding/darksky/internal/model/DarkSkyJsonWeatherData.java
2528
/**
 * Copyright (c) 2010-2019 Contributors to the openHAB project
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 */
package org.openhab.binding.darksky.internal.model;

import java.util.List;

import org.eclipse.jdt.annotation.Nullable;

/**
 * The {@link DarkSkyJsonWeatherData} is the Java class used to map the JSON response to a Dark Sky request.
 *
 * Field names intentionally mirror the Dark Sky API payload keys — the JSON
 * deserializer binds by name, so do not rename them.
 *
 * @author Christoph Weitkamp - Initial contribution
 */
public class DarkSkyJsonWeatherData {

    private double latitude;

    private double longitude;

    private String timezone;

    private DarkSkyCurrentlyData currently;

    private DarkSkyHourlyData hourly;

    private DarkSkyDailyData daily;

    // Absent when the API reports no active alerts for the location.
    private @Nullable List<AlertsData> alerts;

    private int offset;

    public double getLatitude() {
        return latitude;
    }

    public void setLatitude(double latitude) {
        this.latitude = latitude;
    }

    public double getLongitude() {
        return longitude;
    }

    public void setLongitude(double longitude) {
        this.longitude = longitude;
    }

    public String getTimezone() {
        return timezone;
    }

    public void setTimezone(String timezone) {
        this.timezone = timezone;
    }

    public DarkSkyCurrentlyData getCurrently() {
        return currently;
    }

    public void setCurrently(DarkSkyCurrentlyData currently) {
        this.currently = currently;
    }

    public DarkSkyHourlyData getHourly() {
        return hourly;
    }

    public void setHourly(DarkSkyHourlyData hourly) {
        this.hourly = hourly;
    }

    public DarkSkyDailyData getDaily() {
        return daily;
    }

    public void setDaily(DarkSkyDailyData daily) {
        this.daily = daily;
    }

    public @Nullable List<AlertsData> getAlerts() {
        return alerts;
    }

    public void setAlerts(List<AlertsData> alerts) {
        this.alerts = alerts;
    }

    public int getOffset() {
        return offset;
    }

    public void setOffset(int offset) {
        this.offset = offset;
    }

    /**
     * One severe-weather alert entry from the "alerts" array.
     *
     * Declared {@code static}: the original non-static inner class carried a
     * hidden reference to the enclosing instance, which JSON mappers cannot
     * populate correctly when instantiating the class reflectively and which
     * needlessly pins the outer object in memory. Nothing here reads outer
     * state, so a static nested class is the correct form.
     */
    public static class AlertsData {
        public String title;
        // UNIX timestamps per the payload; units not converted here.
        public int time;
        public int expires;
        public String description;
        public String severity;
        public String uri;
        public List<String> regions;
    }
}
epl-1.0
idserda/openhab
bundles/binding/org.openhab.binding.tcp/src/main/java/org/openhab/binding/tcp/protocol/internal/TCPGenericBindingProvider.java
831
/**
 * Copyright (c) 2010-2019 by the respective copyright holders.
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 */
package org.openhab.binding.tcp.protocol.internal;

import org.openhab.binding.tcp.protocol.TCPBindingProvider;

/**
 * Generic binding provider for the "tcp" binding type. Item configuration
 * examples:
 *
 * tcp=">[ON:192.168.0.1:3000:some text], >[OFF:192.168.0.1:3000:some other command]"
 * tcp="<[192.168.0.1:3000]" - for String, Number,... Items
 *
 * @author Karel Goderis
 * @since 1.1.0
 */
public class TCPGenericBindingProvider extends ProtocolGenericBindingProvider implements TCPBindingProvider {

    /** Binding type token that item configurations are matched against. */
    private static final String BINDING_TYPE = "tcp";

    @Override
    public String getBindingType() {
        return BINDING_TYPE;
    }
}
epl-1.0
huran2014/huran.github.io
program_learning/Java/MyEclipseProfessional2014/mr/src/main/java/org/apache/mahout/ep/Mapping.java
6039
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.mahout.ep;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import com.google.common.base.Preconditions;
import org.apache.hadoop.io.Writable;
import org.apache.mahout.classifier.sgd.PolymorphicWritable;
import org.apache.mahout.math.function.DoubleFunction;

/**
 * Provides coordinate transformations so that evolution can proceed on the entire space of
 * reals but have the output limited and squished in convenient (and safe) ways.
 *
 * Instances are created via the static factory methods ({@link #softLimit},
 * {@link #logLimit}, {@link #exponential}, {@link #identity}); the private
 * constructor keeps the subclass set closed to the nested classes below.
 * Each mapping is {@link Writable} so it can be (de)serialized via Hadoop.
 */
public abstract class Mapping extends DoubleFunction implements Writable {

  private Mapping() {
  }

  /** Sigmoid squashing of the reals into (min, max); see {@link Mapping#softLimit}. */
  public static final class SoftLimit extends Mapping {
    private double min;
    private double max;
    private double scale;

    public SoftLimit() {
    }

    private SoftLimit(double min, double max, double scale) {
      this.min = min;
      this.max = max;
      this.scale = scale;
    }

    @Override
    public double apply(double v) {
      // Logistic function stretched to (min, max); apply(0) is the midpoint.
      return min + (max - min) * 1 / (1 + Math.exp(-v * scale));
    }

    @Override
    public void write(DataOutput out) throws IOException {
      out.writeDouble(min);
      out.writeDouble(max);
      out.writeDouble(scale);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
      min = in.readDouble();
      max = in.readDouble();
      scale = in.readDouble();
    }
  }

  /** Soft limit applied in log space; see {@link Mapping#logLimit}. */
  public static final class LogLimit extends Mapping {
    private Mapping wrapped;

    public LogLimit() {
    }

    private LogLimit(double low, double high) {
      wrapped = softLimit(Math.log(low), Math.log(high));
    }

    @Override
    public double apply(double v) {
      return Math.exp(wrapped.apply(v));
    }

    @Override
    public void write(DataOutput dataOutput) throws IOException {
      // Polymorphic so readFields can reconstruct the concrete wrapped type.
      PolymorphicWritable.write(dataOutput, wrapped);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
      wrapped = PolymorphicWritable.read(in, Mapping.class);
    }
  }

  /** Maps v to exp(v * scale); see {@link Mapping#exponential}. */
  public static final class Exponential extends Mapping {
    private double scale;

    public Exponential() {
    }

    private Exponential(double scale) {
      this.scale = scale;
    }

    @Override
    public double apply(double v) {
      return Math.exp(v * scale);
    }

    @Override
    public void write(DataOutput out) throws IOException {
      out.writeDouble(scale);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
      scale = in.readDouble();
    }
  }

  /** The identity mapping; see {@link Mapping#identity}. */
  public static final class Identity extends Mapping {
    @Override
    public double apply(double v) {
      return v;
    }

    @Override
    public void write(DataOutput dataOutput) {
      // stateless
    }

    @Override
    public void readFields(DataInput dataInput) {
      // stateless
    }
  }

  /**
   * Maps input to the open interval (min, max) with 0 going to the mean of min and
   * max.  When scale is large, a larger proportion of values are mapped to points
   * near the boundaries.  When scale is small, a larger proportion of values are mapped to
   * points well within the boundaries.
   * @param min The largest lower bound on values to be returned.
   * @param max The least upper bound on values to be returned.
   * @param scale Defines how sharp the boundaries are.
   * @return A mapping that satisfies the desired constraint.
   */
  public static Mapping softLimit(double min, double max, double scale) {
    return new SoftLimit(min, max, scale);
  }

  /**
   * Maps input to the open interval (min, max) with 0 going to the mean of min and
   * max.  When scale is large, a larger proportion of values are mapped to points
   * near the boundaries.
   * @see #softLimit(double, double, double)
   * @param min The largest lower bound on values to be returned.
   * @param max The least upper bound on values to be returned.
   * @return A mapping that satisfies the desired constraint.
   */
  public static Mapping softLimit(double min, double max) {
    return softLimit(min, max, 1);
  }

  /**
   * Maps input to positive values in the open interval (min, max) with
   * 0 going to the geometric mean.  Near the geometric mean, values are
   * distributed roughly geometrically.
   * @param low The largest lower bound for output results.  Must be &gt; 0.
   * @param high The least upper bound for output results.  Must be &gt; 0.
   * @return A mapped value.
   */
  public static Mapping logLimit(double low, double high) {
    // Bug fix: Guava's Preconditions templates substitute only %s placeholders;
    // the previous %f was emitted literally and the offending value was dropped
    // from the message.
    Preconditions.checkArgument(low > 0, "Lower bound for log limit must be > 0 but was %s", low);
    Preconditions.checkArgument(high > 0, "Upper bound for log limit must be > 0 but was %s", high);
    return new LogLimit(low, high);
  }

  /**
   * Maps results to positive values.
   * @return A positive value.
   */
  public static Mapping exponential() {
    return exponential(1);
  }

  /**
   * Maps results to positive values.
   * @param scale If large, then large values are more likely.
   * @return A positive value.
   */
  public static Mapping exponential(double scale) {
    return new Exponential(scale);
  }

  /**
   * Maps results to themselves.
   * @return The original value.
   */
  public static Mapping identity() {
    return new Identity();
  }
}
gpl-2.0
hexbinary/landing
src/main/java/org/oscarehr/util/HinValidator.java
1790
/** * * Copyright (c) 2005-2012. Centre for Research on Inner City Health, St. Michael's Hospital, Toronto. All Rights Reserved. * This software is published under the GPL GNU General Public License. * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * This software was written for * Centre for Research on Inner City Health, St. Michael's Hospital, * Toronto, Ontario, Canada */ package org.oscarehr.util; import org.apache.commons.lang.StringUtils; public class HinValidator { /** * This method will return false if the hin is clearly invalid. It will return true otherwise. This means that the default is true and anything it can't figure out will return true. As an example if the hinType is null then there's no validation * algorithm so it will return true. */ public static boolean isValid(String hin, String hinType) { if ("on".equals(hinType)) return (isValid_on(hin)); else return (true); } private static boolean isValid_on(String hin) { if (hin == null) return (false); if (hin.length() != 10) return (false); if (!StringUtils.isNumeric(hin)) return (false); return (true); } }
gpl-2.0
pabalexa/calibre2opds
Tools/src/main/java/com/gmail/dpierron/tools/LinuxOS.java
545
package com.gmail.dpierron.tools;

import java.io.File;
import java.io.IOException;

/**
 * Linux implementation of {@link BaseOS}: opens a file in the desktop
 * environment, trying konqueror (KDE) first and falling back to gnome-open.
 */
public class LinuxOS extends BaseOS {

  public LinuxOS(String osTypeName) {
    super(osTypeName);
  }

  /**
   * Launches a viewer for the given file and returns the spawned process.
   *
   * Bug fix: the previous exec(String) overload tokenizes its argument on
   * whitespace, so any absolute path containing a space was split into
   * multiple arguments and the open failed. The String[] overload passes the
   * path as a single argument.
   */
  @Override
  Process _openFile(File file) throws IOException {
    try {
      // file.getAbsolutePath() already starts with '/', matching the
      // original "file:///" + path concatenation byte-for-byte.
      return Runtime.getRuntime().exec(new String[] { "konqueror", "file:///" + file.getAbsolutePath() });
    } catch (Exception e) {
      // Deliberate best-effort fallback when konqueror is unavailable
      // (e.g. GNOME desktops); only this second attempt may propagate.
      return Runtime.getRuntime().exec(new String[] { "gnome-open", file.getAbsolutePath() });
    }
  }
}
gpl-3.0
Scrik/Cauldron-1
eclipse/cauldron/src/main/java/net/minecraft/scoreboard/Score.java
2777
package net.minecraft.scoreboard;

import java.util.Comparator;
import java.util.List;

/**
 * One scoreboard entry: the point total a named player holds for one objective.
 * Names of the form func_*/field_* are obfuscated (MCP mappings) and form the
 * public API — do not rename.
 */
public class Score
{
    /** Raw comparator ordering scores ascending by point value. */
    public static final Comparator field_96658_a = new Comparator()
    {
        private static final String __OBFID = "CL_00000618";

        public int compare(Score p_compare_1_, Score p_compare_2_)
        {
            return p_compare_1_.getScorePoints() > p_compare_2_.getScorePoints() ? 1 : (p_compare_1_.getScorePoints() < p_compare_2_.getScorePoints() ? -1 : 0);
        }

        // Manually-written bridge for the raw Comparator interface.
        public int compare(Object p_compare_1_, Object p_compare_2_)
        {
            return this.compare((Score)p_compare_1_, (Score)p_compare_2_);
        }
    };
    private final Scoreboard theScoreboard;
    private final ScoreObjective theScoreObjective;
    private final String scorePlayerName;
    // Current point value for this (player, objective) pair.
    private int field_96655_e;
    private static final String __OBFID = "CL_00000617";

    public Score(Scoreboard p_i2309_1_, ScoreObjective p_i2309_2_, String p_i2309_3_)
    {
        this.theScoreboard = p_i2309_1_;
        this.theScoreObjective = p_i2309_2_;
        this.scorePlayerName = p_i2309_3_;
    }

    /**
     * Adds the given amount to the score.
     * (Method name typo "increse" is part of the obfuscation-era API; kept.)
     *
     * @throws IllegalStateException if the objective's criteria is read-only.
     */
    public void increseScore(int p_96649_1_)
    {
        if (this.theScoreObjective.getCriteria().isReadOnly())
        {
            throw new IllegalStateException("Cannot modify read-only score");
        }
        else
        {
            this.setScorePoints(this.getScorePoints() + p_96649_1_);
        }
    }

    /**
     * Subtracts the given amount from the score.
     *
     * @throws IllegalStateException if the objective's criteria is read-only.
     */
    public void decreaseScore(int p_96646_1_)
    {
        if (this.theScoreObjective.getCriteria().isReadOnly())
        {
            throw new IllegalStateException("Cannot modify read-only score");
        }
        else
        {
            this.setScorePoints(this.getScorePoints() - p_96646_1_);
        }
    }

    /**
     * Increments the score by one. The read-only check here is redundant —
     * increseScore(1) performs the same check again — but preserved as-is.
     */
    public void func_96648_a()
    {
        if (this.theScoreObjective.getCriteria().isReadOnly())
        {
            throw new IllegalStateException("Cannot modify read-only score");
        }
        else
        {
            this.increseScore(1);
        }
    }

    public int getScorePoints()
    {
        return this.field_96655_e;
    }

    /**
     * Sets the score, and only when the value actually changed calls
     * theScoreboard.func_96536_a(this) — presumably a change notification to
     * the scoreboard (obfuscated; confirm against Scoreboard).
     */
    public void setScorePoints(int p_96647_1_)
    {
        int j = this.field_96655_e;
        this.field_96655_e = p_96647_1_;

        if (j != p_96647_1_)
        {
            this.getScoreScoreboard().func_96536_a(this);
        }
    }

    public ScoreObjective func_96645_d()
    {
        return this.theScoreObjective;
    }

    public String getPlayerName()
    {
        return this.scorePlayerName;
    }

    public Scoreboard getScoreScoreboard()
    {
        return this.theScoreboard;
    }

    /**
     * Recomputes the score from the criteria over the given (raw-typed) list
     * via func_96635_a; the list's element type is not visible from here.
     */
    public void func_96651_a(List p_96651_1_)
    {
        this.setScorePoints(this.theScoreObjective.getCriteria().func_96635_a(p_96651_1_));
    }
}
gpl-3.0