gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.guoxiaoxing.cloud.music.service;
import android.app.Activity;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.ServiceConnection;
import android.database.Cursor;
import android.net.Uri;
import android.os.IBinder;
import android.os.RemoteException;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;
import com.guoxiaoxing.cloud.music.MediaAidlInterface;
import com.guoxiaoxing.cloud.music.R;
import com.guoxiaoxing.cloud.music.model.info.MusicInfo;
import java.util.Arrays;
import java.util.HashMap;
import java.util.WeakHashMap;
/**
 * Static facade over the background {@link MediaService}, reached through the
 * {@link MediaAidlInterface} AIDL binder.
 *
 * <p>Every playback method is null-safe with respect to {@link #mService}:
 * before the service is bound (or after its process dies) calls return a
 * neutral default (null / -1 / 0 / false / empty array) instead of throwing.
 * {@link RemoteException}s are deliberately swallowed because a dead service
 * process is recovered by re-binding, not by crashing the UI.
 */
public class MusicPlayer {

    /** Binder per bound context; weak keys let abandoned contexts be collected. */
    private static final WeakHashMap<Context, ServiceBinder> mConnectionMap;
    /** Shared empty queue returned while the service is unavailable. */
    private static final long[] sEmptyList;
    /** Remote playback service; null until {@link #bindToService} connects. */
    public static MediaAidlInterface mService = null;
    /** Reusable batch-insert buffer filled by {@link #makeInsertItems}. */
    private static ContentValues[] mContentValuesCache = null;

    static {
        mConnectionMap = new WeakHashMap<Context, ServiceBinder>();
        sEmptyList = new long[0];
    }

    /**
     * Starts and binds the media service.
     *
     * @param context  calling context; for child Activities the parent is used
     * @param callback optional connection callback forwarded by the binder
     * @return a token to hand to {@link #unbindFromService}, or null when
     *         binding failed
     */
    public static final ServiceToken bindToService(final Context context,
                                                   final ServiceConnection callback) {
        // FIX: the original cast context to Activity unconditionally, which
        // throws ClassCastException when a Service/Application context is used.
        Context base = context;
        if (context instanceof Activity) {
            final Activity parent = ((Activity) context).getParent();
            base = (parent != null) ? parent : context;
        }
        final ContextWrapper contextWrapper = new ContextWrapper(base);
        contextWrapper.startService(new Intent(contextWrapper, MediaService.class));
        final ServiceBinder binder = new ServiceBinder(callback,
                contextWrapper.getApplicationContext());
        if (contextWrapper.bindService(
                new Intent().setClass(contextWrapper, MediaService.class), binder, 0)) {
            mConnectionMap.put(contextWrapper, binder);
            return new ServiceToken(contextWrapper);
        }
        return null;
    }

    /**
     * Unbinds the connection identified by {@code token}. When the last
     * binding goes away the cached binder proxy is dropped.
     */
    public static void unbindFromService(final ServiceToken token) {
        if (token == null) {
            return;
        }
        final ContextWrapper mContextWrapper = token.mWrappedContext;
        final ServiceBinder mBinder = mConnectionMap.remove(mContextWrapper);
        if (mBinder == null) {
            return;
        }
        mContextWrapper.unbindService(mBinder);
        if (mConnectionMap.isEmpty()) {
            mService = null;
        }
    }

    /** @return true when the service binder proxy is currently available. */
    public static final boolean isPlaybackServiceConnected() {
        return mService != null;
    }

    /** Skips to the next track; no-op when the service is unbound. */
    public static void next() {
        try {
            if (mService != null) {
                mService.next();
            }
        } catch (final RemoteException ignored) {
        }
    }

    /** Pushes initial user settings to a freshly connected service. */
    public static void initPlaybackServiceWithSettings(final Context context) {
        setShowAlbumArtOnLockscreen(true);
    }

    /** Toggles lockscreen album-art; no-op when the service is unbound. */
    public static void setShowAlbumArtOnLockscreen(final boolean enabled) {
        try {
            if (mService != null) {
                mService.setLockscreenAlbumArt(enabled);
            }
        } catch (final RemoteException ignored) {
        }
    }

    /** Requests "next" via a service Intent, usable before binding completes. */
    public static void asyncNext(final Context context) {
        final Intent previous = new Intent(context, MediaService.class);
        previous.setAction(MediaService.NEXT_ACTION);
        context.startService(previous);
    }

    /**
     * Requests "previous" via a service Intent.
     *
     * @param force when true, always go back even if the track would normally
     *              just restart
     */
    public static void previous(final Context context, final boolean force) {
        final Intent previous = new Intent(context, MediaService.class);
        if (force) {
            previous.setAction(MediaService.PREVIOUS_FORCE_ACTION);
        } else {
            previous.setAction(MediaService.PREVIOUS_ACTION);
        }
        context.startService(previous);
    }

    /** Toggles between play and pause; no-op when the service is unbound. */
    public static void playOrPause() {
        try {
            if (mService != null) {
                if (mService.isPlaying()) {
                    mService.pause();
                } else {
                    mService.play();
                }
            }
        } catch (final Exception ignored) {
        }
    }

    /** @return true when the current track is a local (on-device) file. */
    public static boolean isTrackLocal() {
        try {
            if (mService != null) {
                return mService.isTrackLocal();
            }
        } catch (RemoteException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Advances the repeat state machine:
     * shuffle-on -&gt; repeat-current, repeat-current -&gt; repeat-all,
     * repeat-all -&gt; shuffle-on.
     */
    public static void cycleRepeat() {
        try {
            if (mService != null) {
                if (mService.getShuffleMode() == MediaService.SHUFFLE_NORMAL) {
                    mService.setShuffleMode(MediaService.SHUFFLE_NONE);
                    mService.setRepeatMode(MediaService.REPEAT_CURRENT);
                    return;
                }
                switch (mService.getRepeatMode()) {
                    case MediaService.REPEAT_CURRENT:
                        mService.setRepeatMode(MediaService.REPEAT_ALL);
                        break;
                    case MediaService.REPEAT_ALL:
                        mService.setShuffleMode(MediaService.SHUFFLE_NORMAL);
                        break;
                }
            }
        } catch (final RemoteException ignored) {
        }
    }

    /**
     * Toggles shuffle on/off. Turning shuffle on while repeat-current is set
     * widens repeat to repeat-all (shuffling a single track is meaningless).
     */
    public static void cycleShuffle() {
        try {
            if (mService != null) {
                switch (mService.getShuffleMode()) {
                    case MediaService.SHUFFLE_NONE:
                        mService.setShuffleMode(MediaService.SHUFFLE_NORMAL);
                        if (mService.getRepeatMode() == MediaService.REPEAT_CURRENT) {
                            mService.setRepeatMode(MediaService.REPEAT_ALL);
                        }
                        break;
                    case MediaService.SHUFFLE_NORMAL:
                        mService.setShuffleMode(MediaService.SHUFFLE_NONE);
                        break;
                    default:
                        break;
                }
            }
        } catch (final RemoteException ignored) {
        }
    }

    /** @return true while audio is actually playing; false when unbound. */
    public static final boolean isPlaying() {
        if (mService != null) {
            try {
                return mService.isPlaying();
            } catch (final RemoteException ignored) {
            }
        }
        return false;
    }

    /** @return current shuffle mode, or 0 when the service is unbound. */
    public static final int getShuffleMode() {
        if (mService != null) {
            try {
                return mService.getShuffleMode();
            } catch (final RemoteException ignored) {
            }
        }
        return 0;
    }

    /** Sets the shuffle mode; no-op when the service is unbound. */
    public static void setShuffleMode(int mode) {
        try {
            if (mService != null) {
                mService.setShuffleMode(mode);
            }
        } catch (RemoteException ignored) {
        }
    }

    /** @return current repeat mode, or 0 when the service is unbound. */
    public static final int getRepeatMode() {
        if (mService != null) {
            try {
                return mService.getRepeatMode();
            } catch (final RemoteException ignored) {
            }
        }
        return 0;
    }

    /** @return current track title, or null when unavailable. */
    public static final String getTrackName() {
        if (mService != null) {
            try {
                return mService.getTrackName();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return current artist name, or null when unavailable. */
    public static final String getArtistName() {
        if (mService != null) {
            try {
                return mService.getArtistName();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return current album name, or null when unavailable. */
    public static final String getAlbumName() {
        if (mService != null) {
            try {
                return mService.getAlbumName();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return current album-art path, or null when unavailable. */
    public static final String getAlbumPath() {
        if (mService != null) {
            try {
                return mService.getAlbumPath();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return all album-art paths, or null when unavailable. */
    public static final String[] getAlbumPathAll() {
        if (mService != null) {
            try {
                // NOTE: "getAlbumPathtAll" is a typo baked into the AIDL
                // contract; it cannot be renamed here without breaking it.
                return mService.getAlbumPathtAll();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return current album id, or -1 when unavailable. */
    public static final long getCurrentAlbumId() {
        if (mService != null) {
            try {
                return mService.getAlbumId();
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return current audio id, or -1 when unavailable. */
    public static final long getCurrentAudioId() {
        if (mService != null) {
            try {
                return mService.getAudioId();
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return the currently playing track, or null when unavailable. */
    public static final MusicTrack getCurrentTrack() {
        if (mService != null) {
            try {
                return mService.getCurrentTrack();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return the queue entry at {@code index}, or null when unavailable. */
    public static final MusicTrack getTrack(int index) {
        if (mService != null) {
            try {
                return mService.getTrack(index);
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** @return the upcoming audio id, or -1 when unavailable. */
    public static final long getNextAudioId() {
        if (mService != null) {
            try {
                return mService.getNextAudioId();
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return the previous audio id, or -1 when unavailable. */
    public static final long getPreviousAudioId() {
        if (mService != null) {
            try {
                return mService.getPreviousAudioId();
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return current artist id, or -1 when unavailable. */
    public static final long getCurrentArtistId() {
        if (mService != null) {
            try {
                return mService.getArtistId();
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return the player's audio session id, or -1 when unavailable. */
    public static final int getAudioSessionId() {
        if (mService != null) {
            try {
                return mService.getAudioSessionId();
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return the play queue as audio ids; empty (never null) when unbound. */
    public static final long[] getQueue() {
        try {
            if (mService != null) {
                return mService.getQueue();
            }
        } catch (final RemoteException ignored) {
        }
        return sEmptyList;
    }

    /** @return id-to-info map for the queue, or null when unavailable. */
    public static final HashMap<Long, MusicInfo> getPlayinfos() {
        try {
            if (mService != null) {
                return (HashMap<Long, MusicInfo>) mService.getPlayinfos();
            }
        } catch (final RemoteException ignored) {
        }
        return null;
    }

    /** @return audio id at {@code position} in the queue, or -1. */
    public static final long getQueueItemAtPosition(int position) {
        try {
            if (mService != null) {
                return mService.getQueueItemAtPosition(position);
            }
        } catch (final RemoteException ignored) {
        }
        return -1;
    }

    /** @return number of queued tracks, or 0 when unavailable. */
    public static final int getQueueSize() {
        try {
            if (mService != null) {
                return mService.getQueueSize();
            }
        } catch (final RemoteException ignored) {
        }
        return 0;
    }

    /** @return index of the playing track within the queue, or 0. */
    public static final int getQueuePosition() {
        try {
            if (mService != null) {
                return mService.getQueuePosition();
            }
        } catch (final RemoteException ignored) {
        }
        return 0;
    }

    /** Jumps playback to the given queue index; no-op when unbound. */
    public static void setQueuePosition(final int position) {
        if (mService != null) {
            try {
                mService.setQueuePosition(position);
            } catch (final RemoteException ignored) {
            }
        }
    }

    /** @return size of the play history, or 0 when unavailable. */
    public static final int getQueueHistorySize() {
        if (mService != null) {
            try {
                return mService.getQueueHistorySize();
            } catch (final RemoteException ignored) {
            }
        }
        return 0;
    }

    /** @return history entry at {@code position}, or -1 when unavailable. */
    public static final int getQueueHistoryPosition(int position) {
        if (mService != null) {
            try {
                return mService.getQueueHistoryPosition(position);
            } catch (final RemoteException ignored) {
            }
        }
        return -1;
    }

    /** @return full play-history list, or null when unavailable. */
    public static final int[] getQueueHistoryList() {
        if (mService != null) {
            try {
                return mService.getQueueHistoryList();
            } catch (final RemoteException ignored) {
            }
        }
        return null;
    }

    /** Removes every queue occurrence of {@code id}; returns removed count. */
    public static final int removeTrack(final long id) {
        try {
            if (mService != null) {
                return mService.removeTrack(id);
            }
        } catch (final RemoteException ignored) {
        }
        return 0;
    }

    /** Removes {@code id} only at {@code position}; false on failure. */
    public static final boolean removeTrackAtPosition(final long id, final int position) {
        try {
            if (mService != null) {
                return mService.removeTrackAtPosition(id, position);
            }
        } catch (final RemoteException ignored) {
        }
        return false;
    }

    /** Moves a queue entry from {@code from} to {@code to}; no-op when unbound. */
    public static void moveQueueItem(final int from, final int to) {
        try {
            if (mService != null) {
                mService.moveQueueItem(from, to);
            }
        } catch (final RemoteException ignored) {
        }
    }

    /**
     * Replaces the queue with {@code list} and starts playing at
     * {@code position}. When the requested list is already the current queue,
     * only the position changes (or playback simply resumes if the requested
     * track is already current).
     *
     * @param infos        id-to-metadata map for the tracks in {@code list}
     * @param list         audio ids to play; ignored when null/empty
     * @param position     start index; values &lt; 0 are clamped to 0
     * @param forceShuffle when true, shuffle mode is switched on and the
     *                     service picks the first track itself
     */
    public static synchronized void playAll(final HashMap<Long, MusicInfo> infos, final long[] list, int position, final boolean forceShuffle) {
        if (list == null || list.length == 0 || mService == null) {
            return;
        }
        try {
            if (forceShuffle) {
                mService.setShuffleMode(MediaService.SHUFFLE_NORMAL);
            }
            final long currentId = mService.getAudioId();
            Log.e("currentId", currentId + "");
            final int currentQueuePosition = getQueuePosition();
            if (position != -1) {
                final long[] playlist = getQueue();
                if (Arrays.equals(list, playlist)) {
                    // Same queue: either resume the current track or just jump.
                    if (currentQueuePosition == position && currentId == list[position]) {
                        mService.play();
                    } else {
                        mService.setQueuePosition(position);
                    }
                    return;
                }
            }
            if (position < 0) {
                position = 0;
            }
            mService.open(infos, list, forceShuffle ? -1 : position);
            mService.play();
            Log.e("time", System.currentTimeMillis() + "");
        } catch (final RemoteException ignored) {
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }
    }

    /**
     * Queues {@code list} to play right after the current track and shows a
     * confirmation toast.
     *
     * <p>NOTE(review): before enqueueing, every id in {@code list} except the
     * currently playing one is removed from the queue — presumably to avoid
     * duplicates. This preserves the original behavior; confirm it is intended.
     */
    public static void playNext(Context context, final HashMap<Long, MusicInfo> map, final long[] list) {
        if (mService == null) {
            return;
        }
        try {
            for (int i = 0; i < list.length; i++) {
                if (MusicPlayer.getCurrentAudioId() != list[i]) {
                    MusicPlayer.removeTrack(list[i]);
                }
            }
            mService.enqueue(list, map, MediaService.NEXT);
            Toast.makeText(context, R.string.next_play, Toast.LENGTH_SHORT).show();
        } catch (final RemoteException ignored) {
        }
    }

    /** @return file path of the current track, or null when unavailable. */
    public static String getPath() {
        if (mService == null) {
            return null;
        }
        try {
            return mService.getPath();
        } catch (Exception e) {
        }
        return null;
    }

    /** Stops playback entirely; no-op when the service is unbound. */
    public static void stop() {
        // FIX: guard against NullPointerException when called before binding.
        if (mService == null) {
            return;
        }
        try {
            mService.stop();
        } catch (Exception e) {
        }
    }

    /**
     * Looks up the number of songs on an album via the MediaStore.
     *
     * @return song count, or 0 when {@code id} is -1 or the album is unknown
     */
    public static final int getSongCountForAlbumInt(final Context context, final long id) {
        int songCount = 0;
        if (id == -1) {
            return songCount;
        }
        Uri uri = ContentUris.withAppendedId(MediaStore.Audio.Albums.EXTERNAL_CONTENT_URI, id);
        Cursor cursor = context.getContentResolver().query(uri,
                new String[]{MediaStore.Audio.AlbumColumns.NUMBER_OF_SONGS}, null, null, null);
        if (cursor != null) {
            try {
                // moveToFirst() returns false for an empty result set.
                if (cursor.moveToFirst() && !cursor.isNull(0)) {
                    songCount = cursor.getInt(0);
                }
            } finally {
                // FIX: close in finally so a CursorWindow error cannot leak it.
                cursor.close();
            }
        }
        return songCount;
    }

    /**
     * Looks up an album's first-year tag via the MediaStore.
     *
     * @return release year string, or null when unknown
     */
    public static final String getReleaseDateForAlbum(final Context context, final long id) {
        if (id == -1) {
            return null;
        }
        Uri uri = ContentUris.withAppendedId(MediaStore.Audio.Albums.EXTERNAL_CONTENT_URI, id);
        Cursor cursor = context.getContentResolver().query(uri, new String[]{
                MediaStore.Audio.AlbumColumns.FIRST_YEAR
        }, null, null, null);
        String releaseDate = null;
        if (cursor != null) {
            try {
                if (cursor.moveToFirst()) {
                    releaseDate = cursor.getString(0);
                }
            } finally {
                // FIX: close in finally so a read error cannot leak the cursor.
                cursor.close();
            }
        }
        return releaseDate;
    }

    /** Seeks to an absolute position in milliseconds; no-op when unbound. */
    public static void seek(final long position) {
        if (mService != null) {
            try {
                mService.seek(position);
            } catch (final RemoteException ignored) {
            }
        }
    }

    /** Seeks relative to the current position; no-op when unbound. */
    public static void seekRelative(final long deltaInMs) {
        if (mService != null) {
            try {
                mService.seekRelative(deltaInMs);
            } catch (final RemoteException ignored) {
            } catch (final IllegalStateException ignored) {
            }
        }
    }

    /** @return playback position in ms, or 0 when unavailable. */
    public static final long position() {
        if (mService != null) {
            try {
                return mService.position();
            } catch (final RemoteException ignored) {
            } catch (final IllegalStateException ex) {
            }
        }
        return 0;
    }

    /** @return buffered/secondary position, or 0 when unavailable. */
    public static final int secondPosition() {
        if (mService != null) {
            try {
                return mService.secondPosition();
            } catch (final RemoteException ignored) {
            } catch (final IllegalStateException ex) {
            }
        }
        return 0;
    }

    /** @return track duration in ms, or 0 when unavailable. */
    public static final long duration() {
        if (mService != null) {
            try {
                return mService.duration();
            } catch (final RemoteException ignored) {
            } catch (final IllegalStateException ignored) {
            }
        }
        return 0;
    }

    /** Empties the play queue; no-op when the service is unbound. */
    public static void clearQueue() {
        try {
            if (mService != null) {
                mService.removeTracks(0, Integer.MAX_VALUE);
            }
        } catch (final RemoteException ignored) {
        }
    }

    /** Appends {@code list} to the end of the queue; no-op when unbound. */
    public static void addToQueue(final Context context, final long[] list, long sourceId) {
        if (mService == null) {
            return;
        }
        try {
            mService.enqueue(list, null, MediaService.LAST);
        } catch (final RemoteException ignored) {
        }
    }

    /**
     * Appends the given song ids to a MediaStore playlist, in batches of 1000,
     * continuing the playlist's existing play_order sequence.
     */
    public static void addToPlaylist(final Context context, final long[] ids, final long playlistid) {
        final int size = ids.length;
        final ContentResolver resolver = context.getContentResolver();
        final String[] projection = new String[]{
                "max(" + "play_order" + ")",
        };
        final Uri uri = MediaStore.Audio.Playlists.Members.getContentUri("external", playlistid);
        Cursor cursor = null;
        int base = 0;
        try {
            cursor = resolver.query(uri, projection, null, null, null);
            if (cursor != null && cursor.moveToFirst()) {
                // Continue numbering after the current highest play_order.
                base = cursor.getInt(0) + 1;
            }
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
        for (int offSet = 0; offSet < size; offSet += 1000) {
            makeInsertItems(ids, offSet, 1000, base);
            resolver.bulkInsert(uri, mContentValuesCache);
        }
    }

    /**
     * Fills {@link #mContentValuesCache} with up to {@code len} playlist-member
     * rows starting at {@code offset}; the cache array is reused across batches.
     */
    public static void makeInsertItems(final long[] ids, final int offset, int len, final int base) {
        if (offset + len > ids.length) {
            len = ids.length - offset;
        }
        if (mContentValuesCache == null || mContentValuesCache.length != len) {
            mContentValuesCache = new ContentValues[len];
        }
        for (int i = 0; i < len; i++) {
            if (mContentValuesCache[i] == null) {
                mContentValuesCache[i] = new ContentValues();
            }
            mContentValuesCache[i].put(MediaStore.Audio.Playlists.Members.PLAY_ORDER, base + offset + i);
            mContentValuesCache[i].put(MediaStore.Audio.Playlists.Members.AUDIO_ID, ids[offset + i]);
        }
    }

    /**
     * Creates a new MediaStore playlist unless one with the same name exists.
     *
     * @return the new playlist id, or -1 when the name is empty, a playlist
     *         with that name already exists, or the insert failed
     */
    public static final long createPlaylist(final Context context, final String name) {
        if (name == null || name.length() == 0) {
            return -1;
        }
        final ContentResolver resolver = context.getContentResolver();
        final String[] projection = new String[]{
                MediaStore.Audio.PlaylistsColumns.NAME
        };
        // FIX: parameterized selection instead of concatenating the user-typed
        // name (broken/injectable for names containing a single quote).
        final String selection = MediaStore.Audio.PlaylistsColumns.NAME + " = ?";
        Cursor cursor = null;
        try {
            cursor = resolver.query(MediaStore.Audio.Playlists.EXTERNAL_CONTENT_URI,
                    projection, selection, new String[]{name}, null);
            if (cursor == null || cursor.getCount() <= 0) {
                final ContentValues values = new ContentValues(1);
                values.put(MediaStore.Audio.PlaylistsColumns.NAME, name);
                final Uri uri = resolver.insert(MediaStore.Audio.Playlists.EXTERNAL_CONTENT_URI,
                        values);
                // FIX: a failed insert returns null; report -1 instead of NPE.
                return uri != null ? Long.parseLong(uri.getLastPathSegment()) : -1;
            }
            return -1;
        } finally {
            // FIX: the original leaked the cursor on the "created" path.
            if (cursor != null) {
                cursor.close();
            }
        }
    }

    /** Tears down all bindings and asks the service process to exit. */
    public static void exitService() {
        try {
            mConnectionMap.clear();
            Log.e("exitmp", "Destroying service");
            // FIX: guard against NPE when the service was never bound.
            if (mService != null) {
                mService.exit();
            }
        } catch (Exception e) {
        }
    }

    /** Arms the sleep timer for {@code time} ms; no-op when unbound. */
    public static void timing(int time) {
        if (mService == null) {
            return;
        }
        try {
            mService.timing(time);
        } catch (Exception e) {
        }
    }

    /**
     * ServiceConnection that captures the AIDL proxy into {@link #mService}
     * and forwards connect/disconnect events to an optional client callback.
     */
    public static final class ServiceBinder implements ServiceConnection {
        private final ServiceConnection mCallback;
        private final Context mContext;

        public ServiceBinder(final ServiceConnection callback, final Context context) {
            mCallback = callback;
            mContext = context;
        }

        @Override
        public void onServiceConnected(final ComponentName className, final IBinder service) {
            mService = MediaAidlInterface.Stub.asInterface(service);
            if (mCallback != null) {
                mCallback.onServiceConnected(className, service);
            }
            initPlaybackServiceWithSettings(mContext);
        }

        @Override
        public void onServiceDisconnected(final ComponentName className) {
            if (mCallback != null) {
                mCallback.onServiceDisconnected(className);
            }
            mService = null;
        }
    }

    /** Opaque handle returned by {@link #bindToService}; required to unbind. */
    public static final class ServiceToken {
        public ContextWrapper mWrappedContext;

        public ServiceToken(final ContextWrapper context) {
            mWrappedContext = context;
        }
    }
}
| |
/**
* <copyright>
* </copyright>
*
*
*/
package eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.dialogs.ContainerSelectionDialog;
/**
* The NewFileWizardPage allows setting the container for the new file, as well as
* the file name. The page will only accept file names without extension OR with
* an extension that matches the expected one.
*/
public class HyvalidityformulaNewFileWizardPage extends WizardPage {
	/** Expected extension (without dot) that new file names must end with. */
	private final String fileExtension;
	/** Text field holding the workspace path of the target container. */
	private Text containerText;
	/** Text field holding the new file's name. */
	private Text fileText;
	/** Workbench selection active when the wizard was opened; seeds defaults. */
	private ISelection selection;
	/**
	 * Constructor for the NewFileWizardPage.
	 */
	public HyvalidityformulaNewFileWizardPage(ISelection selection, String fileExtension) {
		super("wizardPage");
		setTitle(eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaUIResourceBundle.NEW_FILE_WIZARD_PAGE_TITLE);
		setDescription(eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaUIResourceBundle.NEW_FILE_WIZARD_DESCRIPTION);
		this.selection = selection;
		this.fileExtension = fileExtension;
	}
	/**
	 * Builds the page UI: a 3-column grid with a container row (label, text,
	 * browse button) and a file-name row, then seeds defaults and validates.
	 *
	 * @see IDialogPage#createControl(Composite)
	 */
	public void createControl(Composite parent) {
		Composite container = new Composite(parent, SWT.NULL);
		GridLayout layout = new GridLayout();
		container.setLayout(layout);
		layout.numColumns = 3;
		layout.verticalSpacing = 9;
		Label label = new Label(container, SWT.NULL);
		label.setText("&Container:");
		containerText = new Text(container, SWT.BORDER | SWT.SINGLE);
		GridData gd = new GridData(GridData.FILL_HORIZONTAL);
		containerText.setLayoutData(gd);
		// Re-validate on every keystroke in the container field.
		containerText.addModifyListener(new ModifyListener() {
			public void modifyText(ModifyEvent e) {
				dialogChanged();
			}
		});
		Button button = new Button(container, SWT.PUSH);
		button.setText("Browse...");
		button.addSelectionListener(new SelectionAdapter() {
			public void widgetSelected(SelectionEvent e) {
				handleBrowse();
			}
		});
		label = new Label(container, SWT.NULL);
		label.setText("&File name:");
		fileText = new Text(container, SWT.BORDER | SWT.SINGLE);
		gd = new GridData(GridData.FILL_HORIZONTAL);
		fileText.setLayoutData(gd);
		// Re-validate on every keystroke in the file-name field.
		fileText.addModifyListener(new ModifyListener() {
			public void modifyText(ModifyEvent e) {
				dialogChanged();
			}
		});
		initialize();
		dialogChanged();
		setControl(container);
	}
	/**
	 * Tests if the current workbench selection is a suitable container to use.
	 */
	private void initialize() {
		// Fall back to the bundle's default file name when nothing better is selected.
		String name = eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaUIResourceBundle.NEW_FILE_WIZARD_FILE_NAME;
		if (selection != null && selection.isEmpty() == false && selection instanceof IStructuredSelection) {
			IStructuredSelection ssel = (IStructuredSelection) selection;
			if (ssel.size() > 1) {
				return;
			}
			Object obj = ssel.getFirstElement();
			// test for IAdaptable
			if ((! (obj instanceof IResource)) && (obj instanceof IAdaptable)) {
				obj = (IResource) ((IAdaptable) obj).getAdapter(IResource.class);
			}
			if (obj instanceof IResource) {
				IContainer container;
				if (obj instanceof IContainer) {
					container = (IContainer) obj;
				} else {
					IResource resource = (IResource) obj;
					container = resource.getParent();
					// we use the name of the currently selected file instead of 'new_file'.
					name = resource.getFullPath().removeFileExtension().lastSegment();
					name = name + "." + fileExtension;
				}
				IPath fullPath = container.getFullPath();
				containerText.setText(fullPath.toString());
			}
		}
		// Select default name for new file
		fileText.setText(name);
		// Select file name without extension
		int indexOfDot = name.lastIndexOf(".");
		if (indexOfDot > 0) {
			fileText.setSelection(0, indexOfDot);
		}
	}
	// Focus the file-name field whenever the page becomes visible.
	public void setVisible(boolean visible) {
		super.setVisible(visible);
		if (visible) {
			fileText.setFocus();
		}
	}
	/**
	 * Uses the standard container selection dialog to choose the new value for the
	 * container field.
	 */
	private void handleBrowse() {
		ContainerSelectionDialog dialog = new ContainerSelectionDialog(getShell(), ResourcesPlugin.getWorkspace().getRoot(), false, "Select new file container");
		if (dialog.open() == ContainerSelectionDialog.OK) {
			Object[] result = dialog.getResult();
			if (result.length == 1) {
				containerText.setText(((Path) result[0]).toString());
			}
		}
	}
	/**
	 * Ensures that both text fields are set.
	 */
	private void dialogChanged() {
		IResource container = ResourcesPlugin.getWorkspace().getRoot().findMember(new Path(getContainerName()));
		String fileName = getFileName();
		if (getContainerName().length() == 0) {
			updateStatus("File container must be specified");
			return;
		}
		// Container must resolve to an existing project or folder (not a file).
		if (container == null || (container.getType() & (IResource.PROJECT | IResource.FOLDER)) == 0) {
			updateStatus("File container must exist");
			return;
		}
		if (!container.isAccessible()) {
			updateStatus("Project must be writable");
			return;
		}
		if (fileName.length() == 0) {
			updateStatus("File name must be specified");
			return;
		}
		// Reject path separators inside the file name (index 0 would be a leading slash).
		if (fileName.replace('\\', '/').indexOf('/', 1) > 0) {
			updateStatus("File name must be valid");
			return;
		}
		if (!fileName.endsWith("." + fileExtension)) {
			updateStatus("File extension must be \"" + fileExtension + "\"");
			return;
		}
		// null message == no error: marks the page complete.
		updateStatus(null);
	}
	/** Shows {@code message} as the page error; a null message marks the page complete. */
	private void updateStatus(String message) {
		setErrorMessage(message);
		setPageComplete(message == null);
	}
	/** @return the container path as currently typed. */
	public String getContainerName() {
		return containerText.getText();
	}
	/** @return the file name as currently typed. */
	public String getFileName() {
		return fileText.getText();
	}
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.TestServiceDirectory;
import org.onlab.rest.BaseResource;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.impl.CodecManager;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Link;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.net.topology.ClusterId;
import org.onosproject.net.topology.DefaultTopologyCluster;
import org.onosproject.net.topology.DefaultTopologyVertex;
import org.onosproject.net.topology.Topology;
import org.onosproject.net.topology.TopologyCluster;
import org.onosproject.net.topology.TopologyService;
import org.onosproject.net.topology.TopologyServiceAdapter;
import javax.ws.rs.client.WebTarget;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.onosproject.net.NetTestTools.did;
import static org.onosproject.net.NetTestTools.link;
/**
* Unit tests for Topology REST APIs.
*/
public class TopologyResourceTest extends ResourceTest {
	// Fixed-value topology snapshot; the constants below are what the REST
	// assertions in this class check against.
	private static class MockTopology implements Topology {
		@Override
		public long time() {
			return 11111L;
		}
		@Override
		public long creationTime() {
			return 22222L;
		}
		@Override
		public long computeCost() {
			return 0;
		}
		@Override
		public int clusterCount() {
			return 2;
		}
		@Override
		public int deviceCount() {
			return 6;
		}
		@Override
		public int linkCount() {
			return 4;
		}
		@Override
		public ProviderId providerId() {
			return ProviderId.NONE;
		}
	}
	// TopologyService stub that serves the mock topology, two canned clusters,
	// two devices and three links; infrastructure/broadcast answers key off
	// the device id ("dev2" / "dev1" respectively).
	private static class MockTopologyService extends TopologyServiceAdapter {
		final DefaultTopologyVertex root = new DefaultTopologyVertex(did("rootnode"));
		final Topology topology = new MockTopology();
		final TopologyCluster cluster1 =
			new DefaultTopologyCluster(ClusterId.clusterId(0),
				2, 1, root);
		final TopologyCluster cluster2 =
			new DefaultTopologyCluster(ClusterId.clusterId(1),
				4, 3, root);
		@Override
		public Topology currentTopology() {
			return topology;
		}
		@Override
		public Set<TopologyCluster> getClusters(Topology topology) {
			return ImmutableSet.of(cluster1, cluster2);
		}
		@Override
		public TopologyCluster getCluster(Topology topology, ClusterId clusterId) {
			// Always cluster1, regardless of the requested id.
			return cluster1;
		}
		@Override
		public Set<DeviceId> getClusterDevices(Topology topology, TopologyCluster cluster) {
			DeviceId device1 = did("dev1");
			DeviceId device2 = did("dev2");
			return ImmutableSet.of(device1, device2);
		}
		@Override
		public Set<Link> getClusterLinks(Topology topology, TopologyCluster cluster) {
			Link link1 = link("src1", 1, "dst1", 1);
			Link link2 = link("src2", 1, "dst2", 1);
			Link link3 = link("src3", 1, "dst3", 1);
			return ImmutableSet.of(link1, link2, link3);
		}
		@Override
		public boolean isInfrastructure(Topology topology, ConnectPoint connectPoint) {
			return connectPoint.elementId().toString().equals("dev2");
		}
		@Override
		public boolean isBroadcastPoint(Topology topology, ConnectPoint connectPoint) {
			return connectPoint.elementId().toString().equals("dev1");
		}
	}
	/**
	 * Initializes the test harness.
	 */
	@Before
	public void setUpTest() {
		TopologyService topologyService = new MockTopologyService();
		CodecManager codecService = new CodecManager();
		codecService.activate();
		// Register the mocks so the REST resources under test resolve them.
		ServiceDirectory testDirectory =
			new TestServiceDirectory()
				.add(TopologyService.class, topologyService)
				.add(CodecService.class, codecService);
		BaseResource.setServiceDirectory(testDirectory);
	}
	/**
	 * Tests the topology overview.
	 */
	@Test
	public void getTopology() {
		WebTarget wt = target();
		String response = wt.path("topology").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		// Expect exactly time/clusters/devices/links, mirroring MockTopology.
		assertThat(result.names(), hasSize(4));
		assertThat(result.get("time").asLong(), is(11111L));
		assertThat(result.get("clusters").asLong(), is(2L));
		assertThat(result.get("devices").asLong(), is(6L));
		assertThat(result.get("links").asLong(), is(4L));
	}
	/**
	 * Tests the clusters overview.
	 */
	@Test
	public void getTopologyClusters() {
		WebTarget wt = target();
		String response = wt.path("topology/clusters").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		assertThat(result.names(), hasSize(1));
		// Both canned clusters from MockTopologyService must be listed.
		JsonArray clusters = result.get("clusters").asArray();
		assertThat(clusters, notNullValue());
		assertThat(clusters.size(), is(2));
	}
	/**
	 * Tests an individual cluster overview.
	 */
	@Test
	public void getCluster() {
		WebTarget wt = target();
		String response = wt.path("topology/clusters/0").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		// Values mirror cluster1: id 0, 2 devices, 1 link, rooted at "rootnode".
		assertThat(result.get("id").asLong(), is(0L));
		assertThat(result.get("deviceCount").asLong(), is(2L));
		assertThat(result.get("linkCount").asLong(), is(1L));
		assertThat(result.get("root").asString(), containsString("rootnode"));
		assertThat(result.names(), hasSize(4));
	}
	/**
	 * Tests an individual cluster's devices list.
	 */
	@Test
	public void getClusterDevices() {
		WebTarget wt = target();
		String response = wt.path("topology/clusters/0/devices").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		// NetTestTools.did() produces "of:"-prefixed device ids.
		JsonArray devices = result.get("devices").asArray();
		assertThat(devices.size(), is(2));
		assertThat(devices.get(0).asString(), is("of:dev1"));
		assertThat(devices.get(1).asString(), is("of:dev2"));
	}
	/**
	 * Tests an individual cluster's links list.
	 */
	@Test
	public void getClusterLinks() {
		WebTarget wt = target();
		String response = wt.path("topology/clusters/1/links").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		JsonArray links = result.get("links").asArray();
		assertThat(links.size(), is(3));
		// Spot-check the first and last links' source devices.
		JsonObject link0 = links.get(0).asObject();
		JsonObject src0 = link0.get("src").asObject();
		String device0 = src0.get("device").asString();
		assertThat(device0, is("of:src1"));
		JsonObject link2 = links.get(2).asObject();
		JsonObject src2 = link2.get("src").asObject();
		String device2 = src2.get("device").asString();
		assertThat(device2, is("of:src3"));
	}
	/**
	 * Tests a broadcast query.
	 */
	@Test
	public void getBroadcast() {
		WebTarget wt = target();
		// "dev1" is the mock's designated broadcast point.
		String response = wt.path("topology/broadcast/dev1:1").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		assertThat(result.get("broadcast").asBoolean(), is(true));
	}
	/**
	 * Tests an infrastructure query.
	 */
	@Test
	public void getInfrastructure() {
		WebTarget wt = target();
		// "dev2" is the mock's designated infrastructure point.
		String response = wt.path("topology/infrastructure/dev2:1").request().get(String.class);
		JsonObject result = Json.parse(response).asObject();
		assertThat(result, notNullValue());
		assertThat(result.get("infrastructure").asBoolean(), is(true));
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.block;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.ByteArrayBlock;
import com.facebook.presto.spi.block.RowBlockBuilder;
import com.facebook.presto.spi.block.SingleRowBlock;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static com.facebook.presto.spi.block.RowBlock.fromFieldBlocks;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static io.airlift.slice.Slices.utf8Slice;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.assertj.core.api.Fail.fail;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class TestRowBlock
        extends AbstractTestBlock
{
    /**
     * Round-trips rows of (VARCHAR, BIGINT) through a RowBlockBuilder, both as-is
     * and with every other row replaced by null.
     */
    @Test
    public void testWithVarcharBigint()
    {
        // Was package-private; every other @Test here is public, and TestNG test
        // methods are conventionally public for reliable discovery.
        List<Type> fieldTypes = ImmutableList.of(VARCHAR, BIGINT);
        List<Object>[] testRows = generateTestRows(fieldTypes, 100);
        testWith(fieldTypes, testRows);
        testWith(fieldTypes, alternatingNullValues(testRows));
    }

    /**
     * Verifies that the per-position estimated data size reported by both the
     * builder and the built block matches the expected raw size of each row.
     */
    @Test
    public void testEstimatedDataSizeForStats()
    {
        List<Type> fieldTypes = ImmutableList.of(VARCHAR, BIGINT);
        List<Object>[] expectedValues = alternatingNullValues(generateTestRows(fieldTypes, 100));
        BlockBuilder blockBuilder = createBlockBuilderWithValues(fieldTypes, expectedValues);
        Block block = blockBuilder.build();
        assertEquals(block.getPositionCount(), expectedValues.length);
        for (int i = 0; i < block.getPositionCount(); i++) {
            int expectedSize = getExpectedEstimatedDataSize(expectedValues[i]);
            assertEquals(blockBuilder.getEstimatedDataSizeForStats(i), expectedSize);
            assertEquals(block.getEstimatedDataSizeForStats(i), expectedSize);
        }
    }

    // Expected raw data size of one row: string length for the VARCHAR field plus
    // Long.BYTES for the BIGINT field; a null row or null field contributes 0.
    // (Test strings are ASCII, so String.length() equals the UTF-8 byte count.)
    private int getExpectedEstimatedDataSize(List<Object> row)
    {
        if (row == null) {
            return 0;
        }
        int size = 0;
        size += row.get(0) == null ? 0 : ((String) row.get(0)).length();
        size += row.get(1) == null ? 0 : Long.BYTES;
        return size;
    }

    @Test
    public void testCompactBlock()
    {
        Block emptyBlock = new ByteArrayBlock(0, Optional.empty(), new byte[0]);
        Block compactFieldBlock1 = new ByteArrayBlock(5, Optional.empty(), createExpectedValue(5).getBytes());
        Block compactFieldBlock2 = new ByteArrayBlock(5, Optional.empty(), createExpectedValue(5).getBytes());
        // 6-byte backing arrays for 5 positions make these field blocks incompact
        Block incompactFieldBlock1 = new ByteArrayBlock(5, Optional.empty(), createExpectedValue(6).getBytes());
        Block incompactFieldBlock2 = new ByteArrayBlock(5, Optional.empty(), createExpectedValue(6).getBytes());
        boolean[] rowIsNull = {false, true, false, false, false, false};

        assertCompact(fromFieldBlocks(0, Optional.empty(), new Block[] {emptyBlock, emptyBlock}));
        assertCompact(fromFieldBlocks(rowIsNull.length, Optional.of(rowIsNull), new Block[] {compactFieldBlock1, compactFieldBlock2}));
        // TODO: add test case for a sliced RowBlock

        // A RowBlock is incompact if any underlying field block is incompact.
        // The two calls below were previously byte-identical duplicates; each
        // now exercises one incompact field block in turn.
        testIncompactBlock(fromFieldBlocks(rowIsNull.length, Optional.of(rowIsNull), new Block[] {incompactFieldBlock1, compactFieldBlock2}));
        testIncompactBlock(fromFieldBlocks(rowIsNull.length, Optional.of(rowIsNull), new Block[] {compactFieldBlock1, incompactFieldBlock2}));
    }

    // Builds a block from the rows, then checks value round-trip through both the
    // builder and the built block, for all positions and for a filtered subset.
    private void testWith(List<Type> fieldTypes, List<Object>[] expectedValues)
    {
        BlockBuilder blockBuilder = createBlockBuilderWithValues(fieldTypes, expectedValues);
        assertBlock(blockBuilder, () -> blockBuilder.newBlockBuilderLike(null), expectedValues);
        assertBlock(blockBuilder.build(), () -> blockBuilder.newBlockBuilderLike(null), expectedValues);
        IntArrayList positionList = generatePositionList(expectedValues.length, expectedValues.length / 2);
        assertBlockFilteredPositions(expectedValues, blockBuilder, () -> blockBuilder.newBlockBuilderLike(null), positionList.toIntArray());
        assertBlockFilteredPositions(expectedValues, blockBuilder.build(), () -> blockBuilder.newBlockBuilderLike(null), positionList.toIntArray());
    }

    // Writes the given rows into a fresh RowBlockBuilder; null rows become null
    // positions, null fields become null entries within a row.
    private BlockBuilder createBlockBuilderWithValues(List<Type> fieldTypes, List<Object>[] rows)
    {
        BlockBuilder rowBlockBuilder = new RowBlockBuilder(fieldTypes, null, 1);
        for (List<Object> row : rows) {
            if (row == null) {
                rowBlockBuilder.appendNull();
            }
            else {
                BlockBuilder singleRowBlockWriter = rowBlockBuilder.beginBlockEntry();
                for (Object fieldValue : row) {
                    if (fieldValue == null) {
                        singleRowBlockWriter.appendNull();
                    }
                    else if (fieldValue instanceof Long) {
                        BIGINT.writeLong(singleRowBlockWriter, ((Long) fieldValue).longValue());
                    }
                    else if (fieldValue instanceof String) {
                        VARCHAR.writeSlice(singleRowBlockWriter, utf8Slice((String) fieldValue));
                    }
                    else {
                        throw new IllegalArgumentException("Unsupported field value type: " + fieldValue.getClass());
                    }
                }
                rowBlockBuilder.closeEntry();
            }
        }
        return rowBlockBuilder;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected <T> void assertCheckedPositionValue(Block block, int position, T expectedValue)
    {
        if (expectedValue instanceof List) {
            assertValue(block, position, (List<Object>) expectedValue);
            return;
        }
        super.assertCheckedPositionValue(block, position, expectedValue);
    }

    @Override
    @SuppressWarnings("unchecked")
    protected <T> void assertPositionValueUnchecked(Block block, int internalPosition, T expectedValue)
    {
        if (expectedValue instanceof List) {
            assertValueUnchecked(block, internalPosition, (List<Object>) expectedValue);
            return;
        }
        super.assertPositionValueUnchecked(block, internalPosition, expectedValue);
    }

    // Asserts that the row at the given (checked) position matches the expected row.
    private void assertValue(Block rowBlock, int position, List<Object> row)
    {
        // null rows are handled by assertPositionValue
        requireNonNull(row, "row is null");
        assertFalse(rowBlock.isNull(position));
        SingleRowBlock singleRowBlock = (SingleRowBlock) rowBlock.getBlock(position);
        assertEquals(singleRowBlock.getPositionCount(), row.size());
        for (int i = 0; i < row.size(); i++) {
            Object fieldValue = row.get(i);
            if (fieldValue == null) {
                assertTrue(singleRowBlock.isNull(i));
            }
            else if (fieldValue instanceof Long) {
                assertEquals(BIGINT.getLong(singleRowBlock, i), ((Long) fieldValue).longValue());
            }
            else if (fieldValue instanceof String) {
                assertEquals(VARCHAR.getSlice(singleRowBlock, i), utf8Slice((String) fieldValue));
            }
            else {
                // consistent with assertValueUnchecked: report the offending type
                fail("Unexpected type: " + fieldValue.getClass().getSimpleName());
            }
        }
    }

    // Asserts the row at the given internal (unchecked) position; field positions
    // must be offset by the single-row block's offset base.
    private void assertValueUnchecked(Block rowBlock, int internalPosition, List<Object> row)
    {
        // null rows are handled by assertPositionValue
        requireNonNull(row, "row is null");
        assertFalse(rowBlock.isNullUnchecked(internalPosition));
        SingleRowBlock singleRowBlock = (SingleRowBlock) rowBlock.getBlockUnchecked(internalPosition);
        assertEquals(singleRowBlock.getPositionCount(), row.size());
        for (int i = 0; i < row.size(); i++) {
            Object fieldValue = row.get(i);
            if (fieldValue == null) {
                assertTrue(singleRowBlock.isNullUnchecked(i + singleRowBlock.getOffsetBase()));
            }
            else if (fieldValue instanceof Long) {
                assertEquals(BIGINT.getLongUnchecked(singleRowBlock, i + singleRowBlock.getOffsetBase()), ((Long) fieldValue).longValue());
            }
            else if (fieldValue instanceof String) {
                assertEquals(VARCHAR.getSliceUnchecked(singleRowBlock, i + singleRowBlock.getOffsetBase()), utf8Slice((String) fieldValue));
            }
            else {
                fail("Unexpected type: " + fieldValue.getClass().getSimpleName());
            }
        }
    }

    // Generates numRows rows matching fieldTypes; every 7th cell (cellId % 7 == 3)
    // is null so that null handling is exercised in both fields.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private List<Object>[] generateTestRows(List<Type> fieldTypes, int numRows)
    {
        List<Object>[] testRows = new List[numRows];
        for (int i = 0; i < numRows; i++) {
            List<Object> testRow = new ArrayList<>(fieldTypes.size());
            for (int j = 0; j < fieldTypes.size(); j++) {
                int cellId = i * fieldTypes.size() + j;
                if (cellId % 7 == 3) {
                    // Put null value for every 7 cells
                    testRow.add(null);
                }
                else if (fieldTypes.get(j) == BIGINT) {
                    testRow.add(i * 100L + j);
                }
                else if (fieldTypes.get(j) == VARCHAR) {
                    testRow.add(format("field(%s, %s)", i, j));
                }
                else {
                    throw new IllegalArgumentException("Unsupported field type: " + fieldTypes.get(j));
                }
            }
            testRows[i] = testRow;
        }
        return testRows;
    }

    // Deterministic, sorted list of numPositions positions in [0, numRows),
    // spread via (7 * i + 3) % numRows; may contain duplicates.
    private IntArrayList generatePositionList(int numRows, int numPositions)
    {
        IntArrayList positions = new IntArrayList(numPositions);
        for (int i = 0; i < numPositions; i++) {
            positions.add((7 * i + 3) % numRows);
        }
        Collections.sort(positions);
        return positions;
    }
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*******************************************************************************/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.1.1-b02-fcs
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2008.09.17 at 09:44:59 AM IDT
//
package org.apache.wink.common.model.opensearch;
import java.math.BigInteger;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;
/**
 * This object contains factory methods for each Java content interface and Java
 * element interface generated in the org.apache.wink.common.model.opensearch
 * package.
 * <p>
 * An ObjectFactory allows you to programmatically construct new instances of the
 * Java representation for XML content. The Java representation of XML content
 * can consist of schema derived interfaces and classes representing the binding
 * of schema type definitions, element declarations and model groups. Factory
 * methods for each of these are provided in this class.
 */
@XmlRegistry
public class ObjectFactory {
// NOTE(review): this class is JAXB-generated (see file header); hand edits will
// be lost when the source schema is recompiled.
// QNames identifying each XML element declared in the OpenSearch 1.1 namespace;
// each is paired with a JAXBElement factory method below.
private final static QName _Image_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Image"); //$NON-NLS-1$
private final static QName _ItemsPerPage_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"itemsPerPage"); //$NON-NLS-1$
private final static QName _ShortName_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"ShortName"); //$NON-NLS-1$
private final static QName _TotalResults_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"totalResults"); //$NON-NLS-1$
private final static QName _Query_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Query"); //$NON-NLS-1$
private final static QName _Developer_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Developer"); //$NON-NLS-1$
private final static QName _OpenSearchDescription_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"OpenSearchDescription"); //$NON-NLS-1$
private final static QName _Contact_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Contact"); //$NON-NLS-1$
private final static QName _Attribution_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Attribution"); //$NON-NLS-1$
private final static QName _SyndicationRight_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"SyndicationRight"); //$NON-NLS-1$
private final static QName _Tags_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Tags"); //$NON-NLS-1$
private final static QName _Language_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Language"); //$NON-NLS-1$
private final static QName _OutputEncoding_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"OutputEncoding"); //$NON-NLS-1$
private final static QName _InputEncoding_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"InputEncoding"); //$NON-NLS-1$
private final static QName _Url_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Url"); //$NON-NLS-1$
private final static QName _StartIndex_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"startIndex"); //$NON-NLS-1$
private final static QName _LongName_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"LongName"); //$NON-NLS-1$
private final static QName _Description_QNAME =
new QName(
"http://a9.com/-/spec/opensearch/1.1/", //$NON-NLS-1$
"Description"); //$NON-NLS-1$
/**
 * Create a new ObjectFactory that can be used to create new instances of
 * schema derived classes for package:
 * org.apache.wink.common.model.opensearch
 */
public ObjectFactory() {
}
// Plain (non-JAXBElement) factory methods for the schema-derived classes.
/**
 * Create an instance of {@link OpenSearchUrl }
 */
public OpenSearchUrl createOpenSearchUrl() {
return new OpenSearchUrl();
}
/**
 * Create an instance of {@link OpenSearchImage }
 */
public OpenSearchImage createOpenSearchImage() {
return new OpenSearchImage();
}
/**
 * Create an instance of {@link OpenSearchDescription }
 */
public OpenSearchDescription createOpenSearchDescription() {
return new OpenSearchDescription();
}
/**
 * Create an instance of {@link OpenSearchQuery }
 */
public OpenSearchQuery createOpenSearchQuery() {
return new OpenSearchQuery();
}
// JAXBElement factory methods: wrap a value in a JAXBElement tagged with the
// corresponding OpenSearch 1.1 element QName declared above.
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link OpenSearchImage }
 * {@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Image")
public JAXBElement<OpenSearchImage> createImage(OpenSearchImage value) {
return new JAXBElement<OpenSearchImage>(_Image_QNAME, OpenSearchImage.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link BigInteger }
 * {@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "itemsPerPage")
public JAXBElement<BigInteger> createItemsPerPage(BigInteger value) {
return new JAXBElement<BigInteger>(_ItemsPerPage_QNAME, BigInteger.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "ShortName")
public JAXBElement<String> createShortName(String value) {
return new JAXBElement<String>(_ShortName_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link BigInteger }
 * {@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "totalResults")
public JAXBElement<BigInteger> createTotalResults(BigInteger value) {
return new JAXBElement<BigInteger>(_TotalResults_QNAME, BigInteger.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link OpenSearchQuery }
 * {@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Query")
public JAXBElement<OpenSearchQuery> createQuery(OpenSearchQuery value) {
return new JAXBElement<OpenSearchQuery>(_Query_QNAME, OpenSearchQuery.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Developer")
public JAXBElement<String> createDeveloper(String value) {
return new JAXBElement<String>(_Developer_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}
 * {@link OpenSearchDescription }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "OpenSearchDescription")
public JAXBElement<OpenSearchDescription> createOpenSearchDescription(OpenSearchDescription value) {
return new JAXBElement<OpenSearchDescription>(_OpenSearchDescription_QNAME,
OpenSearchDescription.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Contact")
public JAXBElement<String> createContact(String value) {
return new JAXBElement<String>(_Contact_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Attribution")
public JAXBElement<String> createAttribution(String value) {
return new JAXBElement<String>(_Attribution_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "SyndicationRight", defaultValue = "open")
public JAXBElement<String> createSyndicationRight(String value) {
return new JAXBElement<String>(_SyndicationRight_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Tags")
public JAXBElement<String> createTags(String value) {
return new JAXBElement<String>(_Tags_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Language")
public JAXBElement<String> createLanguage(String value) {
return new JAXBElement<String>(_Language_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "OutputEncoding", defaultValue = "UTF-8")
public JAXBElement<String> createOutputEncoding(String value) {
return new JAXBElement<String>(_OutputEncoding_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "InputEncoding", defaultValue = "UTF-8")
public JAXBElement<String> createInputEncoding(String value) {
return new JAXBElement<String>(_InputEncoding_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link OpenSearchUrl }
 * {@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Url")
public JAXBElement<OpenSearchUrl> createUrl(OpenSearchUrl value) {
return new JAXBElement<OpenSearchUrl>(_Url_QNAME, OpenSearchUrl.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link BigInteger }
 * {@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "startIndex")
public JAXBElement<BigInteger> createStartIndex(BigInteger value) {
return new JAXBElement<BigInteger>(_StartIndex_QNAME, BigInteger.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "LongName")
public JAXBElement<String> createLongName(String value) {
return new JAXBElement<String>(_LongName_QNAME, String.class, null, value);
}
/**
 * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
 */
@XmlElementDecl(namespace = "http://a9.com/-/spec/opensearch/1.1/", name = "Description")
public JAXBElement<String> createDescription(String value) {
return new JAXBElement<String>(_Description_QNAME, String.class, null, value);
}
}
| |
/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.impl.scripting;
import com.evolveum.midpoint.model.api.ScriptExecutionException;
import com.evolveum.midpoint.model.impl.scripting.expressions.SearchEvaluator;
import com.evolveum.midpoint.model.impl.scripting.expressions.SelectEvaluator;
import com.evolveum.midpoint.model.impl.scripting.helpers.JaxbHelper;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.marshaller.QueryConvertor;
import com.evolveum.midpoint.prism.query.ObjectFilter;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ActionExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ExecuteScriptType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ExpressionPipelineType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ExpressionSequenceType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.FilterExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ForeachExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ObjectFactory;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ScriptingExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.SearchExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.SelectExpressionType;
import com.evolveum.prism.xml.ns._public.types_3.RawType;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.Validate;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.xml.bind.JAXBElement;
import javax.xml.namespace.QName;
import java.util.HashMap;
import java.util.Map;
/**
* Main entry point for evaluating scripting expressions.
*
* @author mederly
*/
@Component
public class ScriptingExpressionEvaluator {
private static final Trace LOGGER = TraceManager.getTrace(ScriptingExpressionEvaluator.class);
private static final String DOT_CLASS = ScriptingExpressionEvaluator.class + ".";
@Autowired
private TaskManager taskManager;
@Autowired
private SearchEvaluator searchEvaluator;
@Autowired
private SelectEvaluator selectEvaluator;
@Autowired
private JaxbHelper jaxbHelper;
@Autowired
private PrismContext prismContext;
private ObjectFactory objectFactory = new ObjectFactory();
private Map<String,ActionExecutor> actionExecutors = new HashMap<>();
/**
* Asynchronously executes simple scripting expressions, consisting of one search command and one action.
*
* @param objectType Object type to search (e.g. c:UserType)
* @param filter Filter to be applied (ObjectFilter)
* @param actionName Action to be executed on objects found (e.g. "disable", "delete", "recompute", etc).
* @param task Task in context of which the script should execute. The task should be "clean", i.e.
* (1) transient, (2) without any handler. This method puts the task into background,
* and assigns ScriptExecutionTaskHandler to it, to execute the script.
* @param parentResult
* @throws SchemaException
*/
public void evaluateExpressionInBackground(QName objectType, ObjectFilter filter, String actionName, Task task, OperationResult parentResult) throws SchemaException {
Validate.notNull(objectType);
Validate.notNull(actionName);
Validate.notNull(task);
SearchExpressionType search = new SearchExpressionType();
search.setType(objectType);
if (filter != null) {
search.setSearchFilter(QueryConvertor.createSearchFilterType(filter, prismContext));
}
ActionExpressionType action = new ActionExpressionType();
action.setType(actionName);
search.setScriptingExpression(objectFactory.createAction(action));
evaluateExpressionInBackground(search, task, parentResult);
}
/**
* Asynchronously executes any scripting expression.
*
* @param expression Expression to be executed.
* @param task Task in context of which the script should execute. The task should be "clean", i.e.
* (1) transient, (2) without any handler. This method puts the task into background,
* and assigns ScriptExecutionTaskHandler to it, to execute the script.
* @param parentResult
* @throws SchemaException
*/
public void evaluateExpressionInBackground(ScriptingExpressionType expression, Task task, OperationResult parentResult) throws SchemaException {
OperationResult result = parentResult.createSubresult(DOT_CLASS + "evaluateExpressionInBackground");
if (!task.isTransient()) {
throw new IllegalStateException("Task must be transient");
}
if (task.getHandlerUri() != null) {
throw new IllegalStateException("Task must not have a handler");
}
ExecuteScriptType executeScriptType = new ExecuteScriptType();
executeScriptType.setScriptingExpression(jaxbHelper.toJaxbElement(expression));
task.setExtensionPropertyValue(SchemaConstants.SE_EXECUTE_SCRIPT, executeScriptType);
task.setHandlerUri(ScriptExecutionTaskHandler.HANDLER_URI);
taskManager.switchToBackground(task, result);
result.computeStatus();
}
/**
* Entry point to _synchronous_ script execution, with no input data.
*
* @param expression Scripting expression to execute.
* @param task Task in context of which the script should execute (in foreground!)
* @param result Operation result
* @return ExecutionContext, from which the caller can retrieve the output data via getFinalOutput() method,
* and the console output via getConsoleOutput() method.
* @throws com.evolveum.midpoint.model.api.ScriptExecutionException
*/
public ExecutionContext evaluateExpression(ScriptingExpressionType expression, Task task, OperationResult result) throws ScriptExecutionException {
ExecutionContext context = new ExecutionContext(task);
Data output;
try {
output = evaluateExpression(expression, Data.createEmpty(), context, result);
} catch (RuntimeException e) {
result.recordFatalError("Couldn't execute script", e);
throw new ScriptExecutionException("Couldn't execute script: " + e.getMessage(), e);
}
result.computeStatusIfUnknown();
context.setFinalOutput(output);
return context;
}
// public ExecutionContext evaluateExpression(JAXBElement<? extends ScriptingExpressionType> expression, Task task, OperationResult result) throws ScriptExecutionException {
// ExecutionContext context = new ExecutionContext(task);
// Data output = evaluateExpression(expression.getValue(), Data.createEmpty(), context, result);
// result.computeStatusIfUnknown();
// context.setFinalOutput(output);
// return context;
// }
public ExecutionContext evaluateExpression(ExecuteScriptType executeScript, Task task, OperationResult result) throws ScriptExecutionException {
return evaluateExpression(executeScript.getScriptingExpression().getValue(), task, result);
}
// use in tests only (we need the task at least to report progress/statistics)
public ExecutionContext evaluateExpression(ScriptingExpressionType expression, OperationResult result) throws ScriptExecutionException {
Task task = taskManager.createTaskInstance();
return evaluateExpression(expression, task, result);
}
public Data evaluateExpression(JAXBElement<? extends ScriptingExpressionType> expression, Data input, ExecutionContext context, OperationResult parentResult) throws ScriptExecutionException {
return evaluateExpression((ScriptingExpressionType) expression.getValue(), input, context, parentResult);
}
public Data evaluateExpression(ScriptingExpressionType value, Data input, ExecutionContext context, OperationResult parentResult) throws ScriptExecutionException {
OperationResult result = parentResult.createMinorSubresult(DOT_CLASS + "evaluateExpression");
Data output;
if (value instanceof ExpressionPipelineType) {
output = executePipeline((ExpressionPipelineType) value, input, context, result);
} else if (value instanceof ExpressionSequenceType) {
output = executeSequence((ExpressionSequenceType) value, input, context, result);
} else if (value instanceof ForeachExpressionType) {
output = executeForEach((ForeachExpressionType) value, input, context, result);
} else if (value instanceof SelectExpressionType) {
output = selectEvaluator.evaluate((SelectExpressionType) value, input, context, result);
} else if (value instanceof FilterExpressionType) {
output = executeFilter((FilterExpressionType) value, input, context, result);
} else if (value instanceof SearchExpressionType) {
output = searchEvaluator.evaluate((SearchExpressionType) value, input, context, result);
} else if (value instanceof ActionExpressionType) {
output = executeAction((ActionExpressionType) value, input, context, result);
} else {
throw new IllegalArgumentException("Unsupported expression type: " + (value==null?"(null)":value.getClass()));
}
result.computeStatusIfUnknown();
return output;
}
private Data executeAction(ActionExpressionType command, Data input, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
Validate.notNull(command, "command");
Validate.notNull(command.getType(), "command.actionType");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Executing action {} on {}", command.getType(), input.debugDump());
} else if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Executing action {}", command.getType());
}
ActionExecutor executor = actionExecutors.get(command.getType());
if (executor == null) {
throw new IllegalStateException("Unsupported action type: " + command.getType());
} else {
Data retval = executor.execute(command, input, context, result);
result.setSummarizeSuccesses(true);
result.summarize();
return retval;
}
}
private Data executeFilter(FilterExpressionType command, Data input, ExecutionContext context, OperationResult result) {
throw new NotImplementedException();
}
private Data executeForEach(ForeachExpressionType command, Data input, ExecutionContext context, OperationResult result) {
return null;
}
private Data executePipeline(ExpressionPipelineType pipeline, Data data, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
for (ScriptingExpressionType expressionType : pipeline.getExpression()) {
data = evaluateExpression(expressionType, data, context, result);
}
return data;
}
private Data executeSequence(ExpressionSequenceType sequence, Data input, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
Data lastOutput = null;
for (ScriptingExpressionType expressionType : sequence.getExpression()) {
lastOutput = evaluateExpression(expressionType, input, context, result);
}
return lastOutput;
}
public Data evaluateConstantExpression(@NotNull RawType constant, @Nullable Class<?> expectedClass, ExecutionContext context, String desc, OperationResult result) throws ScriptExecutionException {
try {
// TODO fix this brutal hacking
PrismValue value;
if (expectedClass == null) {
value = constant.getParsedValue(null, null);
} else {
Object object = constant.getParsedRealValue(expectedClass);
if (object instanceof Referencable) {
value = ((Referencable) object).asReferenceValue();
} else if (object instanceof Containerable) {
value = ((Containerable) object).asPrismContainerValue();
} else {
value = new PrismPropertyValue<>(object);
}
}
if (value.isRaw()) {
throw new IllegalStateException("Raw value while " + desc + ": " + value + ". Please specify type of the value.");
}
return Data.createItem(value, prismContext);
} catch (SchemaException e) {
throw new ScriptExecutionException(e.getMessage(), e);
}
}
/**
 * Converts a raw constant into a single-string {@code Data} item.
 *
 * @throws ScriptExecutionException wrapping any SchemaException from parsing
 */
public Data evaluateConstantStringExpression(RawType constant, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
    try {
        String parsed = constant.getParsedRealValue(String.class);
        PrismPropertyValue<String> stringValue = new PrismPropertyValue<>(parsed);
        return Data.createItem(stringValue, prismContext);
    } catch (SchemaException e) {
        throw new ScriptExecutionException(e.getMessage(), e);
    }
}
/**
 * Registers the executor responsible for a named scripting action.
 * A later registration under the same name replaces the earlier one.
 */
public void registerActionExecutor(String actionName, ActionExecutor executor) {
    actionExecutors.put(actionName, executor);
}
}
| |
/* The contents of this file are subject to the license and copyright terms
* detailed in the license directory at the root of the source tree (also
* available online at http://fedora-commons.org/license/).
*/
package org.fcrepo.client;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.IOException;
import java.util.ResourceBundle;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDesktopPane;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.JWindow;
import javax.swing.KeyStroke;
import org.apache.axis.AxisFault;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.fcrepo.client.actions.Login;
import org.fcrepo.client.actions.PurgeObject;
import org.fcrepo.client.actions.ViewObject;
import org.fcrepo.client.actions.ViewObjectXML;
import org.fcrepo.client.batch.BatchModify;
import org.fcrepo.client.batch.BatchModifyValidate;
import org.fcrepo.client.console.access.AccessConsole;
import org.fcrepo.client.console.management.ManagementConsole;
import org.fcrepo.client.export.ExportDialog;
import org.fcrepo.client.ingest.IngestDialog;
import org.fcrepo.client.search.Search;
import org.fcrepo.client.utility.ingest.XMLBuilder.OBJECT_TYPE;
import org.fcrepo.common.Constants;
import org.fcrepo.server.access.FedoraAPIA;
import org.fcrepo.server.management.FedoraAPIM;
import org.fcrepo.swing.mdi.MDIDesktopPane;
import org.fcrepo.swing.mdi.WindowMenu;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Fedora Administrator GUI.
*
* @author Chris Wilper
*/
public class Administrator
extends JFrame {
    private static final Logger logger =
            LoggerFactory.getLogger(Administrator.class);

    private static final long serialVersionUID = 1L;

    // The MDI desktop all internal frames (search, consoles, ...) are added to.
    private static MDIDesktopPane s_desktop;

    // Standard toolbar-button height, measured from a JTextField in the ctor.
    private static int s_maxButtonHeight;

    private JDialog m_aboutDialog;

    // Last directory used in a file chooser; shared across dialogs.
    private static File s_lastDir;

    protected static File batchtoolLastDir;

    //public static ClassLoader cl;

    // Shared text area that long-running tools write progress/log output to.
    public static JTextArea WATCH_AREA;

    private static Administrator s_instance;

    private final JLabel m_aboutPic;

    private final JLabel m_aboutText;

    // Connection parameters of the current login session.
    private static String s_protocol;

    private static String s_context;

    private static String s_host;

    private static int s_port;

    private static String s_user;

    private static String s_pass;

    // Global progress bar shown at the bottom of the main window.
    public static JProgressBar PROGRESS;

    public static Downloader DOWNLOADER;

    public static Uploader UPLOADER;

    // Row colors used by object lists to indicate state.
    public static Color ACTIVE_COLOR = new Color(180, 210, 180);

    public static Color INACTIVE_COLOR = new Color(210, 210, 180);

    public static Color DELETED_COLOR = new Color(210, 180, 180);

    public static Color DEFAULT_COLOR = new Color(185, 185, 185);

    public static Color DESKTOP_COLOR = new Color(46, 97, 116); // from website

    public static Color BACKGROUND_COLOR;

    // SOAP stubs for the repository's access and management APIs;
    // null until a successful login.
    public static FedoraAPIA APIA = null;

    public static FedoraAPIM APIM = null;

    // FEDORA_HOME/client, when FEDORA_HOME is set and exists.
    public static File BASE_DIR;

    public static Administrator INSTANCE = null;

    // Version/build info read from the client resource bundle.
    private static ResourceBundle s_const =
            ResourceBundle.getBundle("org.fcrepo.client.resources.Client");

    public static String VERSION = s_const.getString("version");

    public static String BUILD_DATE = s_const.getString("buildDate");
public static final void showErrorDialog(Component parent,
String title,
String explanation,
Exception e) {
if (e instanceof AxisFault) {
StringBuffer authzDetail = new StringBuffer("");
org.w3c.dom.Element[] getFaultDetails =
((AxisFault) e).getFaultDetails();
if (getFaultDetails != null) {
for (Element detail : getFaultDetails) {
if ("Authz".equals(detail.getLocalName())
&& detail.hasChildNodes()) {
NodeList nodeList = detail.getChildNodes();
for (int j = 0; j < nodeList.getLength(); j++) {
authzDetail.append(nodeList.item(j).getNodeValue());
}
}
}
}
if (authzDetail.length() > 0) {
explanation = authzDetail.toString();
}
}
JOptionPane.showMessageDialog(parent,
explanation,
title,
JOptionPane.ERROR_MESSAGE);
}
    /**
     * Builds the Administrator main window: shows a splash screen, attempts a
     * non-interactive login when a host is supplied, then assembles the MDI
     * desktop, menu bar and progress bar. Falls back to an interactive
     * {@code LoginDialog} when no API stubs could be obtained, and exits if
     * login still fails.
     *
     * @param protocol http/https, or null for interactive login
     * @param host     repository host, or null for interactive login
     * @param port     repository port
     * @param context  webapp context path of the repository
     * @param user     user name for non-interactive login
     * @param pass     password for non-interactive login
     */
    public Administrator(String protocol,
                         String host,
                         int port,
                         String context,
                         String user,
                         String pass) {
        super("Fedora Administrator");
        INSTANCE = this;
        WATCH_AREA = new JTextArea();
        WATCH_AREA.setFont(new Font("monospaced", Font.PLAIN, 12));
        WATCH_AREA.setCaretPosition(0);
        // Measure the platform's natural single-line control height once.
        s_maxButtonHeight = new JTextField("test").getPreferredSize().height;
        BACKGROUND_COLOR = new JPanel().getBackground();
        if (host != null) {
            // already must have passed through non-interactive login
            try {
                //APIA=APIAStubFactory.getStub(protocol, host, port, user, pass);
                //APIM=APIMStubFactory.getStub(protocol, host, port, user, pass);
                // ******************************************
                // NEW: use new client utility class
                // FIXME: Get around hardcoding the path in the baseURL
                String baseURL =
                        protocol + "://" + host + ":" + port + "/" + context;
                FedoraClient fc = new FedoraClient(baseURL, user, pass);
                APIA = fc.getAPIA();
                APIM = fc.getAPIM();
                //*******************************************
                setLoginInfo(protocol, host, port, context, user, pass);
            } catch (Exception e) {
                // NOTE(review): failure is swallowed on purpose — APIA/APIM stay
                // null and the interactive LoginDialog below takes over.
                APIA = null;
                APIM = null;
            }
        }
        // Default the file-chooser directory to FEDORA_HOME/client, if present.
        if (Constants.FEDORA_HOME != null) {
            File f = new File(Constants.FEDORA_HOME);
            if (f.exists() && f.isDirectory()) {
                BASE_DIR = new File(f, "client");
                s_lastDir = BASE_DIR;
            }
        }
        // About picture/text are reused later by the Help > About dialog.
        ImageIcon aboutIcon =
                new ImageIcon(ClassLoader.getSystemResource("images/client/fedora/aboutadmin.gif"));
        m_aboutPic = new JLabel(aboutIcon);
        m_aboutText =
                new JLabel("<html>"
                        + "<p>Copyright 2009-2011, DuraSpace</p>"
                        + "<p>Copyright 2008-2009, Fedora Commons, Inc.</p>"
                        + "<p>Copyright 2002-2007, The Rector and Visitors of the</p>"
                        + "<p>University of Virginia and Cornell University.</p><p></p>"
                        + "<p><b>License: </b>This software is subject to the terms of the</p>"
                        + "<p>Apache License, Version 2.0 (the \"License\"); you may not use</p>"
                        + "<p>this software except in compliance with the License. You may</p>"
                        + "<p>obtain a copy of the License at:</p>"
                        + "<blockquote>http://www.apache.org/licenses/</blockquote><p></p>"
                        + "<p>Software distributed under the License is distributed on an \"AS IS\"</p>"
                        + "<p>basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.</p>"
                        + "<p>See the License for the specific language governing rights and</p>"
                        + "<p>limitations under the License.</p><p></p>"
                        + "<p><b>Version: </b>" + VERSION + "</p>"
                        + "<p><b>Build Date: </b>" + BUILD_DATE + "</p>"
                        + "<p></p>"
                        + "<p>See http://fedora-commons.org/ for more information.</p></html>");
        m_aboutText.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        // Splash screen shown while the main window is assembled.
        JPanel splashPicAndText = new JPanel();
        splashPicAndText.setLayout(new BorderLayout());
        splashPicAndText.setBorder(BorderFactory.createLineBorder(Color.black,
                                                                  5));
        splashPicAndText.add(m_aboutPic, BorderLayout.CENTER);
        splashPicAndText.add(m_aboutText, BorderLayout.SOUTH);
        JWindow splashScreen = new JWindow();
        splashScreen.getContentPane().add(splashPicAndText);
        splashScreen.pack();
        // Center the splash screen on the physical screen.
        int xSize = splashScreen.getWidth();
        int ySize = splashScreen.getHeight();
        Dimension screenSize = getToolkit().getScreenSize();
        int xLoc = screenSize.width / 2 - xSize / 2;
        int yLoc = screenSize.height / 2 - ySize / 2;
        splashScreen.setBounds(xLoc, yLoc, xSize, ySize);
        splashScreen.setVisible(true);
        ImageIcon fedoraIcon =
                new ImageIcon(ClassLoader.getSystemResource("images/client/fedora/fedora-icon16.gif"));
        setIconImage(fedoraIcon.getImage());
        // Main layout: scrollable MDI desktop in the center, progress bar below.
        JPanel mainPanel = new JPanel();
        mainPanel.setLayout(new BorderLayout());
        s_desktop = new MDIDesktopPane();
        s_desktop.setBackground(DESKTOP_COLOR);
        s_desktop.setVisible(true);
        mainPanel.add(new JScrollPane(s_desktop), BorderLayout.CENTER);
        PROGRESS = new JProgressBar(0, 2000);
        PROGRESS.setValue(0);
        PROGRESS.setStringPainted(true);
        PROGRESS.setString("");
        mainPanel.add(PROGRESS, BorderLayout.SOUTH);
        getContentPane().add(mainPanel);
        setJMenuBar(createMenuBar());
        //Make dragging faster:
        //s_desktop.putClientProperty("JDesktopPane.dragMode", "outline");
        addWindowListener(new WindowAdapter() {

            @Override
            public void windowClosing(WindowEvent e) {
                dispose();
                System.exit(0);
            }
        });
        splashScreen.setVisible(false);
        s_instance = this;
        // Center the main window on the screen.
        int xs = 1000;
        int ys = 744;
        Dimension sz = getToolkit().getScreenSize();
        int xl = sz.width / 2 - xs / 2;
        int yl = sz.height / 2 - ys / 2;
        setBounds(xl, yl, xs, ys);
        setVisible(true);
        // No stubs yet (null host or failed non-interactive login): ask the user.
        if (APIA == null || APIM == null) {
            new LoginDialog(); // first thing to do... launch login dialog
        }
        // Still no stubs: the user cancelled or login failed — quit.
        if (APIA == null || APIM == null) {
            dispose();
            System.exit(0);
        }
    }
    /** Returns the shared MDI desktop pane that internal frames are added to. */
    public static JDesktopPane getDesktop() {
        return s_desktop;
    }
public void setLoginInfo(String protocol,
String host,
int port,
String context,
String user,
String pass) {
s_protocol = protocol;
s_host = host;
s_port = port;
s_context = context;
s_user = user;
s_pass = pass;
try {
DOWNLOADER = new Downloader(host, port, context, user, pass);
UPLOADER = new Uploader(host, port, context, user, pass);
} catch (IOException ioe) {
}
doTitle();
}
    /** Sets the window title to reflect the logged-in user and repository. */
    public void doTitle() {
        setTitle("Fedora Administrator - " + s_user + "@" + s_host + ":"
                + s_port);
    }

    /** Returns the singleton Administrator window, once constructed. */
    public static Administrator getInstance() {
        return s_instance;
    }
public static JComponent constrainHeight(JComponent component) {
int preferredWidth = component.getPreferredSize().width;
component.setPreferredSize(new Dimension(preferredWidth,
s_maxButtonHeight));
component.setMaximumSize(new Dimension(2048, s_maxButtonHeight));
component.setMinimumSize(new Dimension(preferredWidth,
s_maxButtonHeight));
return component;
}
protected JMenuBar createMenuBar() {
JMenuBar menuBar = new JMenuBar();
JMenu fileMenu = new JMenu("File");
fileMenu.setMnemonic(KeyEvent.VK_F);
// [N]ew
JMenu fileNew = new JMenu("New");
fileNew.setMnemonic(KeyEvent.VK_N);
JMenuItem fileNewObject = new JMenuItem("Data Object", KeyEvent.VK_O);
fileNewObject.setAccelerator(KeyStroke
.getKeyStroke(KeyEvent.VK_N, ActionEvent.CTRL_MASK));
fileNewObject.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new NewObjectDialog(OBJECT_TYPE.dataObject, "New Object");
}
});
JMenuItem fileNewCModel = new JMenuItem("Content Model", KeyEvent.VK_C);
fileNewCModel.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new NewObjectDialog(OBJECT_TYPE.contentModel,
"New Content Model");
}
});
JMenuItem fileNewSDef =
new JMenuItem("Service Definition", KeyEvent.VK_D);
fileNewSDef.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new NewObjectDialog(OBJECT_TYPE.serviceDefinition,
"New Service Definition");
}
});
JMenuItem fileNewSDep =
new JMenuItem("Service Deployment", KeyEvent.VK_M);
fileNewSDep.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new NewObjectDialog(OBJECT_TYPE.serviceDeployment,
"New Service Deployment");
}
});
fileNew.add(fileNewObject);
fileNew.add(fileNewCModel);
fileNew.add(fileNewSDef);
fileNew.add(fileNewSDep);
// [O]pen
JMenuItem fileOpen = new JMenuItem(new ViewObject());
fileOpen.setMnemonic(KeyEvent.VK_O);
fileOpen
.setToolTipText("Launches a viewer/editor for an object and it's components.");
fileOpen.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O,
ActionEvent.CTRL_MASK));
// [I]ngest
JMenu fileIngest = new JMenu("Ingest");
fileIngest.setMnemonic(KeyEvent.VK_I);
JMenu fileIngestOne = new JMenu("One Object");
fileIngestOne.setMnemonic(KeyEvent.VK_O);
JMenuItem fileIngestOneFromFile =
new JMenuItem("From File...", KeyEvent.VK_F);
fileIngestOneFromFile.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new IngestDialog(IngestDialog.ONE_FROM_FILE);
}
});
JMenuItem fileIngestOneFromRepository =
new JMenuItem("From Repository...", KeyEvent.VK_R);
fileIngestOneFromRepository.setAccelerator(KeyStroke
.getKeyStroke(KeyEvent.VK_I, ActionEvent.CTRL_MASK));
fileIngestOneFromRepository.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new IngestDialog(IngestDialog.ONE_FROM_REPOS);
}
});
fileIngestOne.add(fileIngestOneFromFile);
fileIngestOne.add(fileIngestOneFromRepository);
JMenu fileIngestMultiple = new JMenu("Multiple Objects");
fileIngestMultiple.setMnemonic(KeyEvent.VK_M);
JMenuItem fileIngestMultipleFromFile =
new JMenuItem("From Directory...", KeyEvent.VK_D);
fileIngestMultipleFromFile.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new IngestDialog(IngestDialog.MULTI_FROM_DIR);
}
});
JMenuItem fileIngestMultipleFromRepository =
new JMenuItem("From Repository...", KeyEvent.VK_R);
fileIngestMultipleFromRepository
.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new IngestDialog(IngestDialog.MULTI_FROM_REPOS);
}
});
fileIngestMultiple.add(fileIngestMultipleFromFile);
fileIngestMultiple.add(fileIngestMultipleFromRepository);
fileIngest.add(fileIngestOne);
fileIngest.add(fileIngestMultiple);
// [E]xport
JMenu fileExport = new JMenu("Export");
fileExport.setMnemonic(KeyEvent.VK_E);
JMenuItem fileExportObject = new JMenuItem("One Object...");
fileExportObject.setMnemonic(KeyEvent.VK_O);
fileExportObject.setAccelerator(KeyStroke
.getKeyStroke(KeyEvent.VK_E, ActionEvent.CTRL_MASK));
fileExportObject
.setToolTipText("Exports a serialized Digitial Object to disk.");
fileExportObject.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new ExportDialog(ExportDialog.ONE);
}
});
JMenuItem fileExportMultiple = new JMenuItem("Multiple Objects...");
fileExportMultiple.setMnemonic(KeyEvent.VK_M);
fileExportMultiple
.setToolTipText("Exports multiple serialized Digitial Objects to disk.");
fileExportMultiple.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new ExportDialog(ExportDialog.MULTI);
}
});
fileExport.add(fileExportObject);
fileExport.add(fileExportMultiple);
// [V]iew Object XML
JMenuItem fileViewXML = new JMenuItem(new ViewObjectXML());
fileViewXML.setMnemonic(KeyEvent.VK_V);
fileViewXML.setAccelerator(KeyStroke
.getKeyStroke(KeyEvent.VK_V, ActionEvent.CTRL_MASK));
fileViewXML
.setToolTipText("Launches a viewer for the internal XML of an object in the repository.");
// [P]urge Object
JMenuItem filePurge = new JMenuItem(new PurgeObject());
filePurge.setMnemonic(KeyEvent.VK_P);
filePurge.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P,
ActionEvent.CTRL_MASK));
filePurge
.setToolTipText("Permanently removes a Digitial Object from the repository.");
// [L]ogin
JMenuItem fileLogin = new JMenuItem(new Login());
fileLogin.setMnemonic(KeyEvent.VK_R);
fileLogin.setToolTipText("Changes the working repository.");
fileLogin.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_R,
ActionEvent.CTRL_MASK));
// E[x]it
JMenuItem fileExit = new JMenuItem("Exit", KeyEvent.VK_X);
fileExit.setToolTipText("Exits the application");
fileExit.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
dispose();
System.exit(0);
}
});
fileMenu.add(fileNew);
fileMenu.add(fileOpen);
fileMenu.addSeparator();
fileMenu.add(fileIngest);
fileMenu.add(fileExport);
fileMenu.addSeparator();
fileMenu.add(filePurge);
fileMenu.add(fileViewXML);
fileMenu.addSeparator();
fileMenu.add(fileLogin);
fileMenu.add(fileExit);
menuBar.add(fileMenu);
JMenu toolsMenu = new JMenu("Tools");
toolsMenu.setMnemonic(KeyEvent.VK_T);
JMenuItem toolsSearch =
new JMenuItem("Search/Browse Repository", KeyEvent.VK_S);
toolsSearch.setAccelerator(KeyStroke
.getKeyStroke(KeyEvent.VK_S, ActionEvent.CTRL_MASK));
toolsSearch.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
createSearchRepository();
}
});
toolsMenu.add(toolsSearch);
JMenu toolsBatchSubMenu = new JMenu("Batch");
JMenuItem toolsBatchBuild = new JMenuItem("Build Batch"/*
* ,
* KeyEvent.VK_A
*/);
toolsBatchBuild.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
createBatchBuildConsole();
}
});
toolsBatchSubMenu.add(toolsBatchBuild);
JMenuItem toolsBatchBuildIngest =
new JMenuItem("Build and Ingest Batch"/*
* , KeyEvent.VK_A
*/);
toolsBatchBuildIngest.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
createBatchBuildIngestConsole();
}
});
toolsBatchSubMenu.add(toolsBatchBuildIngest);
JMenuItem toolsBatchIngest = new JMenuItem("Ingest Batch"/*
* ,
* KeyEvent.VK_A
*/);
toolsBatchIngest.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
createBatchIngestConsole();
}
});
toolsBatchSubMenu.add(toolsBatchIngest);
JMenu toolsBatchModify = new JMenu("Modify Batch");
toolsBatchModify.setMnemonic(KeyEvent.VK_M);
JMenuItem executeBatchModify =
new JMenuItem("Process Directives", KeyEvent.VK_P);
toolsBatchModify.setToolTipText("Modifies a batch of objects based on "
+ "modify directives specified in a file on disk.");
executeBatchModify.setToolTipText("Run the Batch Modify Utility.");
executeBatchModify.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new BatchModify();
}
});
toolsBatchModify.add(executeBatchModify);
JMenuItem validateBatchModify =
new JMenuItem("Validate Directives File", KeyEvent.VK_V);
validateBatchModify
.setToolTipText("Validate the modify directives file against the batchModify XML Schema.");
validateBatchModify.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
new BatchModifyValidate();
}
});
toolsBatchModify.add(validateBatchModify);
toolsBatchSubMenu.add(toolsBatchModify);
toolsMenu.addSeparator();
toolsMenu.add(toolsBatchSubMenu);
// [A]ccess Console
JMenuItem toolsAccess = new JMenuItem("Access API", KeyEvent.VK_A);
toolsAccess.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
createAccessConsole();
}
});
// [M]anagement Console
JMenuItem toolsManagement =
new JMenuItem("Management API", KeyEvent.VK_M);
toolsManagement.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
createManagementConsole();
}
});
JMenu toolsConsole = new JMenu("Console");
toolsConsole.setMnemonic(KeyEvent.VK_C);
toolsConsole.add(toolsAccess);
toolsConsole.add(toolsManagement);
toolsMenu.add(toolsConsole);
menuBar.add(toolsMenu);
WindowMenu windowMenu = new WindowMenu(s_desktop, "Window");
windowMenu.setMnemonic(KeyEvent.VK_W);
menuBar.add(windowMenu);
// [H]elp
JMenu helpMenu = new JMenu("Help");
helpMenu.setMnemonic(KeyEvent.VK_H);
JMenuItem helpContents = new JMenuItem("Documentation", KeyEvent.VK_D);
String portPart = "";
if (getPort() != 80) {
portPart = ":" + getPort();
}
helpContents.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String documentationURL = "https://wiki.duraspace.org/display/FEDORA/All+Documentation";
JOptionPane.showMessageDialog(getDesktop(),
"For Fedora documentation, see "
+ documentationURL,
"Fedora Documentation",
JOptionPane.INFORMATION_MESSAGE);
}
});
m_aboutDialog = new JDialog(this, "About Fedora Administrator", true);
m_aboutDialog.getContentPane().add(m_aboutPic, BorderLayout.CENTER);
JButton aboutClose = new JButton("Close");
JPanel infoAndButton = new JPanel();
infoAndButton.setLayout(new BorderLayout());
infoAndButton.setBorder(BorderFactory.createEmptyBorder(0, 15, 0, 0));
infoAndButton.add(m_aboutText);
JPanel buttonPane = new JPanel();
buttonPane.setLayout(new BoxLayout(buttonPane, BoxLayout.X_AXIS));
buttonPane.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
buttonPane.add(Box.createHorizontalGlue());
buttonPane.add(aboutClose);
infoAndButton.add(buttonPane, BorderLayout.SOUTH);
m_aboutDialog.getContentPane().add(infoAndButton, BorderLayout.SOUTH);
aboutClose.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
m_aboutDialog.setVisible(false);
}
});
m_aboutDialog.pack();
JMenuItem helpAbout =
new JMenuItem("About Fedora Administrator", KeyEvent.VK_A);
helpAbout.setToolTipText("Gives brief information this application");
helpAbout.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
m_aboutDialog.setLocation(getCenteredPos(m_aboutDialog
.getWidth(), m_aboutDialog.getHeight()));
m_aboutDialog.setVisible(true);
}
});
helpMenu.add(helpContents);
helpMenu.addSeparator();
helpMenu.add(helpAbout);
menuBar.add(helpMenu);
return menuBar;
}
    /** Returns the directory last used by a file chooser, shared app-wide. */
    public static File getLastDir() {
        return s_lastDir;
    }

    /** Records the directory last used by a file chooser. */
    public static void setLastDir(File f) {
        s_lastDir = f;
    }
protected void createSearchRepository() {
Search frame = new Search();
frame.setVisible(true);
s_desktop.add(frame);
try {
frame.setSelected(true);
} catch (java.beans.PropertyVetoException e) {
}
}
protected void createManagementConsole() {
ManagementConsole frame = new ManagementConsole(this);
frame.setVisible(true);
s_desktop.add(frame);
try {
frame.setSelected(true);
} catch (java.beans.PropertyVetoException e) {
}
}
protected void createAccessConsole() {
AccessConsole frame = new AccessConsole(this);
frame.setVisible(true);
s_desktop.add(frame);
try {
frame.setSelected(true);
} catch (java.beans.PropertyVetoException e) {
}
}
protected void createBatchBuildConsole() {
BatchBuildGUI frame = new BatchBuildGUI(this, s_desktop);
frame.setVisible(true);
s_desktop.add(frame);
try {
frame.setSelected(true);
} catch (java.beans.PropertyVetoException e) {
}
}
protected void createBatchBuildIngestConsole() {
BatchBuildIngestGUI frame =
new BatchBuildIngestGUI(this,
s_desktop,
s_host,
s_port,
s_context,
s_user,
s_pass);
frame.setVisible(true);
s_desktop.add(frame);
try {
frame.setSelected(true);
} catch (java.beans.PropertyVetoException e) {
}
}
protected void createBatchIngestConsole() {
BatchIngestGUI frame =
new BatchIngestGUI(this,
s_desktop,
s_host,
s_port,
s_context,
s_user,
s_pass);
frame.setVisible(true);
s_desktop.add(frame);
try {
frame.setSelected(true);
} catch (java.beans.PropertyVetoException e) {
}
}
public Point getCenteredPos(int xSize, int ySize) {
Dimension screenSize = getToolkit().getScreenSize();
int maxXPos = screenSize.width - xSize;
int maxYPos = screenSize.height - ySize;
int centerX = getX() + getWidth() / 2;
int centerY = getY() + getHeight() / 2;
int prefXPos = centerX - xSize / 2;
int prefYPos = centerY - ySize / 2;
if (prefXPos < 0) {
prefXPos = 0;
}
if (prefXPos > maxXPos) {
prefXPos = maxXPos;
}
if (prefYPos < 0) {
prefYPos = 0;
}
if (prefYPos > maxYPos) {
prefYPos = maxYPos;
}
return new Point(prefXPos, prefYPos);
}
    /** Returns the protocol (http/https) of the current session. */
    public static String getProtocol() {
        return s_protocol;
    }

    /** Returns the repository host of the current session. */
    public static String getHost() {
        return s_host;
    }

    /** Returns the repository port of the current session. */
    public static int getPort() {
        return s_port;
    }

    /** Returns the user name of the current session. */
    public static String getUser() {
        return s_user;
    }

    /** Returns the password of the current session. */
    public static String getPass() {
        return s_pass;
    }

    /** Returns the webapp context path of the current session. */
    public static String getAppServContext() {
        return s_context;
    }
public static void main(String[] args) {
if (args.length == 1) {
int socketTimeoutSeconds = 120;
try {
socketTimeoutSeconds = Integer.parseInt(args[0]);
} catch (Exception e) {
}
APIAStubFactory.SOCKET_TIMEOUT_SECONDS = socketTimeoutSeconds;
logger.info("Socket timeout set to " + socketTimeoutSeconds
+ " seconds");
}
String protocol = null;
String host = null;
int port = 0;
String user = null;
String pass = null;
String context = null;
Administrator administrator =
new Administrator(protocol, host, port, context, user, pass);
}
}
| |
/* Copyright 2015 Braden Farmer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.farmerbb.secondscreen.service;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.hardware.display.DisplayManager;
import android.os.Build;
import androidx.core.app.NotificationCompat;
import androidx.core.content.ContextCompat;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import android.view.Display;
import com.farmerbb.secondscreen.R;
import com.farmerbb.secondscreen.activity.MainActivity;
import com.farmerbb.secondscreen.activity.TaskerQuickActionsActivity;
import com.farmerbb.secondscreen.activity.TurnOffActivity;
import com.farmerbb.secondscreen.support.RotationLockService;
import com.farmerbb.secondscreen.util.U;
// The NotificationService is started whenever a profile is active, whether it be a user-created
// profile or a temporary one created through Quick Actions. In addition to generating and showing
// a notification, the NotificationService is responsible for detecting when the screen is turned
// off and back on, launching either the TempBacklightOnService or ScreenOnService to control the
// backlight. It will also temporarily restore the backlight if a display is connected or
// disconnected while a profile is active. Lastly, it is responsible for showing the
// TurnOffActivity when the DisplayConnectionService is not running.
public final class NotificationService extends RotationLockService {
NotificationCompat.Builder mBuilder;
BroadcastReceiver screenOnReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
SharedPreferences prefMain = U.getPrefMain(NotificationService.this);
SharedPreferences.Editor editor = prefMain.edit();
editor.putLong("screen_on_time", System.currentTimeMillis());
editor.apply();
if(U.castScreenActive(NotificationService.this)) {
Intent serviceIntent = new Intent(context, TempBacklightOnService.class);
U.startService(context, serviceIntent);
} else {
Intent serviceIntent = new Intent(context, ScreenOnService.class);
U.startService(context, serviceIntent);
}
}
};
BroadcastReceiver userPresentReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
SharedPreferences prefMain = U.getPrefMain(NotificationService.this);
long screenOnTime = prefMain.getLong("screen_on_time", 0);
SharedPreferences.Editor editor = prefMain.edit();
editor.remove("screen_on_time");
editor.apply();
if(U.castScreenActive(NotificationService.this)
|| screenOnTime < (System.currentTimeMillis() - 5000)) {
Intent serviceIntent = new Intent(context, ScreenOnService.class);
U.startService(context, serviceIntent);
}
}
};
    // Reacts to external displays being connected/disconnected while a profile
    // is active: broadcasts SCREEN_CONNECT, temporarily restores the backlight,
    // and shows TurnOffActivity when appropriate.
    DisplayManager.DisplayListener listener = new DisplayManager.DisplayListener() {
        @Override
        public void onDisplayAdded(int displayId) {
            // Tell interested components (e.g. DisplayConnectionService) that
            // a screen was connected.
            Intent intent = new Intent();
            intent.setAction(U.SCREEN_CONNECT);
            LocalBroadcastManager.getInstance(NotificationService.this).sendBroadcast(intent);
            DisplayManager dm = (DisplayManager) getSystemService(DISPLAY_SERVICE);
            Display[] displays = dm.getDisplays();
            try {
                // length - 2 is the display that was previously last; if that is
                // the built-in display, the new external one is presumably the
                // first — restore the screen. (Assumes getDisplays() appends the
                // newest display at the end — TODO confirm.)
                if(displays[displays.length - 2].getDisplayId() == Display.DEFAULT_DISPLAY) {
                    Intent serviceIntent = new Intent(NotificationService.this, ScreenOnService.class);
                    U.startService(NotificationService.this, serviceIntent);
                }
            } catch (ArrayIndexOutOfBoundsException e) { /* Gracefully fail */ }
        }

        @Override
        public void onDisplayChanged(int displayId) {}

        @Override
        public void onDisplayRemoved(int displayId) {
            DisplayManager dm = (DisplayManager) getSystemService(DISPLAY_SERVICE);
            Display[] displays = dm.getDisplays();
            try {
                // Only the built-in display remains: the external screen is gone.
                if(displays[displays.length - 1].getDisplayId() == Display.DEFAULT_DISPLAY) {
                    // Temporarily restore the backlight while disconnected.
                    Intent serviceIntent = new Intent(NotificationService.this, TempBacklightOnService.class);
                    U.startService(NotificationService.this, serviceIntent);
                    SharedPreferences prefMain = U.getPrefMain(NotificationService.this);
                    // Check whether DisplayConnectionService is running; if it is,
                    // it will handle the disconnect itself.
                    ActivityManager manager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
                    boolean displayConnectionServiceRunning = false;
                    for(ActivityManager.RunningServiceInfo service : manager.getRunningServices(Integer.MAX_VALUE)) {
                        if(DisplayConnectionService.class.getName().equals(service.service.getClassName()))
                            displayConnectionServiceRunning = true;
                    }
                    // No connection service and profiles marked inactive: prompt
                    // the user to turn the profile off.
                    if(prefMain.getBoolean("inactive", true) && !displayConnectionServiceRunning) {
                        Intent turnOffIntent = new Intent(NotificationService.this, TurnOffActivity.class);
                        turnOffIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        startActivity(turnOffIntent);
                    }
                }
            } catch (ArrayIndexOutOfBoundsException e) { /* Gracefully fail */ }
        }
    };
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void onCreate() {
// Register broadcast receivers for screen on and user present
final IntentFilter filter1 = new IntentFilter();
final IntentFilter filter2 = new IntentFilter();
filter1.addAction(Intent.ACTION_SCREEN_ON);
filter1.addAction(Intent.ACTION_DREAMING_STARTED);
filter2.addAction(Intent.ACTION_USER_PRESENT);
registerReceiver(screenOnReceiver, filter1);
registerReceiver(userPresentReceiver, filter2);
DisplayManager manager = (DisplayManager) getSystemService(DISPLAY_SERVICE);
manager.registerDisplayListener(listener, null);
super.onCreate();
}
@Override
public void onDestroy() {
unregisterReceiver(screenOnReceiver);
unregisterReceiver(userPresentReceiver);
DisplayManager manager = (DisplayManager) getSystemService(DISPLAY_SERVICE);
manager.unregisterDisplayListener(listener);
super.onDestroy();
}
@SuppressWarnings("deprecation")
@Override
protected void startService() {
    // Load preferences
    SharedPreferences prefCurrent = U.getPrefCurrent(this);
    SharedPreferences prefMain = U.getPrefMain(this);

    // FLAG_IMMUTABLE is mandatory when targeting Android 12 (S) and newer;
    // without it PendingIntent.getActivity() throws IllegalArgumentException.
    // The constant exists since API 23 and is a compile-time constant, so
    // referencing it is safe on older devices as well.
    int pendingIntentFlags = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            ? PendingIntent.FLAG_UPDATE_CURRENT | PendingIntent.FLAG_IMMUTABLE
            : PendingIntent.FLAG_UPDATE_CURRENT;

    // Intent to launch MainActivity when the notification is clicked
    Intent mainActivityIntent = new Intent(this, MainActivity.class);
    PendingIntent mainActivityPendingIntent =
            PendingIntent.getActivity(this, 0, mainActivityIntent, pendingIntentFlags);

    // A notification channel is mandatory on Oreo+; its importance mirrors the
    // "hide_notification" preference (IMPORTANCE_MIN keeps the notification
    // collapsed, IMPORTANCE_LOW shows it without sound).
    String id = "NotificationService";
    if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
        NotificationManager mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        CharSequence name = getString(R.string.profile_active);
        int importance = prefMain.getBoolean("hide_notification", false)
                ? NotificationManager.IMPORTANCE_MIN
                : NotificationManager.IMPORTANCE_LOW;
        mNotificationManager.createNotificationChannel(new NotificationChannel(id, name, importance));
    }

    // Build the ongoing notification shown while a profile is active; the
    // content text is the active profile's name.
    mBuilder = new NotificationCompat.Builder(this, id)
            .setContentIntent(mainActivityPendingIntent)
            .setSmallIcon(R.drawable.ic_action_dock)
            .setContentTitle(getResources().getString(R.string.notification))
            .setContentText(prefCurrent.getString("profile_name", getResources().getString(R.string.action_new)))
            .setOngoing(true)
            .setShowWhen(false);

    // Set action buttons (slot 0 and slot 1)
    setActionButton(prefMain.getString("notification_action_2", "turn-off"), prefCurrent, 0);
    setActionButton(prefMain.getString("notification_action", "lock-device"), prefCurrent, 1);

    // Respect setting to hide notification (pre-Oreo path; Oreo+ is handled
    // via the channel importance above)
    if(prefMain.getBoolean("hide_notification", false))
        mBuilder.setPriority(Notification.PRIORITY_MIN);

    // Set notification color on Lollipop and newer
    if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        mBuilder.setColor(ContextCompat.getColor(this, R.color.primary_dark))
                .setVisibility(NotificationCompat.VISIBILITY_PUBLIC);
    }

    // Promote to a foreground service with the notification just built.
    startForeground(1, mBuilder.build());
}
@Override
protected int getScreenOrientation() {
    // With the support library available, report -1 (unspecified) and let the
    // system decide; otherwise defer to the helper's configured orientation.
    if(U.hasSupportLibrary(this))
        return -1;

    return U.getScreenOrientation(this);
}
/**
 * Adds a single action button to {@code mBuilder}, the notification under
 * construction in {@code startService()}.
 *
 * @param key which action the button performs: "turn-off", "lock-device",
 *        "quick-actions", or a "temp_*" toggle key
 * @param prefCurrent current-profile preferences, used to render the on/off
 *        state in the labels of toggle actions
 * @param code request code for the PendingIntent (doubles as the button slot)
 */
private void setActionButton(String key, SharedPreferences prefCurrent, int code) {
    Intent customIntent;
    PendingIntent customPendingIntent = null;
    String customString = null;

    // FLAG_IMMUTABLE is mandatory when targeting Android 12 (S) and newer;
    // without it PendingIntent.getActivity()/getService() throws
    // IllegalArgumentException. The constant exists since API 23 and is a
    // compile-time constant, so referencing it is safe on older devices.
    int pendingIntentFlags = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            ? PendingIntent.FLAG_UPDATE_CURRENT | PendingIntent.FLAG_IMMUTABLE
            : PendingIntent.FLAG_UPDATE_CURRENT;

    if(key.equals("turn-off")) {
        // Turn Off
        customIntent = new Intent(this, TurnOffActivity.class);
        customIntent.putExtra("notification", true);
        customPendingIntent = PendingIntent.getActivity(this, code, customIntent, pendingIntentFlags);
        customString = getResources().getStringArray(R.array.pref_notification_action_list)[0];
    } else if(key.equals("lock-device")) {
        // Lock Device
        customIntent = new Intent(this, LockDeviceService.class);
        customPendingIntent = PendingIntent.getService(this, code, customIntent, pendingIntentFlags);
        customString = getResources().getStringArray(R.array.pref_notification_action_list)[2];
    } else if(key.equals("quick-actions")) {
        // Quick Actions
        customIntent = new Intent(this, TaskerQuickActionsActivity.class);
        customIntent.putExtra("launched-from-app", true);
        customPendingIntent = PendingIntent.getActivity(this, code, customIntent, pendingIntentFlags);
        customString = getResources().getStringArray(R.array.pref_notification_action_list)[1];
    } else if(key.startsWith("temp_")) {
        // Toggle actions, routed through TaskerQuickActionsActivity with
        // KEY/VALUE extras; the legacy "temp_immersive" key is remapped to
        // its replacement "temp_immersive_new".
        customIntent = new Intent(this, TaskerQuickActionsActivity.class);
        customIntent.putExtra(U.KEY, key.equals("temp_immersive") ? "temp_immersive_new" : key);
        customIntent.putExtra(U.VALUE, "Toggle");
        customPendingIntent = PendingIntent.getActivity(this, code, customIntent, pendingIntentFlags);

        // Build the label from the feature name plus an on/off suffix taken
        // from pref_quick_actions, reflecting the current profile state.
        String onOffString;
        switch(key) {
            case "temp_backlight_off":
                if(prefCurrent.getBoolean("backlight_off", false))
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[0];
                else
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[1];
                customString = getResources().getString(R.string.quick_backlight) + onOffString;
                break;
            case "temp_chrome":
                if(prefCurrent.getBoolean("chrome", false))
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[1];
                else
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[0];
                customString = getResources().getString(R.string.desktop) + onOffString;
                break;
            case "temp_immersive":
            case "temp_immersive_new":
                switch(prefCurrent.getString("immersive_new", "fallback")) {
                    case "immersive-mode":
                        onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[1];
                        break;
                    default:
                        onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[0];
                        break;
                }
                customString = getResources().getString(R.string.immersive) + onOffString;
                break;
            case "temp_overscan":
                if(prefCurrent.getBoolean("overscan", false))
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[1];
                else
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[0];
                customString = getResources().getString(R.string.quick_overscan) + onOffString;
                break;
            case "temp_vibration_off":
                if(prefCurrent.getBoolean("vibration_off", false))
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[0];
                else
                    onOffString = " " + getResources().getStringArray(R.array.pref_quick_actions)[1];
                customString = getResources().getString(R.string.quick_vibration) + onOffString;
                break;
        }
    }

    // NOTE(review): an unrecognized key leaves customString/customPendingIntent
    // null, producing a blank action — TODO confirm every persisted preference
    // value is covered by the branches above.
    mBuilder.addAction(0, customString, customPendingIntent);
}
}
| |
/**
* Copyright (c) 2012-2015, jcabi.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the jcabi.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jcabi.aether;
import com.jcabi.aspects.Loggable;
import com.jcabi.log.Logger;
import java.io.File;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.validation.constraints.NotNull;
import lombok.EqualsAndHashCode;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Dependency;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilderException;
import org.apache.maven.shared.dependency.graph.DependencyNode;
/**
 * A classpath of a Maven Project.
 *
 * <p>Behaves as a read-only {@link java.util.Set} of {@link File}s: the
 * project's own classpath elements plus the files of dependencies whose scope
 * matches one of the requested scopes. Note that the content is re-computed on
 * every call to {@link #iterator()} and {@link #size()} (see {@code fetch()}).
 *
 * @author Krzysztof Krason (Krzysztof.Krason@gmail.com)
 * @version $Id$
 * @checkstyle ClassDataAbstractionCoupling (500 lines)
 */
@EqualsAndHashCode(callSuper = false, of = { "builder", "scopes" })
@Loggable(
    value = Loggable.DEBUG,
    limit = 1, unit = TimeUnit.MINUTES,
    trim = false
)
public final class MavenClasspath extends AbstractSet<File> {
    /**
     * Maven test scope.
     */
    public static final String TEST_SCOPE = "test";
    /**
     * Maven runtime scope.
     */
    public static final String RUNTIME_SCOPE = "runtime";
    /**
     * Maven system scope.
     */
    public static final String SYSTEM_SCOPE = "system";
    /**
     * Maven compile scope.
     */
    public static final String COMPILE_SCOPE = "compile";
    /**
     * Maven provided scope.
     */
    public static final String PROVIDED_SCOPE = "provided";
    /**
     * Artifact scopes to include.
     */
    private final transient Set<String> scopes;
    /**
     * Dependency graph builder.
     */
    private final transient DependencyGraphBuilder builder;
    /**
     * The current repository/network configuration of Maven.
     */
    private final transient MavenSession session;
    /**
     * Public ctor.
     * @param bldr Dependency graph builder.
     * @param sess Maven session.
     * @param scp The scope to use, e.g. "runtime" or "compile"
     */
    public MavenClasspath(@NotNull final DependencyGraphBuilder bldr,
        @NotNull final MavenSession sess,
        @NotNull final String scp) {
        // Delegate to the collection ctor with a single-element scope list.
        this(bldr, sess, Arrays.asList(scp));
    }
    /**
     * Public ctor.
     * @param bldr Dependency graph builder.
     * @param sess Maven session.
     * @param scps All scopes to include
     */
    public MavenClasspath(@NotNull final DependencyGraphBuilder bldr,
        @NotNull final MavenSession sess,
        @NotNull final Collection<String> scps) {
        super();
        this.builder = bldr;
        this.session = sess;
        // Defensive copy; duplicates in the input collapse naturally.
        this.scopes = new HashSet<String>(scps);
    }
    /**
     * {@inheritDoc}
     *
     * <p>Renders one line per matching project dependency. Any failure while
     * resolving a single dependency is caught broadly (deliberately) and
     * reported inline, so one broken artifact cannot abort the whole listing.
     */
    @Override
    @SuppressWarnings("PMD.AvoidCatchingGenericException")
    public String toString() {
        final StringBuilder text = new StringBuilder();
        for (final Dependency dep
            : this.session.getCurrentProject().getDependencies()) {
            if (this.scopes.contains(dep.getScope())) {
                try {
                    text.append(this.root(dep));
                    // @checkstyle IllegalCatch (1 line)
                } catch (final Exception ex) {
                    text.append(
                        Logger.format(
                            "failed to load '%s:%s:%s:%s (%s)' %s",
                            dep.getGroupId(),
                            dep.getArtifactId(),
                            dep.getType(),
                            dep.getVersion(),
                            dep.getScope(),
                            ex.getMessage()
                        )
                    );
                }
                text.append('\n');
            }
        }
        return text.toString();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Iterator<File> iterator() {
        return this.fetch().iterator();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public int size() {
        return this.fetch().size();
    }
    /**
     * Fetch all files found (JAR, ZIP, directories, etc).
     *
     * <p>Combines the project's own classpath elements with the files of the
     * resolved dependency graph. Called on every iteration/size query, so the
     * result is never cached.
     * @return Set of files
     */
    @SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
    private Set<File> fetch() {
        // LinkedHashSet keeps classpath order stable and de-duplicates.
        final Set<File> files = new LinkedHashSet<File>(0);
        for (final String path : this.elements()) {
            files.add(new File(path));
        }
        try {
            files.addAll(this.dependencies(this.graph(), this.scopes));
        } catch (final DependencyGraphBuilderException ex) {
            throw new IllegalStateException(ex);
        }
        return files;
    }
    /**
     * Build dependency graph.
     *
     * <p>The filter restricts the graph to artifacts whose scope is among
     * {@code this.scopes}.
     * @return Root of dependency graph.
     * @throws DependencyGraphBuilderException In case of error.
     */
    private DependencyNode graph() throws DependencyGraphBuilderException {
        return this.builder.buildDependencyGraph(
            this.session.getCurrentProject(),
            new ArtifactFilter() {
                @Override
                public boolean include(
                    final Artifact artifact) {
                    return MavenClasspath.this.scopes
                        .contains(artifact.getScope());
                }
            }
        );
    }
    /**
     * Convert dependency to root artifact.
     *
     * <p>NOTE(review): this rebuilds the whole dependency graph on every call
     * (once per dependency when invoked from toString()) — presumably cheap
     * enough in practice, but worth confirming for large projects.
     * @param dep Dependency
     * @return Root artifact
     */
    private MavenRootArtifact root(final Dependency dep) {
        final DefaultArtifact artifact = new DefaultArtifact(
            dep.getGroupId(),
            dep.getArtifactId(),
            dep.getVersion(),
            dep.getScope(),
            dep.getType(),
            dep.getClassifier(),
            new DefaultArtifactHandler()
        );
        try {
            final Collection<Artifact> children = new LinkedList<Artifact>();
            for (final DependencyNode child : this.graph().getChildren()) {
                children.add(child.getArtifact());
            }
            return new MavenRootArtifact(
                artifact,
                dep.getExclusions(),
                children
            );
        } catch (final DependencyGraphBuilderException ex) {
            throw new IllegalStateException(ex);
        }
    }
    /**
     * Get Maven Project elements.
     *
     * <p>Maps each requested scope to the corresponding classpath-element
     * accessor of the current project; "provided" is served by the compile
     * classpath, which is why it shares a branch with "compile".
     * @return Collection of them
     */
    private Collection<String> elements() {
        final Collection<String> elements = new LinkedList<String>();
        try {
            if (this.scopes.contains(TEST_SCOPE)) {
                elements.addAll(
                    this.session.getCurrentProject().getTestClasspathElements()
                );
            }
            if (this.scopes.contains(RUNTIME_SCOPE)) {
                elements.addAll(
                    this.session.getCurrentProject()
                        .getRuntimeClasspathElements()
                );
            }
            if (this.scopes.contains(SYSTEM_SCOPE)) {
                elements.addAll(
                    this.session.getCurrentProject()
                        .getSystemClasspathElements()
                );
            }
            if (this.scopes.contains(COMPILE_SCOPE)
                || this.scopes.contains(PROVIDED_SCOPE)) {
                elements.addAll(
                    this.session.getCurrentProject()
                        .getCompileClasspathElements()
                );
            }
        } catch (final DependencyResolutionRequiredException ex) {
            throw new IllegalStateException("Failed to read classpath", ex);
        }
        return elements;
    }
    /**
     * Retrieve dependencies for from given node and scope.
     *
     * <p>Recursive pre-order walk. A node with a null scope (the root of the
     * graph) contributes its own file; scoped artifacts are located through
     * the local repository. Children equal to their parent artifact are
     * skipped to avoid self-recursion.
     * @param node Node to traverse.
     * @param scps Scopes to use.
     * @return Collection of dependency files.
     */
    private Collection<File> dependencies(final DependencyNode node,
        final Collection<String> scps) {
        final Artifact artifact = node.getArtifact();
        final Collection<File> files = new LinkedList<File>();
        if ((artifact.getScope() == null)
            || scps.contains(artifact.getScope())) {
            if (artifact.getScope() == null) {
                files.add(artifact.getFile());
            } else {
                files.add(
                    this.session.getLocalRepository().find(artifact).getFile()
                );
            }
            for (final DependencyNode child : node.getChildren()) {
                if (child.getArtifact().compareTo(node.getArtifact()) != 0) {
                    files.addAll(this.dependencies(child, scps));
                }
            }
        }
        return files;
    }
}
| |
/**
* Copyright (c) 2007-2009 Alysson Bessani, Eduardo Alchieri, Paulo Sousa, and the authors indicated in the @author tags
*
* This file is part of SMaRt.
*
* SMaRt is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* SMaRt is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with SMaRt. If not, see <http://www.gnu.org/licenses/>.
*/
package bftsmart.paxosatwar.roles;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import bftsmart.communication.ServerCommunicationSystem;
import bftsmart.paxosatwar.executionmanager.Execution;
import bftsmart.paxosatwar.executionmanager.ExecutionManager;
import bftsmart.paxosatwar.executionmanager.LeaderModule;
import bftsmart.paxosatwar.executionmanager.Round;
import bftsmart.paxosatwar.messages.MessageFactory;
import bftsmart.paxosatwar.messages.PaxosMessage;
import bftsmart.reconfiguration.ServerViewManager;
import bftsmart.tom.core.TOMLayer;
import bftsmart.tom.util.Logger;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.SecretKeySpec;
/**
 * This class represents the acceptor role in the consensus protocol.
 * This class works together with the TOMLayer class in order to
 * supply an atomic multicast service.
 *
 * <p>Message flow per consensus instance: PROPOSE (from the leader) triggers a
 * WEAK broadcast; a strong quorum of WEAKs triggers a STRONG broadcast; a
 * 2f+1 quorum of STRONGs decides the value.
 *
 * @author Alysson Bessani
 */
public final class Acceptor {

    private int me; // This replica ID
    private ExecutionManager executionManager; // Execution manager of consensus's executions
    private MessageFactory factory; // Factory for PaW messages
    private ServerCommunicationSystem communication; // Replicas comunication system
    private LeaderModule leaderModule; // Manager for information about leaders
    private TOMLayer tomLayer; // TOM layer
    private ServerViewManager reconfManager;
    // NOTE(review): DES/ECB is cryptographically weak by modern standards; it
    // is kept because the MAC-vector format is part of the wire protocol
    // shared with peer replicas — changing it requires a coordinated upgrade.
    private Cipher cipher;

    /**
     * Creates a new instance of Acceptor.
     * @param communication Replicas communication system
     * @param factory Message factory for PaW messages
     * @param lm Manager for information about leaders
     * @param manager Server view manager (replica IDs, quorums, configuration)
     */
    public Acceptor(ServerCommunicationSystem communication, MessageFactory factory,
            LeaderModule lm, ServerViewManager manager) {
        this.communication = communication;
        this.me = manager.getStaticConf().getProcessId();
        this.factory = factory;
        this.leaderModule = lm;
        this.reconfManager = manager;
        try {
            this.cipher = Cipher.getInstance("DES/ECB/PKCS5Padding");
        } catch (NoSuchAlgorithmException | NoSuchPaddingException ex) {
            ex.printStackTrace();
        }
    }

    public MessageFactory getFactory() {
        return factory;
    }

    /**
     * Sets the execution manager for this acceptor
     * @param manager Execution manager for this acceptor
     */
    public void setExecutionManager(ExecutionManager manager) {
        this.executionManager = manager;
    }

    /**
     * Sets the TOM layer for this acceptor
     * @param tom TOM layer for this acceptor
     */
    public void setTOMLayer(TOMLayer tom) {
        this.tomLayer = tom;
    }

    /**
     * Called by communication layer to delivery Paxos messages. This method
     * only verifies if the message can be executed and calls process message
     * (storing it on an out of context message buffer if this is not the case)
     *
     * @param msg Paxos messages delivered by the communication layer
     */
    public final void deliver(PaxosMessage msg) {
        if (executionManager.checkLimits(msg)) {
            Logger.println("processing paxos msg with id " + msg.getNumber());
            processMessage(msg);
        } else {
            Logger.println("out of context msg with id " + msg.getNumber());
            tomLayer.processOutOfContext();
        }
    }

    /**
     * Called when a Paxos message is received or when a out of context message must be processed.
     * It processes the received message according to its type
     *
     * @param msg The message to be processed
     */
    public final void processMessage(PaxosMessage msg) {
        Execution execution = executionManager.getExecution(msg.getNumber());
        // The execution lock serializes all handling for one consensus instance.
        execution.lock.lock();
        Round round = execution.getRound(msg.getRound(), reconfManager);
        switch (msg.getPaxosType()){
            case MessageFactory.PROPOSE:{
                proposeReceived(round, msg);
            }break;
            case MessageFactory.WEAK:{
                weakAcceptReceived(round, msg.getSender(), msg.getValue());
            }break;
            case MessageFactory.STRONG:{
                strongAcceptReceived(round, msg);
            }
        }
        execution.lock.unlock();
    }

    /**
     * Called when a PROPOSE message is received or when processing a formerly out of context propose which
     * is know belongs to the current execution.
     *
     * @param round Round of the message being processed
     * @param msg The PROPOSE message to by processed
     */
    public void proposeReceived(Round round, PaxosMessage msg) {
        int eid = round.getExecution().getId();
        Logger.println("(Acceptor.proposeReceived) PROPOSE for consensus " + eid);
        // Only the current leader may propose; anything else is ignored.
        if (msg.getSender() == leaderModule.getCurrentLeader()) {
            executePropose(round, msg.getValue());
        } else {
            Logger.println("Propose received is not from the expected leader");
        }
    }

    /**
     * Executes actions related to a proposed value.
     *
     * @param round the current round of the execution
     * @param value Value that is proposed
     */
    private void executePropose(Round round, byte[] value) {
        int eid = round.getExecution().getId();
        Logger.println("(Acceptor.executePropose) executing propose for " + eid + "," + round.getNumber());
        long consensusStartTime = System.nanoTime();
        if(round.propValue == null) { //only accept one propose per round
            round.propValue = value;
            round.propValueHash = tomLayer.computeHash(value);
            /*** LEADER CHANGE CODE ********/
            round.getExecution().addWritten(value);
            /*****************************************/
            //start this execution if it is not already running
            if (eid == tomLayer.getLastExec() + 1) {
                tomLayer.setInExec(eid);
            }
            round.deserializedPropValue = tomLayer.checkProposedValue(value, true);
            if (round.deserializedPropValue != null && !round.isWeakSetted(me)) {
                if(round.getExecution().getLearner().firstMessageProposed == null) {
                    round.getExecution().getLearner().firstMessageProposed = round.deserializedPropValue[0];
                }
                round.getExecution().getLearner().firstMessageProposed.consensusStartTime = consensusStartTime;
                round.getExecution().getLearner().firstMessageProposed.proposeReceivedTime = System.nanoTime();
                Logger.println("(Acceptor.executePropose) sending weak for " + eid);
                round.setWeak(me, round.propValueHash);
                round.getExecution().getLearner().firstMessageProposed.weakSentTime = System.nanoTime();
                communication.send(this.reconfManager.getCurrentViewOtherAcceptors(),
                        factory.createWeak(eid, round.getNumber(), round.propValueHash));
                Logger.println("(Acceptor.executePropose) weak sent for " + eid);
                computeWeak(eid, round, round.propValueHash);
                Logger.println("(Acceptor.executePropose) weak computed for " + eid);
                executionManager.processOutOfContext(round.getExecution());
            }
        }
    }

    /**
     * Called when a WEAK message is received
     *
     * @param round Round of the receives message
     * @param a Replica that sent the message
     * @param value Value sent in the message
     */
    private void weakAcceptReceived(Round round, int a, byte[] value) {
        int eid = round.getExecution().getId();
        Logger.println("(Acceptor.weakAcceptReceived) WEAK from " + a + " for consensus " + eid);
        round.setWeak(a, value);
        computeWeak(eid, round, value);
    }

    /**
     * Computes weakly accepted values according to the standard PaW specification
     * (sends STRONG/DECIDE messages, according to the number of weakly accepted
     * values received).
     *
     * @param eid Execution ID of the received message
     * @param round Round of the receives message
     * @param value Value sent in the message
     */
    private void computeWeak(int eid, Round round, byte[] value) {
        int weakAccepted = round.countWeak(value);
        Logger.println("(Acceptor.computeWeak) I have " + weakAccepted +
                " weaks for " + eid + "," + round.getNumber());
        if (weakAccepted > reconfManager.getQuorumStrong() && Arrays.equals(value, round.propValueHash)) { // Can a send a STRONG message?
            if (!round.isStrongSetted(me)) {
                Logger.println("(Acceptor.computeWeak) sending STRONG for " + eid);
                /**** LEADER CHANGE CODE! ******/
                round.getExecution().setQuorumWeaks(value);
                /*****************************************/
                round.setStrong(me, value);
                // Bug fix: the original tested "firstMessageProposed == null" and
                // then dereferenced it inside the branch, a guaranteed
                // NullPointerException. Record the timestamp only when the
                // message exists (consistent with the null-check in decide()).
                if(round.getExecution().getLearner().firstMessageProposed != null) {
                    round.getExecution().getLearner().firstMessageProposed.strongSentTime = System.nanoTime();
                }
                PaxosMessage pm = factory.createStrong(eid, round.getNumber(), value);
                // override default authentication and create a vector of MACs
                ByteArrayOutputStream bOut = new ByteArrayOutputStream(248);
                try {
                    new ObjectOutputStream(bOut).writeObject(pm);
                } catch (IOException ex) {
                    ex.printStackTrace();
                }
                byte[] data = bOut.toByteArray();
                byte[] hash = tomLayer.computeHash(data);
                // One MAC per replica in the current view, keyed with the
                // pairwise secret shared with that replica.
                int[] processes = this.reconfManager.getCurrentViewAcceptors();
                HashMap<Integer, byte[]> macVector = new HashMap<Integer, byte[]>();
                for (int id : processes) {
                    try {
                        SecretKeySpec key = new SecretKeySpec(communication.getServersConn().getSecretKey(id).getEncoded(), "DES");
                        this.cipher.init(Cipher.ENCRYPT_MODE, key);
                        macVector.put(id, this.cipher.doFinal(hash));
                    } catch (IllegalBlockSizeException | BadPaddingException | InvalidKeyException ex) {
                        ex.printStackTrace();
                    }
                }
                pm.setMACVector(macVector);
                int[] targets = this.reconfManager.getCurrentViewOtherAcceptors();
                communication.getServersConn().send(targets, pm, false);
                round.addToProof(pm);
                computeStrong(eid, round, value);
            }
        }
    }

    /**
     * Called when a STRONG message is received
     *
     * @param round Round of the received message
     * @param msg The STRONG message (carries sender and value)
     */
    private void strongAcceptReceived(Round round, PaxosMessage msg) {
        int eid = round.getExecution().getId();
        Logger.println("(Acceptor.strongAcceptReceived) STRONG from " + msg.getSender() + " for consensus " + eid);
        round.setStrong(msg.getSender(), msg.getValue());
        round.addToProof(msg);
        computeStrong(eid, round, msg.getValue());
    }

    /**
     * Computes strongly accepted values according to the standard consensus
     * specification
     * @param eid Execution ID of the received message
     * @param round Round of the receives message
     * @param value Value sent in the message
     */
    private void computeStrong(int eid, Round round, byte[] value) {
        Logger.println("(Acceptor.computeStrong) I have " + round.countStrong(value) +
                " strongs for " + eid + "," + round.getNumber());
        // Decide once a 2f+1 quorum of STRONGs arrives, unless already decided.
        if (round.countStrong(value) > reconfManager.getQuorum2F() && !round.getExecution().isDecided()) {
            Logger.println("(Acceptor.computeStrong) Deciding " + eid);
            decide(round, value);
        }
    }

    /**
     * This is the method invoked when a value is decided by this process
     * @param round Round at which the decision is made
     * @param value The decided value (got from WEAK or STRONG messages)
     */
    private void decide(Round round, byte[] value) {
        if (round.getExecution().getLearner().firstMessageProposed != null)
            round.getExecution().getLearner().firstMessageProposed.decisionTime = System.nanoTime();
        leaderModule.decided(round.getExecution().getId(),
                tomLayer.lm.getCurrentLeader()/*leaderModule.getLeader(round.getExecution().getId(),
                round.getNumber())*/);
        round.getExecution().decided(round, value);
    }
}
| |
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.build;
import org.gradle.BuildListener;
import org.gradle.BuildResult;
import org.gradle.api.Task;
import org.gradle.api.internal.GradleInternal;
import org.gradle.api.internal.SettingsInternal;
import org.gradle.execution.BuildWorkExecutor;
import org.gradle.execution.plan.BuildWorkPlan;
import org.gradle.execution.plan.ExecutionPlan;
import org.gradle.execution.plan.Node;
import org.gradle.initialization.BuildCompletionListener;
import org.gradle.initialization.exception.ExceptionAnalyser;
import org.gradle.initialization.internal.InternalBuildFinishedListener;
import org.gradle.internal.Describables;
import org.gradle.internal.concurrent.CompositeStoppable;
import org.gradle.internal.model.StateTransitionController;
import org.gradle.internal.model.StateTransitionControllerFactory;
import org.gradle.internal.service.scopes.BuildScopeServices;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;
@SuppressWarnings("deprecation")
public class DefaultBuildLifecycleController implements BuildLifecycleController {
private enum State implements StateTransitionController.State {
// Configuring the build, can access build model
Configure,
// Scheduling tasks for execution
TaskSchedule,
ReadyToRun,
// build has finished and should do no further work
Finished
}
private final ExceptionAnalyser exceptionAnalyser;
private final BuildListener buildListener;
private final BuildCompletionListener buildCompletionListener;
private final InternalBuildFinishedListener buildFinishedListener;
private final BuildWorkPreparer workPreparer;
private final BuildWorkExecutor workExecutor;
private final BuildScopeServices buildServices;
private final BuildToolingModelControllerFactory toolingModelControllerFactory;
private final BuildModelController modelController;
private final StateTransitionController<State> state;
private final GradleInternal gradle;
private boolean hasTasks;
public DefaultBuildLifecycleController(
GradleInternal gradle,
BuildModelController buildModelController,
ExceptionAnalyser exceptionAnalyser,
BuildListener buildListener,
BuildCompletionListener buildCompletionListener,
InternalBuildFinishedListener buildFinishedListener,
BuildWorkPreparer workPreparer,
BuildWorkExecutor workExecutor,
BuildScopeServices buildServices,
BuildToolingModelControllerFactory toolingModelControllerFactory,
StateTransitionControllerFactory controllerFactory
) {
this.gradle = gradle;
this.modelController = buildModelController;
this.exceptionAnalyser = exceptionAnalyser;
this.buildListener = buildListener;
this.workPreparer = workPreparer;
this.workExecutor = workExecutor;
this.buildCompletionListener = buildCompletionListener;
this.buildFinishedListener = buildFinishedListener;
this.buildServices = buildServices;
this.toolingModelControllerFactory = toolingModelControllerFactory;
this.state = controllerFactory.newController(Describables.of("state of", gradle.getOwner().getDisplayName()), State.Configure);
}
@Override
public GradleInternal getGradle() {
// Should not ignore other threads, however it is currently possible for this to be queried by tasks at execution time (that is, when another thread is
// transitioning the task graph state). Instead, it may be better to:
// - have the threads use some specific immutable view of the build model state instead of requiring direct access to the build model.
// - not have a thread blocked around task execution, so that other threads can use the build model.
// - maybe split the states into one for the build model and one for the task graph.
state.assertNotInState(State.Finished);
return gradle;
}
@Override
public void loadSettings() {
state.notInState(State.Finished, modelController::getLoadedSettings);
}
@Override
public <T> T withSettings(Function<? super SettingsInternal, T> action) {
return state.notInState(State.Finished, () -> action.apply(modelController.getLoadedSettings()));
}
@Override
public void configureProjects() {
state.notInState(State.Finished, modelController::getConfiguredModel);
}
@Override
public <T> T withProjectsConfigured(Function<? super GradleInternal, T> action) {
return state.notInState(State.Finished, () -> action.apply(modelController.getConfiguredModel()));
}
@Override
public GradleInternal getConfiguredBuild() {
// Should not ignore other threads. See above.
return state.notInStateIgnoreOtherThreads(State.Finished, modelController::getConfiguredModel);
}
@Override
public void prepareToScheduleTasks() {
state.maybeTransition(State.Configure, State.TaskSchedule, () -> {
hasTasks = true;
modelController.prepareToScheduleTasks();
});
}
@Override
public BuildWorkPlan newWorkGraph() {
return state.inState(State.TaskSchedule, () -> {
ExecutionPlan plan = workPreparer.newExecutionPlan();
modelController.initializeWorkGraph(plan);
return new DefaultBuildWorkPlan(this, plan);
});
}
@Override
public void populateWorkGraph(BuildWorkPlan plan, Consumer<? super WorkGraphBuilder> action) {
DefaultBuildWorkPlan workPlan = unpack(plan);
state.inState(State.TaskSchedule, () -> workPreparer.populateWorkGraph(gradle, workPlan.plan, dest -> action.accept(new DefaultWorkGraphBuilder(dest))));
}
@Override
public void finalizeWorkGraph(BuildWorkPlan plan) {
DefaultBuildWorkPlan workPlan = unpack(plan);
state.transition(State.TaskSchedule, State.ReadyToRun, () -> {
workPreparer.finalizeWorkGraph(gradle, workPlan.plan);
});
}
@Override
public ExecutionResult<Void> executeTasks(BuildWorkPlan plan) {
// Execute tasks and transition back to "configure", as this build may run more tasks;
DefaultBuildWorkPlan workPlan = unpack(plan);
return state.tryTransition(State.ReadyToRun, State.Configure, () -> workExecutor.execute(gradle, workPlan.plan));
}
private DefaultBuildWorkPlan unpack(BuildWorkPlan plan) {
DefaultBuildWorkPlan workPlan = (DefaultBuildWorkPlan) plan;
if (workPlan.owner != this) {
throw new IllegalArgumentException("Unexpected plan owner.");
}
return workPlan;
}
@Override
public <T> T withToolingModels(Function<? super BuildToolingModelController, T> action) {
return action.apply(toolingModelControllerFactory.createController(gradle.getOwner(), this));
}
@Override
public ExecutionResult<Void> finishBuild(@Nullable Throwable failure) {
return state.finish(State.Finished, stageFailures -> {
// Fire the build finished events even if nothing has happened to this build, because quite a lot of internal infrastructure
// adds listeners and expects to see a build finished event. Infrastructure should not be using the public listener types
// In addition, they almost all should be using a build tree scoped event instead of a build scoped event
Throwable reportableFailure = failure;
if (reportableFailure == null && !stageFailures.getFailures().isEmpty()) {
reportableFailure = exceptionAnalyser.transform(stageFailures.getFailures());
}
BuildResult buildResult = new BuildResult(hasTasks ? "Build" : "Configure", gradle, reportableFailure);
ExecutionResult<Void> finishResult;
try {
buildListener.buildFinished(buildResult);
buildFinishedListener.buildFinished((GradleInternal) buildResult.getGradle(), buildResult.getFailure() != null);
finishResult = ExecutionResult.succeeded();
} catch (Throwable t) {
finishResult = ExecutionResult.failed(t);
}
return finishResult;
});
}
/**
 * <p>Adds a listener to this build instance. The listener is notified of events which occur during the execution of the build.
 * See {@link org.gradle.api.invocation.Gradle#addListener(Object)} for supported listener types.</p>
 *
 * @param listener The listener to add. Has no effect if the listener has already been added.
 */
@Override
public void addListener(Object listener) {
    // Delegates to the Gradle instance, which owns listener registration.
    getGradle().addListener(listener);
}
@Override
public void stop() {
    try {
        // Tear down all services scoped to this build.
        CompositeStoppable.stoppable(buildServices).stop();
    } finally {
        // Always signal completion, even when stopping the services failed.
        buildCompletionListener.completed();
    }
}
// Pairs an execution plan with the controller that created it, so a controller can
// reject plans created by a different controller instance.
private static class DefaultBuildWorkPlan implements BuildWorkPlan {
    // The controller that created this plan; checked when the plan is handed back.
    private final DefaultBuildLifecycleController owner;
    private final ExecutionPlan plan;
    public DefaultBuildWorkPlan(DefaultBuildLifecycleController owner, ExecutionPlan plan) {
        this.owner = owner;
        this.plan = plan;
    }
}
// Populates a single execution plan with requested tasks, explicit entry tasks, or raw nodes.
private class DefaultWorkGraphBuilder implements WorkGraphBuilder {
    private final ExecutionPlan executionPlan;

    public DefaultWorkGraphBuilder(ExecutionPlan executionPlan) {
        this.executionPlan = executionPlan;
    }

    @Override
    public void addRequestedTasks() {
        // The model controller knows which tasks were requested for this build.
        modelController.scheduleRequestedTasks(executionPlan);
    }

    @Override
    public void addEntryTasks(List<? extends Task> tasks) {
        // Each task is added as its own single-element entry group, preserving the
        // one-at-a-time semantics of the original implementation.
        for (Task entryTask : tasks) {
            executionPlan.addEntryTasks(Collections.singletonList(entryTask));
        }
    }

    @Override
    public void addNodes(List<? extends Node> nodes) {
        executionPlan.addNodes(nodes);
    }
}
}
| |
package com.github.mikephil.charting.data;
import android.graphics.Paint;
import com.github.mikephil.charting.interfaces.datasets.ICandleDataSet;
import com.github.mikephil.charting.utils.ColorTemplate;
import com.github.mikephil.charting.utils.Utils;
import java.util.ArrayList;
import java.util.List;
/**
* DataSet for the CandleStickChart.
*
* @author Philipp Jahoda
*/
public class CandleDataSet extends LineScatterCandleRadarDataSet<CandleEntry> implements ICandleDataSet {
/**
* the width of the shadow of the candle
*/
private float mShadowWidth = 3f;
/**
* should the candle bars show?
* when false, only "ticks" will show
*
* - default: true
*/
private boolean mShowCandleBar = true;
/**
* the space between the candle entries, default 0.1f (10%)
*/
private float mBarSpace = 0.1f;
/**
* use candle color for the shadow
*/
private boolean mShadowColorSameAsCandle = false;
/**
* paint style when open < close
* increasing candlesticks are traditionally hollow
*/
protected Paint.Style mIncreasingPaintStyle = Paint.Style.STROKE;
/**
* paint style when open > close
* descreasing candlesticks are traditionally filled
*/
protected Paint.Style mDecreasingPaintStyle = Paint.Style.FILL;
/**
* color for open == close
*/
protected int mNeutralColor = ColorTemplate.COLOR_NONE;
/**
* color for open < close
*/
protected int mIncreasingColor = ColorTemplate.COLOR_NONE;
/**
* color for open > close
*/
protected int mDecreasingColor = ColorTemplate.COLOR_NONE;
/**
* shadow line color, set -1 for backward compatibility and uses default
* color
*/
protected int mShadowColor = ColorTemplate.COLOR_NONE;
public CandleDataSet(List<CandleEntry> yVals, String label) {
super(yVals, label);
}
@Override
public DataSet<CandleEntry> copy() {
List<CandleEntry> yVals = new ArrayList<CandleEntry>();
for (int i = 0; i < mYVals.size(); i++) {
yVals.add(mYVals.get(i).copy());
}
CandleDataSet copied = new CandleDataSet(yVals, getLabel());
copied.mColors = mColors;
copied.mShadowWidth = mShadowWidth;
copied.mShowCandleBar = mShowCandleBar;
copied.mBarSpace = mBarSpace;
copied.mHighLightColor = mHighLightColor;
copied.mIncreasingPaintStyle = mIncreasingPaintStyle;
copied.mDecreasingPaintStyle = mDecreasingPaintStyle;
copied.mShadowColor = mShadowColor;
return copied;
}
@Override
public void calcMinMax(int start, int end) {
// super.calculate();
if (mYVals == null)
return;
if (mYVals.size() == 0)
return;
int endValue;
if (end == 0 || end >= mYVals.size())
endValue = mYVals.size() - 1;
else
endValue = end;
mYMin = Float.MAX_VALUE;
mYMax = -Float.MAX_VALUE;
for (int i = start; i <= endValue; i++) {
CandleEntry e = mYVals.get(i);
if (e.getLow() < mYMin)
mYMin = e.getLow();
if (e.getHigh() > mYMax)
mYMax = e.getHigh();
}
}
/**
* Sets the space that is left out on the left and right side of each
* candle, default 0.1f (10%), max 0.45f, min 0f
*
* @param space
*/
public void setBarSpace(float space) {
if (space < 0f)
space = 0f;
if (space > 0.45f)
space = 0.45f;
mBarSpace = space;
}
@Override
public float getBarSpace() {
return mBarSpace;
}
/**
* Sets the width of the candle-shadow-line in pixels. Default 3f.
*
* @param width
*/
public void setShadowWidth(float width) {
mShadowWidth = Utils.convertDpToPixel(width);
}
@Override
public float getShadowWidth() {
return mShadowWidth;
}
/**
* Sets whether the candle bars should show?
*
* @param showCandleBar
*/
public void setShowCandleBar(boolean showCandleBar) {
mShowCandleBar = showCandleBar;
}
@Override
public boolean getShowCandleBar() {
return mShowCandleBar;
}
// TODO
/**
* It is necessary to implement ColorsList class that will encapsulate
* colors list functionality, because It's wrong to copy paste setColor,
* addColor, ... resetColors for each time when we want to add a coloring
* options for one of objects
*
* @author Mesrop
*/
/** BELOW THIS COLOR HANDLING */
/**
* Sets the one and ONLY color that should be used for this DataSet when
* open == close.
*
* @param color
*/
public void setNeutralColor(int color) {
mNeutralColor = color;
}
@Override
public int getNeutralColor() {
return mNeutralColor;
}
/**
* Sets the one and ONLY color that should be used for this DataSet when
* open <= close.
*
* @param color
*/
public void setIncreasingColor(int color) {
mIncreasingColor = color;
}
@Override
public int getIncreasingColor() {
return mIncreasingColor;
}
/**
* Sets the one and ONLY color that should be used for this DataSet when
* open > close.
*
* @param color
*/
public void setDecreasingColor(int color) {
mDecreasingColor = color;
}
@Override
public int getDecreasingColor() {
return mDecreasingColor;
}
@Override
public Paint.Style getIncreasingPaintStyle() {
return mIncreasingPaintStyle;
}
/**
* Sets paint style when open < close
*
* @param paintStyle
*/
public void setIncreasingPaintStyle(Paint.Style paintStyle) {
this.mIncreasingPaintStyle = paintStyle;
}
@Override
public Paint.Style getDecreasingPaintStyle() {
return mDecreasingPaintStyle;
}
/**
* Sets paint style when open > close
*
* @param decreasingPaintStyle
*/
public void setDecreasingPaintStyle(Paint.Style decreasingPaintStyle) {
this.mDecreasingPaintStyle = decreasingPaintStyle;
}
@Override
public int getShadowColor() {
return mShadowColor;
}
/**
* Sets shadow color for all entries
*
* @param shadowColor
*/
public void setShadowColor(int shadowColor) {
this.mShadowColor = shadowColor;
}
@Override
public boolean getShadowColorSameAsCandle() {
return mShadowColorSameAsCandle;
}
/**
* Sets shadow color to be the same color as the candle color
*
* @param shadowColorSameAsCandle
*/
public void setShadowColorSameAsCandle(boolean shadowColorSameAsCandle) {
this.mShadowColorSameAsCandle = shadowColorSameAsCandle;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
/**
 * Sends locally-detected dangling indices to the current master node so they can be imported
 * back into the cluster metadata and have their shards allocated; also registers the
 * master-side transport handler that performs the import.
 */
public class LocalAllocateDangledIndices extends AbstractComponent {
    public static final String ACTION_NAME = "internal:gateway/local/allocate_dangled";
    private final TransportService transportService;
    private final ClusterService clusterService;
    private final AllocationService allocationService;

    @Inject
    public LocalAllocateDangledIndices(Settings settings, TransportService transportService, ClusterService clusterService, AllocationService allocationService) {
        super(settings);
        this.transportService = transportService;
        this.clusterService = clusterService;
        this.allocationService = allocationService;
        // Every node registers the handler; only the master is actually sent requests.
        transportService.registerHandler(ACTION_NAME, new AllocateDangledRequestHandler());
    }

    /**
     * Sends the given dangled index metadata to the current master for import.
     * Fails fast with MasterNotDiscoveredException when no master is known.
     */
    public void allocateDangled(Collection<IndexMetaData> indices, final Listener listener) {
        ClusterState clusterState = clusterService.state();
        DiscoveryNode masterNode = clusterState.nodes().masterNode();
        if (masterNode == null) {
            listener.onFailure(new MasterNotDiscoveredException("no master to send allocate dangled request"));
            return;
        }
        AllocateDangledRequest request = new AllocateDangledRequest(clusterService.localNode(), indices.toArray(new IndexMetaData[indices.size()]));
        transportService.sendRequest(masterNode, ACTION_NAME, request, new TransportResponseHandler<AllocateDangledResponse>() {
            @Override
            public AllocateDangledResponse newInstance() {
                return new AllocateDangledResponse();
            }
            @Override
            public void handleResponse(AllocateDangledResponse response) {
                listener.onResponse(response);
            }
            @Override
            public void handleException(TransportException exp) {
                listener.onFailure(exp);
            }
            @Override
            public String executor() {
                // Callbacks are cheap; run them on the calling transport thread.
                return ThreadPool.Names.SAME;
            }
        });
    }

    /** Callback for the outcome of an allocate-dangled request. */
    public static interface Listener {
        void onResponse(AllocateDangledResponse response);
        void onFailure(Throwable e);
    }

    /**
     * Master-side handler: imports the requested dangling indices into the cluster state
     * via a cluster state update task, then acks the sender once the state is processed.
     */
    class AllocateDangledRequestHandler extends BaseTransportRequestHandler<AllocateDangledRequest> {
        @Override
        public AllocateDangledRequest newInstance() {
            return new AllocateDangledRequest();
        }
        @Override
        public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception {
            String[] indexNames = new String[request.indices.length];
            for (int i = 0; i < request.indices.length; i++) {
                indexNames[i] = request.indices[i].index();
            }
            clusterService.submitStateUpdateTask("allocation dangled indices " + Arrays.toString(indexNames), new ProcessedClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) {
                    // When state persistence is disabled (e.g. a global block), importing would be lost anyway.
                    if (currentState.blocks().disableStatePersistence()) {
                        return currentState;
                    }
                    MetaData.Builder metaData = MetaData.builder(currentState.metaData());
                    ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                    RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable());
                    boolean importNeeded = false;
                    StringBuilder sb = new StringBuilder();
                    for (IndexMetaData indexMetaData : request.indices) {
                        // Skip indices that already exist or clash with an alias name.
                        if (currentState.metaData().hasIndex(indexMetaData.index())) {
                            continue;
                        }
                        if (currentState.metaData().aliases().containsKey(indexMetaData.index())) {
                            logger.warn("ignoring dangled index [{}] on node [{}] due to an existing alias with the same name",
                                    indexMetaData.index(), request.fromNode);
                            continue;
                        }
                        importNeeded = true;
                        metaData.put(indexMetaData, false);
                        blocks.addBlocks(indexMetaData);
                        // Imported indices go through the normal recovery path.
                        routingTableBuilder.addAsRecovery(indexMetaData);
                        sb.append("[").append(indexMetaData.index()).append("/").append(indexMetaData.state()).append("]");
                    }
                    if (!importNeeded) {
                        return currentState;
                    }
                    logger.info("auto importing dangled indices {} from [{}]", sb.toString(), request.fromNode);
                    ClusterState updatedState = ClusterState.builder(currentState).metaData(metaData).blocks(blocks).routingTable(routingTableBuilder).build();
                    // now, reroute
                    // NOTE(review): routingTableBuilder is built a second time here — confirm
                    // RoutingTable.Builder.build() is reusable in this version of the API.
                    RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(updatedState).routingTable(routingTableBuilder).build());
                    return ClusterState.builder(updatedState).routingResult(routingResult).build();
                }
                @Override
                public void onFailure(String source, Throwable t) {
                    logger.error("unexpected failure during [{}]", t, source);
                    try {
                        // Propagate the failure back to the requesting node.
                        channel.sendResponse(t);
                    } catch (Exception e) {
                        logger.warn("failed send response for allocating dangled", e);
                    }
                }
                @Override
                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    try {
                        // Ack only after the new cluster state has been processed.
                        channel.sendResponse(new AllocateDangledResponse(true));
                    } catch (IOException e) {
                        logger.warn("failed send response for allocating dangled", e);
                    }
                }
            });
        }
        @Override
        public String executor() {
            // The handler only submits a cluster state update task; no need for a worker thread.
            return ThreadPool.Names.SAME;
        }
    }

    /** Transport request carrying the sender node and the dangled index metadata to import. */
    static class AllocateDangledRequest extends TransportRequest {
        DiscoveryNode fromNode;
        IndexMetaData[] indices;
        AllocateDangledRequest() {
        }
        AllocateDangledRequest(DiscoveryNode fromNode, IndexMetaData[] indices) {
            this.fromNode = fromNode;
            this.indices = indices;
        }
        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            fromNode = DiscoveryNode.readNode(in);
            indices = new IndexMetaData[in.readVInt()];
            for (int i = 0; i < indices.length; i++) {
                indices[i] = IndexMetaData.Builder.readFrom(in);
            }
        }
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            fromNode.writeTo(out);
            out.writeVInt(indices.length);
            for (IndexMetaData indexMetaData : indices) {
                IndexMetaData.Builder.writeTo(indexMetaData, out);
            }
        }
    }

    /** Transport response carrying a single ack flag. */
    public static class AllocateDangledResponse extends TransportResponse {
        private boolean ack;
        AllocateDangledResponse() {
        }
        AllocateDangledResponse(boolean ack) {
            this.ack = ack;
        }
        public boolean ack() {
            return ack;
        }
        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            ack = in.readBoolean();
        }
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(ack);
        }
    }
}
| |
/*
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.stetho.json;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import com.facebook.stetho.common.ExceptionUtil;
import com.facebook.stetho.json.annotation.JsonProperty;
import com.facebook.stetho.json.annotation.JsonValue;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * This class is a lightweight version of Jackson's ObjectMapper. It is designed to have a minimal
 * subset of the functionality required for stetho.
 * <p>
 * It would be awesome if there were a lightweight library that supported converting between
 * arbitrary {@link Object} and {@link JSONObject} representations.
 * <p>
 * Admittedly the other approach would be to use an Annotation Processor to create static conversion
 * functions that discover something like a {@link JsonProperty} and create a function at compile
 * time however since this is just being used for a simple debug utility and Kit-Kat caches the
 * results of reflection this class is sufficient for stethos needs.
 */
public class ObjectMapper {
  // Per-class cache of the @JsonValue method; a stored null means "looked up, none present".
  @GuardedBy("mJsonValueMethodCache")
  private final Map<Class<?>, Method> mJsonValueMethodCache = new IdentityHashMap<>();

  /**
   * Support mapping between arbitrary classes and {@link JSONObject}.
   * <note>
   * It is possible for a {@link Throwable} to be propagated out of this class if there is an
   * {@link InvocationTargetException}.
   * </note>
   * @param fromValue
   * @param toValueType
   * @param <T>
   * @return
   * @throws IllegalArgumentException when there is an error converting. One of either
   * {@code fromValue.getClass()} or {@code toValueType} must be {@link JSONObject}.
   */
  public <T> T convertValue(Object fromValue, Class<T> toValueType)
      throws IllegalArgumentException {
    if (fromValue == null) {
      return null;
    }
    // Fast path: the value already is the requested type. Object.class is excluded so callers
    // asking for a plain Object still get a JSON conversion below.
    if (toValueType != Object.class
        && toValueType.isAssignableFrom(fromValue.getClass())) {
      return (T) fromValue;
    }
    try {
      if (fromValue instanceof JSONObject) {
        return _convertFromJSONObject((JSONObject) fromValue, toValueType);
      } else if (toValueType == JSONObject.class) {
        return (T) _convertToJSONObject(fromValue);
      } else {
        throw new IllegalArgumentException(
            "Expecting either fromValue or toValueType to be a JSONObject");
      }
    } catch (NoSuchMethodException e) {
      throw new IllegalArgumentException(e);
    } catch (IllegalAccessException e) {
      throw new IllegalArgumentException(e);
    } catch (InstantiationException e) {
      throw new IllegalArgumentException(e);
    } catch (JSONException e) {
      throw new IllegalArgumentException(e);
    } catch (InvocationTargetException e) {
      // Unwrap the reflective wrapper and propagate whatever the invoked code actually threw.
      throw ExceptionUtil.propagate(e.getCause());
    }
  }

  /**
   * Instantiates {@code type} via its (possibly non-public) no-arg constructor and populates its
   * public fields from the matching keys of {@code jsonObject}.
   */
  private <T> T _convertFromJSONObject(JSONObject jsonObject, Class<T> type)
      throws NoSuchMethodException, IllegalAccessException, InvocationTargetException,
      InstantiationException, JSONException {
    Constructor<T> constructor = type.getDeclaredConstructor((Class[]) null);
    constructor.setAccessible(true);
    T instance = constructor.newInstance();
    Field[] fields = type.getFields();
    for (int i = 0; i < fields.length; ++i) {
      Field field = fields[i];
      Object value = jsonObject.opt(field.getName());
      Object setValue = getValueForField(field, value);
      try {
        field.set(instance, setValue);
      } catch (IllegalArgumentException e) {
        // Bug fix: setValue may legitimately be null here (e.g. mapped from JSONObject.NULL),
        // and the previous message builder dereferenced it unconditionally, masking the real
        // conversion error with a NullPointerException.
        throw new IllegalArgumentException(
            "Class: " + type.getSimpleName() + " " +
            "Field: " + field.getName() + " type " +
            (setValue != null ? setValue.getClass().getName() : "null"),
            e);
      }
    }
    return instance;
  }

  /**
   * Converts a raw JSON value into an object assignable to {@code field}, handling nested
   * objects, enums, arrays and numeric widening/narrowing. Returns the value unchanged when no
   * conversion applies.
   */
  private Object getValueForField(Field field, Object value)
      throws JSONException {
    try {
      if (value != null) {
        if (value == JSONObject.NULL) {
          return null;
        }
        if (value.getClass() == field.getType()) {
          return value;
        }
        if (value instanceof JSONObject) {
          return convertValue(value, field.getType());
        } else {
          if (field.getType().isEnum()) {
            return getEnumValue((String) value, field.getType().asSubclass(Enum.class));
          } else if (value instanceof JSONArray) {
            return convertArrayToList(field, (JSONArray) value);
          } else if (value instanceof Number) {
            // Need to convert value to Number This happens because json treats 1 as an Integer even
            // if the field is supposed to be a Long
            Number numberValue = (Number) value;
            Class<?> clazz = field.getType();
            if (clazz == Integer.class || clazz == int.class) {
              return numberValue.intValue();
            } else if (clazz == Long.class || clazz == long.class) {
              return numberValue.longValue();
            } else if (clazz == Double.class || clazz == double.class) {
              return numberValue.doubleValue();
            } else if (clazz == Float.class || clazz == float.class) {
              return numberValue.floatValue();
            } else if (clazz == Byte.class || clazz == byte.class) {
              return numberValue.byteValue();
            } else if (clazz == Short.class || clazz == short.class) {
              return numberValue.shortValue();
            } else {
              throw new IllegalArgumentException("Not setup to handle class " + clazz.getName());
            }
          }
        }
      }
    } catch (IllegalAccessException e) {
      // Thrown by convertArrayToList when reflective access to an element fails.
      throw new IllegalArgumentException("Unable to set value for field " + field.getName(), e);
    }
    return value;
  }

  /**
   * Resolves an enum constant by name, or via the enum's {@link JsonValue} method when present.
   */
  private Enum getEnumValue(String value, Class<? extends Enum> clazz) {
    Method method = getJsonValueMethod(clazz);
    if (method != null) {
      return getEnumByMethod(value, clazz, method);
    } else {
      return Enum.valueOf(clazz, value);
    }
  }

  /**
   * In this case we know that there is an {@link Enum} decorated with {@link JsonValue}. This means
   * that we need to iterate through all of the values of the {@link Enum} returned by the given
   * {@link Method} to check the given value.
   * @param value
   * @param clazz
   * @param method
   * @return
   */
  private Enum getEnumByMethod(String value, Class<? extends Enum> clazz, Method method) {
    Enum[] enumValues = clazz.getEnumConstants();
    // Start at the front to ensure first always wins
    for (int i = 0; i < enumValues.length; ++i) {
      Enum enumValue = enumValues[i];
      try {
        Object o = method.invoke(enumValue);
        if (o != null) {
          if (o.toString().equals(value)) {
            return enumValue;
          }
        }
      } catch (Exception ex) {
        throw new IllegalArgumentException(ex);
      }
    }
    throw new IllegalArgumentException("No enum constant " + clazz.getName() + "." + value);
  }

  /**
   * Converts a {@link JSONArray} to a {@code List} whose element type is taken from the field's
   * single generic type argument. Only {@code List<?>} fields are supported.
   */
  private List<Object> convertArrayToList(Field field, JSONArray array)
      throws IllegalAccessException, JSONException {
    if (List.class.isAssignableFrom(field.getType())) {
      ParameterizedType parameterizedType = (ParameterizedType) field.getGenericType();
      Type[] types = parameterizedType.getActualTypeArguments();
      if (types.length != 1) {
        throw new IllegalArgumentException("Only able to handle a single type in a list " +
            field.getName());
      }
      Class arrayClass = (Class) types[0];
      List<Object> objectList = new ArrayList<Object>();
      for (int i = 0; i < array.length(); ++i) {
        if (arrayClass.isEnum()) {
          objectList.add(getEnumValue(array.getString(i), arrayClass));
        } else if (canDirectlySerializeClass(arrayClass)) {
          objectList.add(array.get(i));
        } else {
          JSONObject jsonObject = array.getJSONObject(i);
          if (jsonObject == null) {
            objectList.add(null);
          } else {
            objectList.add(convertValue(jsonObject, arrayClass));
          }
        }
      }
      return objectList;
    } else {
      throw new IllegalArgumentException("only know how to deserialize List<?> on field "
          + field.getName());
    }
  }

  /**
   * Serializes the public fields of {@code fromValue} that are annotated with
   * {@link JsonProperty} into a {@link JSONObject}.
   */
  private JSONObject _convertToJSONObject(Object fromValue)
      throws JSONException, InvocationTargetException, IllegalAccessException {
    JSONObject jsonObject = new JSONObject();
    Field[] fields = fromValue.getClass().getFields();
    for (int i = 0; i < fields.length; ++i) {
      JsonProperty property = fields[i].getAnnotation(JsonProperty.class);
      if (property != null) {
        // AutoBox here ...
        Object value = fields[i].get(fromValue);
        // Prefer the runtime class over the declared type so subtypes serialize fully.
        Class clazz = fields[i].getType();
        if (value != null) {
          clazz = value.getClass();
        }
        String name = fields[i].getName();
        if (property.required() && value == null) {
          value = JSONObject.NULL;
        } else if (value == JSONObject.NULL) {
          // Leave it as null in this case.
        } else {
          value = getJsonValue(value, clazz, fields[i]);
        }
        jsonObject.put(name, value);
      }
    }
    return jsonObject;
  }

  /**
   * Converts a single field value to its JSON representation: lists become {@link JSONArray},
   * {@link JsonValue}-annotated types use that method, complex types recurse through
   * {@link #convertValue}, and non-finite floats become strings.
   */
  private Object getJsonValue(Object value, Class<?> clazz, Field field)
      throws InvocationTargetException, IllegalAccessException {
    if (value == null) {
      // Now technically we /could/ return JsonNode.NULL here but Chrome's webkit inspector croaks
      // if you pass a null "id"
      return null;
    }
    if (List.class.isAssignableFrom(clazz)) {
      return convertListToJsonArray(value);
    }
    // Finally check to see if there is a JsonValue present
    Method m = getJsonValueMethod(clazz);
    if (m != null) {
      return m.invoke(value);
    }
    if (!canDirectlySerializeClass(clazz)) {
      return convertValue(value, JSONObject.class);
    }
    // JSON has no support for NaN, Infinity or -Infinity, so we serialize
    // then as strings. Google Chrome's inspector will accept them just fine.
    if (clazz.equals(Double.class) || clazz.equals(Float.class)) {
      double doubleValue = ((Number) value).doubleValue();
      if (Double.isNaN(doubleValue)) {
        return "NaN";
      } else if (doubleValue == Double.POSITIVE_INFINITY) {
        return "Infinity";
      } else if (doubleValue == Double.NEGATIVE_INFINITY) {
        return "-Infinity";
      }
    }
    // hmm we should be able to directly serialize here...
    return value;
  }

  /** Converts each list element via {@link #getJsonValue} into a {@link JSONArray}. */
  private JSONArray convertListToJsonArray(Object value)
      throws InvocationTargetException, IllegalAccessException {
    JSONArray array = new JSONArray();
    List<Object> list = (List<Object>) value;
    for (Object obj : list) {
      // Send null, if this is an array of arrays we are screwed
      array.put(obj != null ? getJsonValue(obj, obj.getClass(), null /* field */) : null);
    }
    return array;
  }

  /**
   *
   * @param clazz
   * @return the first method annotated with {@link JsonValue} or null if one does not exist.
   */
  @Nullable
  private Method getJsonValueMethod(Class<?> clazz) {
    synchronized (mJsonValueMethodCache) {
      Method method = mJsonValueMethodCache.get(clazz);
      if (method == null && !mJsonValueMethodCache.containsKey(clazz)) {
        method = getJsonValueMethodImpl(clazz);
        mJsonValueMethodCache.put(clazz, method);
      }
      return method;
    }
  }

  @Nullable
  private static Method getJsonValueMethodImpl(Class<?> clazz) {
    Method[] methods = clazz.getMethods();
    for (int i = 0; i < methods.length; ++i) {
      Annotation jsonValue = methods[i].getAnnotation(JsonValue.class);
      if (jsonValue != null) {
        return methods[i];
      }
    }
    return null;
  }

  /** True for types that org.json can store directly (primitives, wrappers, String). */
  private static boolean canDirectlySerializeClass(Class clazz) {
    return isWrapperOrPrimitiveType(clazz) ||
        clazz.equals(String.class);
  }

  private static boolean isWrapperOrPrimitiveType(Class<?> clazz) {
    return clazz.isPrimitive() ||
        clazz.equals(Boolean.class) ||
        clazz.equals(Integer.class) ||
        clazz.equals(Character.class) ||
        clazz.equals(Byte.class) ||
        clazz.equals(Short.class) ||
        clazz.equals(Double.class) ||
        clazz.equals(Long.class) ||
        clazz.equals(Float.class);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.replication.queue.impl;
import java.util.Dictionary;
import org.apache.sling.replication.agent.ReplicationAgent;
import org.apache.sling.replication.queue.ReplicationQueue;
import org.apache.sling.replication.queue.ReplicationQueueItem;
import org.apache.sling.replication.queue.ReplicationQueueItemState;
import org.apache.sling.replication.queue.ReplicationQueueProvider;
import org.junit.Test;
import org.osgi.service.component.ComponentContext;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Testcase for {@link org.apache.sling.replication.queue.impl.PriorityPathDistributionStrategy}
*/
public class PriorityPathQueueDistributionStrategyTest {
@Test
public void testPackageAdditionWithSucceedingItemDelivery() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    ComponentContext context = mock(ComponentContext.class);
    Dictionary properties = mock(Dictionary.class);
    when(properties.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    when(context.getProperties()).thenReturn(properties);
    strategy.activate(context);

    // An item for a non-priority path goes to the agent's default queue,
    // which reports a successful delivery state.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/etc"});
    ReplicationQueue queue = mock(ReplicationQueue.class);
    when(queue.add(item)).thenReturn(true);
    ReplicationQueueItemState state = mock(ReplicationQueueItemState.class);
    when(state.isSuccessful()).thenReturn(true);
    when(queue.getStatus(item)).thenReturn(state);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getDefaultQueue("agentName")).thenReturn(queue);

    ReplicationQueueItemState returnedState = strategy.add("agentName", item, provider);
    assertNotNull(returnedState);
    assertTrue(returnedState.isSuccessful());
}
@Test
public void testPackageAdditionWithSucceedingItemDeliveryOnPriorityPath() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    ComponentContext context = mock(ComponentContext.class);
    Dictionary properties = mock(Dictionary.class);
    when(properties.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    when(context.getProperties()).thenReturn(properties);
    strategy.activate(context);

    // An item under /content is routed to the priority queue for that path,
    // which reports a successful delivery state.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/content/sample1"});
    ReplicationQueue queue = mock(ReplicationQueue.class);
    when(queue.add(item)).thenReturn(true);
    ReplicationQueueItemState state = mock(ReplicationQueueItemState.class);
    when(state.isSuccessful()).thenReturn(true);
    when(queue.getStatus(item)).thenReturn(state);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getQueue("agentName", "/content")).thenReturn(queue);

    ReplicationQueueItemState returnedState = strategy.add("agentName", item, provider);
    assertNotNull(returnedState);
    assertTrue(returnedState.isSuccessful());
}
@Test
public void testPackageAdditionWithFailingItemDelivery() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    ComponentContext context = mock(ComponentContext.class);
    Dictionary properties = mock(Dictionary.class);
    when(properties.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    when(context.getProperties()).thenReturn(properties);
    strategy.activate(context);

    // The default queue accepts the non-priority item but reports an
    // unsuccessful delivery state.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/etc"});
    ReplicationQueue queue = mock(ReplicationQueue.class);
    when(queue.add(item)).thenReturn(true);
    ReplicationQueueItemState state = mock(ReplicationQueueItemState.class);
    when(state.isSuccessful()).thenReturn(false);
    when(queue.getStatus(item)).thenReturn(state);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getDefaultQueue("agentName")).thenReturn(queue);

    ReplicationQueueItemState returnedState = strategy.add("agentName", item, provider);
    assertNotNull(returnedState);
    assertFalse(returnedState.isSuccessful());
}
@Test
public void testPackageAdditionWithFailingItemDeliveryOnPriorityPath() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    ComponentContext context = mock(ComponentContext.class);
    Dictionary properties = mock(Dictionary.class);
    when(properties.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    when(context.getProperties()).thenReturn(properties);
    strategy.activate(context);

    // The /content priority queue accepts the item but reports an
    // unsuccessful delivery state.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/content/sample2"});
    ReplicationQueue queue = mock(ReplicationQueue.class);
    when(queue.add(item)).thenReturn(true);
    ReplicationQueueItemState state = mock(ReplicationQueueItemState.class);
    when(state.isSuccessful()).thenReturn(false);
    when(queue.getStatus(item)).thenReturn(state);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getQueue("agentName", "/content")).thenReturn(queue);

    ReplicationQueueItemState returnedState = strategy.add("agentName", item, provider);
    assertNotNull(returnedState);
    assertFalse(returnedState.isSuccessful());
}
@Test
public void testPackageAdditionWithNullItemStateFromTheQueue() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    Dictionary config = mock(Dictionary.class);
    when(config.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    ComponentContext componentContext = mock(ComponentContext.class);
    when(componentContext.getProperties()).thenReturn(config);
    strategy.activate(componentContext);

    // /etc matches no priority path, so the default queue is used.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/etc"});
    ReplicationQueue defaultQueue = mock(ReplicationQueue.class);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getDefaultQueue("agentName")).thenReturn(defaultQueue);
    when(defaultQueue.add(item)).thenReturn(true);

    // getStatus is not stubbed, so the queue yields a null state;
    // the strategy is expected to propagate that null.
    ReplicationQueueItemState result = strategy.add("agentName", item, provider);
    assertNull(result);
}
@Test
public void testPackageAdditionWithNullItemStateFromTheQueueOnPriorityPath() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    Dictionary config = mock(Dictionary.class);
    when(config.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    ComponentContext componentContext = mock(ComponentContext.class);
    when(componentContext.getProperties()).thenReturn(config);
    strategy.activate(componentContext);

    // An item under /apps should be routed to the /apps priority queue.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/apps/some/stuff"});
    ReplicationQueue priorityQueue = mock(ReplicationQueue.class);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getQueue("agentName", "/apps")).thenReturn(priorityQueue);
    when(priorityQueue.add(item)).thenReturn(true);

    // With getStatus unstubbed the queue yields a null state,
    // which the strategy is expected to propagate.
    ReplicationQueueItemState result = strategy.add("agentName", item, provider);
    assertNull(result);
}
@Test
public void testPackageAdditionWithNotNullItemStateFromTheQueue() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    Dictionary config = mock(Dictionary.class);
    when(config.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    ComponentContext componentContext = mock(ComponentContext.class);
    when(componentContext.getProperties()).thenReturn(config);
    strategy.activate(componentContext);

    // /etc matches no priority path, so the default queue is used.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/etc"});
    ReplicationQueue defaultQueue = mock(ReplicationQueue.class);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getDefaultQueue("agentName")).thenReturn(defaultQueue);
    when(defaultQueue.add(item)).thenReturn(true);

    // The queue returns a (non-null) item state after addition.
    ReplicationQueueItemState queueState = mock(ReplicationQueueItemState.class);
    when(defaultQueue.getStatus(item)).thenReturn(queueState);

    ReplicationQueueItemState result = strategy.add("agentName", item, provider);
    assertNotNull(result);
}
@Test
public void testPackageAdditionWithNotNullItemStateFromTheQueueOnPriorityPath() throws Exception {
    // Activate the strategy with two configured priority paths.
    PriorityPathDistributionStrategy strategy = new PriorityPathDistributionStrategy();
    Dictionary config = mock(Dictionary.class);
    when(config.get("priority.paths")).thenReturn(new String[]{"/content", "/apps"});
    ComponentContext componentContext = mock(ComponentContext.class);
    when(componentContext.getProperties()).thenReturn(config);
    strategy.activate(componentContext);

    // An item exactly at /apps should be routed to the /apps priority queue.
    ReplicationQueueItem item = mock(ReplicationQueueItem.class);
    when(item.getPaths()).thenReturn(new String[]{"/apps"});
    ReplicationQueue priorityQueue = mock(ReplicationQueue.class);
    ReplicationQueueProvider provider = mock(ReplicationQueueProvider.class);
    when(provider.getQueue("agentName", "/apps")).thenReturn(priorityQueue);
    when(priorityQueue.add(item)).thenReturn(true);

    // The queue returns a (non-null) item state after addition.
    ReplicationQueueItemState queueState = mock(ReplicationQueueItemState.class);
    when(priorityQueue.getStatus(item)).thenReturn(queueState);

    ReplicationQueueItemState result = strategy.add("agentName", item, provider);
    assertNotNull(result);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.pool;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.j2ee.statistics.BoundedRangeStatistic;
import javax.management.j2ee.statistics.CountStatistic;
import javax.management.j2ee.statistics.Stats;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
import org.apache.geronimo.gbean.GBeanInfo;
import org.apache.geronimo.gbean.GBeanInfoBuilder;
import org.apache.geronimo.gbean.GBeanLifecycle;
import org.apache.geronimo.management.J2EEManagedObject;
import org.apache.geronimo.management.StatisticsProvider;
import org.apache.geronimo.management.geronimo.stats.ThreadPoolStats;
import org.apache.geronimo.management.stats.BoundedRangeStatisticImpl;
import org.apache.geronimo.management.stats.CountStatisticImpl;
import org.apache.geronimo.management.stats.StatsImpl;
/**
* @version $Rev$ $Date$
*/
/**
 * A GBean-managed thread pool backed by a {@link ThreadPoolExecutor} with a
 * {@link SynchronousQueue} (direct hand-off: tasks are never buffered inside the
 * executor — they go straight to a worker or are rejected, at which point
 * {@link #addToQueue} or {@link WaitWhenBlockedPolicy} may block the caller).
 * Per-consumer usage statistics are tracked and exposed via JSR-77 {@link Stats}.
 */
public class ThreadPool implements GeronimoExecutor, GBeanLifecycle, J2EEManagedObject, StatisticsProvider {
// Delegate executor; set to null by doStop() to mark this pool as stopped.
private ThreadPoolExecutor executor;
// Context class loader installed on worker threads by ThreadPoolThreadFactory.
private ClassLoader classLoader;
// JMX object name; also provides the pool's short name via its "name" key property.
private ObjectName objectName;
// When true, rejected tasks block the submitter until a worker takes them.
private boolean waitWhenBlocked;
// Statistics-related fields follow
private boolean statsActive = true;
private PoolStatsImpl stats = new PoolStatsImpl();
// Count of currently-running tasks per consumer name; guarded by "this".
private Map<String, Integer> clients = new HashMap<String, Integer>();
/**
 * Creates the pool.
 *
 * @param minPoolSize   core worker-thread count
 * @param maxPoolSize   maximum worker-thread count
 * @param poolName      prefix used to name worker threads
 * @param keepAliveTime idle-thread keep-alive time in milliseconds
 * @param classLoader   context class loader for worker threads
 * @param objectName    JMX object name string; must be well-formed
 * @throws IllegalStateException if the injected object name cannot be parsed
 */
public ThreadPool(int minPoolSize, int maxPoolSize, String poolName, long keepAliveTime, ClassLoader classLoader, String objectName) {
ThreadPoolExecutor p = new ThreadPoolExecutor(
minPoolSize, // core size
maxPoolSize, // max size
keepAliveTime, TimeUnit.MILLISECONDS,
new SynchronousQueue<Runnable>());
// Abort (throw RejectedExecutionException) by default; see setWaitWhenBlocked().
p.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
p.setThreadFactory(new ThreadPoolThreadFactory(poolName, classLoader));
try {
this.objectName = ObjectName.getInstance(objectName);
} catch (MalformedObjectNameException e) {
throw new IllegalStateException("Bad object name injected: " + e.getMessage(), e);
}
executor = p;
this.classLoader = classLoader;
// set pool stats start time
stats.setStartTime();
}
/** Returns the pool's short name, taken from the object name's "name" key property. */
public String getName() {
return objectName.getKeyProperty("name");
}
/** Returns the canonical JMX object name of this pool. */
public String getObjectName() {
return objectName.getCanonicalName();
}
public boolean isEventProvider() {
return true;
}
public boolean isStateManageable() {
return true;
}
public boolean isStatisticsProvider() {
return true;
}
/**
 * Refreshes and returns the pool statistics: updates the threads-in-use bounds,
 * current value, and low/high water marks from the executor, and snapshots the
 * per-consumer counts (under the pool lock when stats are active).
 */
public Stats getStats() {
stats.threadsInUse.setLowerBound(0);
stats.threadsInUse.setUpperBound(executor.getMaximumPoolSize());
// NOTE(review): getPoolSize() is the number of live worker threads, not the
// number of actively-running tasks (getActiveCount()) — confirm intended.
int inUse = executor.getPoolSize();
stats.threadsInUse.setCurrent(inUse);
if (inUse < stats.threadsInUse.getLowWaterMark()) {
stats.threadsInUse.setLowWaterMark(inUse);
}
if (inUse > stats.threadsInUse.getHighWaterMark()) {
stats.threadsInUse.setHighWaterMark(inUse);
}
if (statsActive) {
// Snapshot the per-consumer counts while holding the lock that
// startWork()/finishWork() use to update them.
synchronized (this) {
stats.prepareConsumers(clients);
}
} else {
stats.prepareConsumers(Collections.<String, Integer> emptyMap());
}
// set last sample time
stats.setLastSampleTime();
return stats;
}
/**
 * Reset all statistics in PoolStatsImpl object
 */
public void resetStats() {
stats.threadsInUse.setLowerBound(0);
stats.threadsInUse.setUpperBound(0);
stats.threadsInUse.setCurrent(0);
stats.threadsInUse.setLowWaterMark(0);
stats.threadsInUse.setHighWaterMark(0);
stats.setStartTime();
}
/** JSR-77 stats holder: threads-in-use range plus one count statistic per consumer. */
public static class PoolStatsImpl extends StatsImpl implements ThreadPoolStats {
private BoundedRangeStatisticImpl threadsInUse = new BoundedRangeStatisticImpl(
"Threads In Use", "",
"The number of threads in use by this thread pool");
// Current per-consumer count statistics, keyed by consumer name.
private Map<String, CountStatisticImpl> consumers = new HashMap<String, CountStatisticImpl>();
public PoolStatsImpl() {
addStat(threadsInUse.getName(), threadsInUse);
}
public BoundedRangeStatistic getThreadsInUse() {
return threadsInUse;
}
public CountStatistic getCountForConsumer(String consumer) {
return consumers.get(consumer);
}
public String[] getThreadConsumers() {
return consumers.keySet().toArray(new String[consumers.size()]);
}
/**
 * Rebuilds the per-consumer statistics from the given live counts: reuses the
 * existing CountStatisticImpl for consumers still present, creates (and
 * registers) new ones as needed, and unregisters statistics for consumers
 * that no longer appear in the map.
 */
public void prepareConsumers(Map<String, Integer> clients) {
Map<String, CountStatisticImpl> result = new HashMap<String, CountStatisticImpl>();
for (Map.Entry<String, Integer> entry : clients.entrySet()) {
String client = entry.getKey();
Integer count = entry.getValue();
CountStatisticImpl stat = consumers.get(client);
if (stat == null) {
stat = new CountStatisticImpl("Threads for " + client, "", "The number of threads used by the client known as '" + client + "'", count.intValue());
addStat(stat.getName(), stat);
} else {
// Removing here leaves "consumers" holding only the stale entries,
// which are unregistered in the loop below.
consumers.remove(client);
stat.setCount(count.intValue());
}
result.put(client, stat);
}
for (Iterator<String> it = consumers.keySet().iterator(); it.hasNext();) {
String client = it.next();
removeStat((consumers.get(client)).getName());
}
consumers = result;
}
}
public int getPoolSize() {
return executor.getPoolSize();
}
public int getMaximumPoolSize() {
return executor.getMaximumPoolSize();
}
public int getActiveCount() {
return executor.getActiveCount();
}
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
return executor.awaitTermination(timeout, unit);
}
/** Executes a task under the default consumer name "Unknown". */
public void execute(Runnable command) {
execute("Unknown", command);
}
/** Executes a task with a submission timeout under the default consumer name. */
public void execute(Runnable command, long timeout, TimeUnit unit) {
execute("Unknown", command, timeout, unit);
}
/**
 * Executes a task, waiting up to the given timeout for a worker when no thread is
 * immediately available. In wait-when-blocked mode the task is offered directly to
 * the hand-off queue; otherwise normal submission is attempted first and the queue
 * is used only after a RejectedExecutionException.
 * NOTE(review): this path does not wrap the command for per-consumer statistics
 * the way execute(String, Runnable) does when it goes through addToQueue — confirm
 * whether that is intentional.
 */
public void execute(final String consumerName, final Runnable command, long timeout, TimeUnit unit) {
if (waitWhenBlocked) {
addToQueue(command, timeout, unit);
} else {
try {
execute(consumerName, command);
} catch (RejectedExecutionException e) {
addToQueue(command, timeout, unit);
}
}
}
/**
 * Offers the task to the executor's SynchronousQueue, i.e. waits up to the timeout
 * for an idle worker to take it; throws RejectedExecutionException on timeout or
 * interruption.
 * NOTE(review): the interrupt status is not restored before rethrowing — confirm
 * callers do not depend on it.
 */
private void addToQueue(Runnable command, long timeout, TimeUnit unit) {
try {
boolean added = executor.getQueue().offer(command, timeout, unit);
if (!added) {
throw new RejectedExecutionException();
}
} catch (InterruptedException e) {
throw new RejectedExecutionException(e);
}
}
/**
 * Executes a task attributed to the given consumer. When statistics are active the
 * task is wrapped so startWork/finishWork bracket its run; the task always runs via
 * ContextClassLoaderRunnable so its fields can be cleared for garbage collection.
 *
 * @throws IllegalStateException if the pool has been stopped (doStop nulled the executor)
 */
public void execute(final String consumerName, final Runnable runnable) {
Runnable command;
if (statsActive) {
command = new Runnable() {
public void run() {
startWork(consumerName);
try {
runnable.run();
} finally {
finishWork(consumerName);
}
}
};
} else {
command = runnable;
}
// Read the executor reference under the same lock doStop() uses to null it.
ThreadPoolExecutor p;
synchronized (this) {
p = executor;
}
if (p == null) {
throw new IllegalStateException("ThreadPool has been stopped");
}
// NOTE(review): classLoader is read without holding the lock and is nulled by a
// concurrent doStop(); ContextClassLoaderRunnable tolerates null by skipping the task.
Runnable task = new ContextClassLoaderRunnable(command, classLoader);
p.execute(task);
}
// Increments the running-task count for the consumer (guarded by "this").
private synchronized void startWork(String consumerName) {
Integer test = clients.get(consumerName);
if (test == null) {
clients.put(consumerName, Integer.valueOf(1));
} else {
clients.put(consumerName, Integer.valueOf(test.intValue() + 1));
}
}
// Decrements the running-task count; assumes a matching startWork() happened
// first (throws NullPointerException otherwise).
private synchronized void finishWork(String consumerName) {
Integer test = clients.get(consumerName);
if (test.intValue() == 1) {
clients.remove(consumerName);
} else {
clients.put(consumerName, Integer.valueOf(test.intValue() - 1));
}
}
/**
 * Rejection handler that blocks the submitting thread in queue.put() until a
 * worker takes the task (used when waitWhenBlocked is enabled).
 */
private static class WaitWhenBlockedPolicy
implements RejectedExecutionHandler
{
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) throws RejectedExecutionException {
try {
executor.getQueue().put(r);
}
catch (InterruptedException e) {
throw new RejectedExecutionException(e);
}
}
}
/** Switches between blocking (WaitWhenBlockedPolicy) and aborting rejection behavior. */
public void setWaitWhenBlocked(boolean wait) {
waitWhenBlocked = wait;
if(wait) {
executor.setRejectedExecutionHandler(new WaitWhenBlockedPolicy());
} else {
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
}
}
public boolean isWaitWhenBlocked() {
return waitWhenBlocked;
}
public void doStart() throws Exception {
}
/**
 * Stops the pool: clears the executor and class-loader references under the lock
 * (so execute() sees the pool as stopped) and shuts the executor down, interrupting
 * running tasks.
 */
public void doStop() throws Exception {
ThreadPoolExecutor p;
synchronized (this) {
p = executor;
executor = null;
classLoader = null;
}
if (p != null) {
p.shutdownNow();
}
}
/** Best-effort stop on failure; exceptions from doStop() are deliberately ignored. */
public void doFail() {
try {
doStop();
} catch (Exception e) {
}
}
/**
 * Thread factory that names workers "poolName N" with an increasing id and sets
 * the pool's context class loader on each new thread.
 */
private static final class ThreadPoolThreadFactory implements ThreadFactory {
private final String poolName;
private final ClassLoader classLoader;
private int nextWorkerID = 0;
public ThreadPoolThreadFactory(String poolName, ClassLoader classLoader) {
this.poolName = poolName;
this.classLoader = classLoader;
}
public Thread newThread(Runnable arg0) {
Thread thread = new Thread(arg0, poolName + " " + getNextWorkerID());
thread.setContextClassLoader(classLoader);
return thread;
}
private synchronized int getNextWorkerID() {
return nextWorkerID++;
}
}
/**
 * Wrapper that clears its task/class-loader fields before running so they can be
 * garbage collected, and restores the thread's context class loader afterwards.
 * If the class loader was already nulled (pool stopped), the task is skipped.
 */
private static final class ContextClassLoaderRunnable implements Runnable {
private Runnable task;
private ClassLoader classLoader;
public ContextClassLoaderRunnable(Runnable task, ClassLoader classLoader) {
this.task = task;
this.classLoader = classLoader;
}
public void run() {
Runnable myTask = task;
ClassLoader myClassLoader = classLoader;
// clear fields so they can be garbage collected
task = null;
classLoader = null;
if (myClassLoader != null) {
// we assume the thread classloader is already set to our final class loader
// (ThreadPoolThreadFactory sets it at thread creation) because the only way
// to access the thread is wrapped with the Runnable or via the initial thread pool
try {
myTask.run();
} finally {
// Re-assert the pool class loader in case the task changed it.
Thread.currentThread().setContextClassLoader(myClassLoader);
}
}
}
}
public static final GBeanInfo GBEAN_INFO;
static {
// GBean metadata: attributes mirror the constructor parameters plus the
// mutable waitWhenBlocked flag.
GBeanInfoBuilder infoFactory = GBeanInfoBuilder.createStatic(ThreadPool.class, "GBean");
infoFactory.addAttribute("minPoolSize", int.class, true);
infoFactory.addAttribute("maxPoolSize", int.class, true);
infoFactory.addAttribute("poolName", String.class, true);
infoFactory.addAttribute("keepAliveTime", long.class, true);
infoFactory.addAttribute("waitWhenBlocked", boolean.class, true);
infoFactory.addAttribute("objectName", String.class, false);
infoFactory.addAttribute("classLoader", ClassLoader.class, false);
infoFactory.addInterface(GeronimoExecutor.class);
infoFactory.setConstructor(new String[]{"minPoolSize", "maxPoolSize", "poolName", "keepAliveTime", "classLoader", "objectName"});
GBEAN_INFO = infoFactory.getBeanInfo();
}
public static GBeanInfo getGBeanInfo() {
return GBEAN_INFO;
}
}
| |
package com.github.javaparser.symbolsolver.javaparsermodel;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.*;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.UnknownType;
import com.github.javaparser.symbolsolver.core.resolution.Context;
import com.github.javaparser.symbolsolver.javaparsermodel.declarations.JavaParserSymbolDeclaration;
import com.github.javaparser.symbolsolver.logic.FunctionalInterfaceLogic;
import com.github.javaparser.symbolsolver.logic.InferenceContext;
import com.github.javaparser.symbolsolver.model.declarations.*;
import com.github.javaparser.symbolsolver.model.methods.MethodUsage;
import com.github.javaparser.symbolsolver.model.resolution.SymbolReference;
import com.github.javaparser.symbolsolver.model.resolution.TypeSolver;
import com.github.javaparser.symbolsolver.model.resolution.Value;
import com.github.javaparser.symbolsolver.model.typesystem.*;
import com.github.javaparser.symbolsolver.reflectionmodel.MyObjectProvider;
import com.github.javaparser.symbolsolver.reflectionmodel.ReflectionClassDeclaration;
import com.github.javaparser.symbolsolver.resolution.SymbolSolver;
import com.github.javaparser.symbolsolver.resolution.typesolvers.ReflectionTypeSolver;
import com.google.common.collect.ImmutableList;
import java.util.Optional;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.github.javaparser.symbolsolver.javaparser.Navigator.getParentNode;
/**
 * Visitor that computes the resolved {@link Type} of an AST node. The Boolean
 * argument ("solveLambdas") controls whether lambda and method-reference types are
 * fully inferred against their target functional interface or just taken from the
 * formal parameter type of the enclosing method call.
 */
public class TypeExtractor extends DefaultVisitorAdapter {
private static Logger logger = Logger.getLogger(TypeExtractor.class.getCanonicalName());
static {
logger.setLevel(Level.INFO);
ConsoleHandler consoleHandler = new ConsoleHandler();
consoleHandler.setLevel(Level.INFO);
logger.addHandler(consoleHandler);
}
private TypeSolver typeSolver;
private JavaParserFacade facade;
public TypeExtractor(TypeSolver typeSolver, JavaParserFacade facade) {
this.typeSolver = typeSolver;
this.facade = facade;
}
// A declarator's type comes from its declaration; only field declarations and
// local variable declaration expressions are supported as parents.
@Override
public Type visit(VariableDeclarator node, Boolean solveLambdas) {
if (getParentNode(node) instanceof FieldDeclaration) {
// FieldDeclaration parent = (FieldDeclaration) getParentNode(node);
return facade.convertToUsageVariableType(node);
} else if (getParentNode(node) instanceof VariableDeclarationExpr) {
// VariableDeclarationExpr parent = (VariableDeclarationExpr) getParentNode(node);
return facade.convertToUsageVariableType(node);
} else {
throw new UnsupportedOperationException(getParentNode(node).getClass().getCanonicalName());
}
}
// A parameter's type is its declared type; inferred lambda parameters
// (UnknownType) cannot be resolved here.
@Override
public Type visit(Parameter node, Boolean solveLambdas) {
if (node.getType() instanceof UnknownType) {
throw new IllegalStateException("Parameter has unknown type: " + node);
}
return facade.convertToUsage(node.getType(), node);
}
// a[i] has the component type of a's array type.
@Override
public Type visit(ArrayAccessExpr node, Boolean solveLambdas) {
Type arrayUsageType = node.getName().accept(this, solveLambdas);
if (arrayUsageType.isArray()) {
return ((ArrayType) arrayUsageType).getComponentType();
}
return arrayUsageType;
}
// new T[..][..] wraps the element type in one ArrayType per dimension level.
@Override
public Type visit(ArrayCreationExpr node, Boolean solveLambdas) {
Type res = facade.convertToUsage(node.getElementType(), JavaParserFactory.getContext(node, typeSolver));
for (int i = 0; i < node.getLevels().size(); i++) {
res = new ArrayType(res);
}
return res;
}
@Override
public Type visit(ArrayInitializerExpr node, Boolean solveLambdas) {
throw new UnsupportedOperationException(node.getClass().getCanonicalName());
}
// An assignment expression has the type of its target.
@Override
public Type visit(AssignExpr node, Boolean solveLambdas) {
return node.getTarget().accept(this, solveLambdas);
}
// Arithmetic operators delegate to the facade for the combined operand type;
// comparisons and logical operators are boolean; the remaining (bitwise/shift/
// remainder/xor) operators take the left operand's type.
// NOTE(review): REMAINDER and XOR use only the left operand's type — binary
// numeric promotion is not applied here; confirm this is intended.
@Override
public Type visit(BinaryExpr node, Boolean solveLambdas) {
switch (node.getOperator()) {
case PLUS:
case MINUS:
case DIVIDE:
case MULTIPLY:
return facade.getBinaryTypeConcrete(node.getLeft(), node.getRight(), solveLambdas);
case LESS_EQUALS:
case LESS:
case GREATER:
case GREATER_EQUALS:
case EQUALS:
case NOT_EQUALS:
case OR:
case AND:
return PrimitiveType.BOOLEAN;
case BINARY_AND:
case BINARY_OR:
case SIGNED_RIGHT_SHIFT:
case UNSIGNED_RIGHT_SHIFT:
case LEFT_SHIFT:
case REMAINDER:
case XOR:
return node.getLeft().accept(this, solveLambdas);
default:
throw new UnsupportedOperationException("FOO " + node.getOperator().name());
}
}
// A cast expression has the type it casts to.
@Override
public Type visit(CastExpr node, Boolean solveLambdas) {
return facade.convertToUsage(node.getType(), JavaParserFactory.getContext(node, typeSolver));
}
// T.class has type Class<T>.
@Override
public Type visit(ClassExpr node, Boolean solveLambdas) {
// This implementation does not regard the actual type argument of the ClassExpr.
com.github.javaparser.ast.type.Type astType = node.getType();
Type jssType = facade.convertToUsage(astType, node.getType());
return new ReferenceTypeImpl(new ReflectionClassDeclaration(Class.class, typeSolver), ImmutableList.of(jssType), typeSolver);
}
// cond ? a : b is typed from the "then" branch only.
// NOTE(review): the JLS conditional-expression type also involves the "else"
// branch (lub / numeric promotion) — confirm this simplification is acceptable.
@Override
public Type visit(ConditionalExpr node, Boolean solveLambdas) {
return node.getThenExpr().accept(this, solveLambdas);
}
// (expr) has the type of expr.
@Override
public Type visit(EnclosedExpr node, Boolean solveLambdas) {
return node.getInner().get().accept(this, solveLambdas);
}
/**
 * Resolves the type of scope.field by trying, in order: a static access where the
 * scope names a type; an access through "this"; a scope that looks like a fully
 * qualified class name; and finally ordinary symbol solving. When symbol solving
 * fails, the whole expression is retried as a fully qualified type name (badly
 * parsed FQNs show up as nested FieldAccessExpr).
 */
@Override
public Type visit(FieldAccessExpr node, Boolean solveLambdas) {
// We should understand if this is a static access
if (node.getScope().isPresent() && node.getScope().get() instanceof NameExpr) {
NameExpr staticValue = (NameExpr) node.getScope().get();
SymbolReference<TypeDeclaration> typeAccessedStatically = JavaParserFactory.getContext(node, typeSolver).solveType(staticValue.toString(), typeSolver);
if (typeAccessedStatically.isSolved()) {
// TODO here maybe we have to substitute type typeParametersValues
return ((ReferenceTypeDeclaration) typeAccessedStatically.getCorrespondingDeclaration()).getField(node.getField().getId()).getType();
}
} else if (node.getScope().isPresent() && node.getScope().get() instanceof ThisExpr){
// If we are accessing through a 'this' expression, first resolve the type
// corresponding to 'this'
SymbolReference<TypeDeclaration> solve = facade.solve((ThisExpr) node.getScope().get());
// If found get it's declaration and get the field in there
if (solve.isSolved()){
TypeDeclaration correspondingDeclaration = solve.getCorrespondingDeclaration();
if (correspondingDeclaration instanceof ReferenceTypeDeclaration){
return ((ReferenceTypeDeclaration) correspondingDeclaration).getField(node.getField().getId()).getType();
}
}
} else if (node.getScope().isPresent() && node.getScope().get().toString().indexOf('.') > 0) {
// try to find fully qualified name
SymbolReference<ReferenceTypeDeclaration> sr = typeSolver.tryToSolveType(node.getScope().get().toString());
if (sr.isSolved()) {
return sr.getCorrespondingDeclaration().getField(node.getField().getId()).getType();
}
}
Optional<Value> value = null;
try {
value = new SymbolSolver(typeSolver).solveSymbolAsValue(node.getField().getId(), node);
} catch (UnsolvedSymbolException use) {
// Deal with badly parsed FieldAccessExpr that are in fact fqn classes
if (node.getParentNode().isPresent() && node.getParentNode().get() instanceof FieldAccessExpr) {
throw use;
}
SymbolReference<ReferenceTypeDeclaration> sref = typeSolver.tryToSolveType(node.toString());
if (sref.isSolved()) {
return new ReferenceTypeImpl(sref.getCorrespondingDeclaration(), typeSolver);
}
}
if (value != null && value.isPresent()) {
return value.get().getType();
} else {
throw new UnsolvedSymbolException(node.getField().getId());
}
}
@Override
public Type visit(InstanceOfExpr node, Boolean solveLambdas) {
return PrimitiveType.BOOLEAN;
}
@Override
public Type visit(StringLiteralExpr node, Boolean solveLambdas) {
return new ReferenceTypeImpl(new ReflectionTypeSolver().solveType("java.lang.String"), typeSolver);
}
@Override
public Type visit(IntegerLiteralExpr node, Boolean solveLambdas) {
return PrimitiveType.INT;
}
@Override
public Type visit(LongLiteralExpr node, Boolean solveLambdas) {
return PrimitiveType.LONG;
}
@Override
public Type visit(CharLiteralExpr node, Boolean solveLambdas) {
return PrimitiveType.CHAR;
}
// An "f"/"F"-suffixed floating literal is a float; otherwise it is a double.
@Override
public Type visit(DoubleLiteralExpr node, Boolean solveLambdas) {
if (node.getValue().toLowerCase().endsWith("f")) {
return PrimitiveType.FLOAT;
}
return PrimitiveType.DOUBLE;
}
@Override
public Type visit(BooleanLiteralExpr node, Boolean solveLambdas) {
return PrimitiveType.BOOLEAN;
}
@Override
public Type visit(NullLiteralExpr node, Boolean solveLambdas) {
return NullType.INSTANCE;
}
// A method call is typed by the return type of the resolved method usage.
@Override
public Type visit(MethodCallExpr node, Boolean solveLambdas) {
logger.finest("getType on method call " + node);
// first solve the method
MethodUsage ref = facade.solveMethodAsUsage(node);
logger.finest("getType on method call " + node + " resolved to " + ref);
logger.finest("getType on method call " + node + " return type is " + ref.returnType());
return ref.returnType();
// the type is the return type of the method
}
// A name expression is resolved as a symbol (local, parameter, field, ...).
@Override
public Type visit(NameExpr node, Boolean solveLambdas) {
logger.finest("getType on name expr " + node);
Optional<Value> value = new SymbolSolver(typeSolver).solveSymbolAsValue(node.getName().getId(), node);
if (!value.isPresent()) {
throw new UnsolvedSymbolException("Solving " + node, node.getName().getId());
} else {
return value.get().getType();
}
}
// "new T(...)" has type T.
@Override
public Type visit(ObjectCreationExpr node, Boolean solveLambdas) {
Type type = facade.convertToUsage(node.getType(), node);
return type;
}
/**
 * Resolves the type of "this": for a qualified "MyClass.this" the named class is
 * looked up via the type solver and then in the enclosing compilation unit; an
 * unqualified "this" is the containing type declaration.
 */
@Override
public Type visit(ThisExpr node, Boolean solveLambdas) {
// If 'this' is prefixed by a class eg. MyClass.this
if (node.getClassExpr().isPresent()){
// Get the class name
String className = node.getClassExpr().get().toString();
// Attempt to resolve using a typeSolver
SymbolReference<ReferenceTypeDeclaration> clazz = typeSolver.tryToSolveType(className);
if (clazz.isSolved()){
return new ReferenceTypeImpl(clazz.getCorrespondingDeclaration(),typeSolver);
}
// Attempt to resolve locally in Compilation unit
Optional<CompilationUnit> cu = node.getAncestorOfType(CompilationUnit.class);
if (cu.isPresent()){
Optional<ClassOrInterfaceDeclaration> classByName = cu.get().getClassByName(className);
if (classByName.isPresent()){
return new ReferenceTypeImpl(facade.getTypeDeclaration(classByName.get()), typeSolver);
}
}
}
return new ReferenceTypeImpl(facade.getTypeDeclaration(facade.findContainingTypeDecl(node)), typeSolver);
}
// "super" is typed as the superclass of the containing class declaration.
@Override
public Type visit(SuperExpr node, Boolean solveLambdas) {
TypeDeclaration typeOfNode = facade.getTypeDeclaration(facade.findContainingTypeDecl(node));
if (typeOfNode instanceof ClassDeclaration) {
return ((ClassDeclaration) typeOfNode).getSuperClass();
} else {
throw new UnsupportedOperationException(node.getClass().getCanonicalName());
}
}
// Unary +/-/++/-- keep the operand's type; logical complement is boolean.
@Override
public Type visit(UnaryExpr node, Boolean solveLambdas) {
switch (node.getOperator()) {
case MINUS:
case PLUS:
return node.getExpression().accept(this, solveLambdas);
case LOGICAL_COMPLEMENT:
return PrimitiveType.BOOLEAN;
case POSTFIX_DECREMENT:
case PREFIX_DECREMENT:
case POSTFIX_INCREMENT:
case PREFIX_INCREMENT:
return node.getExpression().accept(this, solveLambdas);
default:
throw new UnsupportedOperationException(node.getOperator().name());
}
}
// Only single-variable declaration expressions are supported.
@Override
public Type visit(VariableDeclarationExpr node, Boolean solveLambdas) {
if (node.getVariables().size() != 1) {
throw new UnsupportedOperationException();
}
return facade.convertToUsageVariableType(node.getVariables().get(0));
}
/**
 * Resolves the type of a lambda from its position as an argument of a method call:
 * the formal parameter type is looked up, scope type parameters are applied (for
 * non-static calls), generics are solved in context, and — when solveLambdas is
 * true — the functional interface's return type is inferred from the lambda body.
 * Lambdas in any other position are unsupported.
 */
@Override
public Type visit(LambdaExpr node, Boolean solveLambdas) {
if (getParentNode(node) instanceof MethodCallExpr) {
MethodCallExpr callExpr = (MethodCallExpr) getParentNode(node);
int pos = JavaParserSymbolDeclaration.getParamPos(node);
SymbolReference<MethodDeclaration> refMethod = facade.solve(callExpr);
if (!refMethod.isSolved()) {
throw new UnsolvedSymbolException(getParentNode(node).toString(), callExpr.getName().getId());
}
logger.finest("getType on lambda expr " + refMethod.getCorrespondingDeclaration().getName());
if (solveLambdas) {
// The type parameter referred here should be the java.util.stream.Stream.T
Type result = refMethod.getCorrespondingDeclaration().getParam(pos).getType();
if (callExpr.getScope().isPresent()) {
Expression scope = callExpr.getScope().get();
// If it is a static call we should not try to get the type of the scope
boolean staticCall = false;
if (scope instanceof NameExpr) {
NameExpr nameExpr = (NameExpr) scope;
try {
SymbolReference<TypeDeclaration> type = JavaParserFactory.getContext(nameExpr, typeSolver).solveType(nameExpr.getName().getId(), typeSolver);
if (type.isSolved()){
staticCall = true;
}
} catch (Exception e) {
// NOTE(review): failures here are deliberately ignored — the scope is
// then treated as an instance expression rather than a type name.
}
}
if (!staticCall) {
Type scopeType = facade.getType(scope);
if (scopeType.isReferenceType()) {
result = scopeType.asReferenceType().useThisTypeParametersOnTheGivenType(result);
}
}
}
// We need to replace the type variables
Context ctx = JavaParserFactory.getContext(node, typeSolver);
result = facade.solveGenericTypes(result, ctx, typeSolver);
//We should find out which is the functional method (e.g., apply) and replace the params of the
//solveLambdas with it, to derive so the values. We should also consider the value returned by the
//lambdas
Optional<MethodUsage> functionalMethod = FunctionalInterfaceLogic.getFunctionalMethod(result);
if (functionalMethod.isPresent()) {
LambdaExpr lambdaExpr = node;
InferenceContext lambdaCtx = new InferenceContext(MyObjectProvider.INSTANCE);
InferenceContext funcInterfaceCtx = new InferenceContext(MyObjectProvider.INSTANCE);
// At this point parameterType
// if Function<T=? super Stream.T, ? extends map.R>
// we should replace Stream.T
Type functionalInterfaceType = ReferenceTypeImpl.undeterminedParameters(functionalMethod.get().getDeclaration().declaringType(), typeSolver);
lambdaCtx.addPair(result, functionalInterfaceType);
// The lambda's actual return type: the expression itself for an
// expression body, or the last statement's first child for a block body.
Type actualType;
if (lambdaExpr.getBody() instanceof ExpressionStmt) {
actualType = facade.getType(((ExpressionStmt)lambdaExpr.getBody()).getExpression());
} else if (lambdaExpr.getBody() instanceof BlockStmt) {
BlockStmt blockStmt = (BlockStmt) lambdaExpr.getBody();
NodeList<Statement> statements = blockStmt.getStatements();
// Get the last statement in the block and use it's type
actualType = facade.getType(statements.get(statements.size() - 1).getChildNodes().get(0));
} else {
throw new UnsupportedOperationException();
}
Type formalType = functionalMethod.get().returnType();
// Infer the functional interfaces' return vs actual type
funcInterfaceCtx.addPair(formalType, actualType);
// Substitute to obtain a new type
Type functionalTypeWithReturn = funcInterfaceCtx.resolve(funcInterfaceCtx.addSingle(functionalInterfaceType));
// if the functional method returns void anyway
// we don't need to bother inferring types
if (!(formalType instanceof VoidType)){
lambdaCtx.addPair(result, functionalTypeWithReturn);
result = lambdaCtx.resolve(lambdaCtx.addSingle(result));
}
}
return result;
} else {
return refMethod.getCorrespondingDeclaration().getParam(pos).getType();
}
} else {
throw new UnsupportedOperationException("The type of a lambda expr depends on the position and its return value");
}
}
/**
 * Resolves the type of a method reference from its position as an argument of a
 * method call, mirroring the lambda case: the formal parameter type is taken from
 * the solved method usage, generics are solved in context, and — when solveLambdas
 * is true — the functional interface's return type is inferred from the referenced
 * method's return type. Method references in other positions are unsupported.
 */
@Override
public Type visit(MethodReferenceExpr node, Boolean solveLambdas) {
if (getParentNode(node) instanceof MethodCallExpr) {
MethodCallExpr callExpr = (MethodCallExpr) getParentNode(node);
int pos = JavaParserSymbolDeclaration.getParamPos(node);
SymbolReference<com.github.javaparser.symbolsolver.model.declarations.MethodDeclaration> refMethod = facade.solve(callExpr, false);
if (!refMethod.isSolved()) {
throw new UnsolvedSymbolException(getParentNode(node).toString(), callExpr.getName().getId());
}
logger.finest("getType on method reference expr " + refMethod.getCorrespondingDeclaration().getName());
//logger.finest("Method param " + refMethod.getCorrespondingDeclaration().getParam(pos));
if (solveLambdas) {
MethodUsage usage = facade.solveMethodAsUsage(callExpr);
Type result = usage.getParamType(pos);
// We need to replace the type variables
Context ctx = JavaParserFactory.getContext(node, typeSolver);
result = facade.solveGenericTypes(result, ctx, typeSolver);
//We should find out which is the functional method (e.g., apply) and replace the params of the
//solveLambdas with it, to derive so the values. We should also consider the value returned by the
//lambdas
Optional<MethodUsage> functionalMethod = FunctionalInterfaceLogic.getFunctionalMethod(result);
if (functionalMethod.isPresent()) {
if (node instanceof MethodReferenceExpr) {
MethodReferenceExpr methodReferenceExpr = (MethodReferenceExpr) node;
Type actualType = facade.toMethodUsage(methodReferenceExpr).returnType();
Type formalType = functionalMethod.get().returnType();
InferenceContext inferenceContext = new InferenceContext(MyObjectProvider.INSTANCE);
inferenceContext.addPair(formalType, actualType);
result = inferenceContext.resolve(inferenceContext.addSingle(result));
}
}
return result;
} else {
return refMethod.getCorrespondingDeclaration().getParam(pos).getType();
}
} else {
throw new UnsupportedOperationException("The type of a method reference expr depends on the position and its return value");
}
}
}
| |
package org.apache.lucene.queryparser.xml;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
 * Exercises the XML query parser: each test parses an XML query resource with
 * {@link CorePlusExtensionsParser} and runs it against a small Reuters index
 * built once per class.
 */
public class TestParser extends LuceneTestCase {
  /** Parser under test; builds Lucene queries from XML resource files. */
  private static CoreParser builder;
  private static Directory dir;
  private static IndexReader reader;
  private static IndexSearcher searcher;
  @BeforeClass
  public static void beforeClass() throws Exception {
    // TODO: rewrite test (this needs to set QueryParser.enablePositionIncrements, too, for work with CURRENT):
    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
    //initialize the parser
    builder = new CorePlusExtensionsParser("contents", analyzer);
    dir = newDirectory();
    // try-with-resources closes the reader and writer even when indexing
    // throws; the original closed them only on the success path and leaked
    // both on any exception.
    try (BufferedReader d = new BufferedReader(new InputStreamReader(
            TestParser.class.getResourceAsStream("reuters21578.txt"), StandardCharsets.US_ASCII));
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer))) {
      String line = d.readLine();
      while (line != null) {
        // each line is "<date>\t<contents>"
        int endOfDate = line.indexOf('\t');
        String date = line.substring(0, endOfDate).trim();
        String content = line.substring(endOfDate).trim();
        Document doc = new Document();
        doc.add(newTextField("date", date, Field.Store.YES));
        doc.add(newTextField("contents", content, Field.Store.YES));
        // parseInt avoids the needless boxing of Integer.valueOf
        doc.add(new IntField("date2", Integer.parseInt(date), Field.Store.NO));
        writer.addDocument(doc);
        line = d.readLine();
      }
    }
    reader = DirectoryReader.open(dir);
    searcher = newSearcher(reader);
  }
  @AfterClass
  public static void afterClass() throws Exception {
    reader.close();
    dir.close();
    // null out statics so the test framework does not flag leaked references
    reader = null;
    searcher = null;
    dir = null;
    builder = null;
  }
  public void testSimpleXML() throws ParserException, IOException {
    Query q = parse("TermQuery.xml");
    dumpResults("TermQuery", q, 5);
  }
  public void testSimpleTermsQueryXML() throws ParserException, IOException {
    Query q = parse("TermsQuery.xml");
    dumpResults("TermsQuery", q, 5);
  }
  public void testBooleanQueryXML() throws ParserException, IOException {
    Query q = parse("BooleanQuery.xml");
    dumpResults("BooleanQuery", q, 5);
  }
  public void testDisjunctionMaxQueryXML() throws ParserException, IOException {
    Query q = parse("DisjunctionMaxQuery.xml");
    assertTrue(q instanceof DisjunctionMaxQuery);
    DisjunctionMaxQuery d = (DisjunctionMaxQuery) q;
    assertEquals(0.0f, d.getTieBreakerMultiplier(), 0.0001f);
    assertEquals(2, d.getDisjuncts().size());
    // the XML nests a second DisjunctionMaxQuery with its own tie-breaker
    DisjunctionMaxQuery ndq = (DisjunctionMaxQuery) d.getDisjuncts().get(1);
    assertEquals(1.2f, ndq.getTieBreakerMultiplier(), 0.0001f);
    assertEquals(1, ndq.getDisjuncts().size());
  }
  public void testRangeFilterQueryXML() throws ParserException, IOException {
    Query q = parse("RangeFilterQuery.xml");
    dumpResults("RangeFilter", q, 5);
  }
  public void testUserQueryXML() throws ParserException, IOException {
    Query q = parse("UserInputQuery.xml");
    dumpResults("UserInput with Filter", q, 5);
  }
  public void testCustomFieldUserQueryXML() throws ParserException, IOException {
    Query q = parse("UserInputQueryCustomField.xml");
    int h = searcher.search(q, null, 1000).totalHits;
    assertEquals("UserInputQueryCustomField should produce 0 result ", 0, h);
  }
  public void testLikeThisQueryXML() throws Exception {
    Query q = parse("LikeThisQuery.xml");
    dumpResults("like this", q, 5);
  }
  public void testBoostingQueryXML() throws Exception {
    Query q = parse("BoostingQuery.xml");
    dumpResults("boosting ", q, 5);
  }
  public void testFuzzyLikeThisQueryXML() throws Exception {
    Query q = parse("FuzzyLikeThisQuery.xml");
    //show rewritten fuzzyLikeThisQuery - see what is being matched on
    if (VERBOSE) {
      System.out.println(q.rewrite(reader));
    }
    dumpResults("FuzzyLikeThis", q, 5);
  }
  public void testTermsFilterXML() throws Exception {
    Query q = parse("TermsFilterQuery.xml");
    dumpResults("Terms Filter", q, 5);
  }
  public void testBoostingTermQueryXML() throws Exception {
    Query q = parse("BoostingTermQuery.xml");
    dumpResults("BoostingTermQuery", q, 5);
  }
  public void testSpanTermXML() throws Exception {
    Query q = parse("SpanQuery.xml");
    dumpResults("Span Query", q, 5);
  }
  public void testConstantScoreQueryXML() throws Exception {
    Query q = parse("ConstantScoreQuery.xml");
    dumpResults("ConstantScoreQuery", q, 5);
  }
  public void testMatchAllDocsPlusFilterXML() throws ParserException, IOException {
    Query q = parse("MatchAllDocsQuery.xml");
    dumpResults("MatchAllDocsQuery with range filter", q, 5);
  }
  public void testBooleanFilterXML() throws ParserException, IOException {
    Query q = parse("BooleanFilter.xml");
    dumpResults("Boolean filter", q, 5);
  }
  public void testNestedBooleanQuery() throws ParserException, IOException {
    Query q = parse("NestedBooleanQuery.xml");
    dumpResults("Nested Boolean query", q, 5);
  }
  public void testCachedFilterXML() throws ParserException, IOException {
    Query q = parse("CachedFilter.xml");
    dumpResults("Cached filter", q, 5);
  }
  public void testDuplicateFilterQueryXML() throws ParserException, IOException {
    // DuplicateFilter only works on a single segment
    List<AtomicReaderContext> leaves = searcher.getTopReaderContext().leaves();
    Assume.assumeTrue(leaves.size() == 1);
    Query q = parse("DuplicateFilterQuery.xml");
    int h = searcher.search(q, null, 1000).totalHits;
    assertEquals("DuplicateFilterQuery should produce 1 result ", 1, h);
  }
  public void testNumericRangeFilterQueryXML() throws ParserException, IOException {
    Query q = parse("NumericRangeFilterQuery.xml");
    dumpResults("NumericRangeFilter", q, 5);
  }
  public void testNumericRangeQueryQueryXML() throws ParserException, IOException {
    Query q = parse("NumericRangeQueryQuery.xml");
    dumpResults("NumericRangeQuery", q, 5);
  }
  //================= Helper methods ===================================
  /** Parses the named XML classpath resource into a {@link Query}. */
  private Query parse(String xmlFileName) throws ParserException, IOException {
    InputStream xmlStream = TestParser.class.getResourceAsStream(xmlFileName);
    Query result = builder.parse(xmlStream);
    xmlStream.close();
    return result;
  }
  /** Runs the query, asserts it matches something, and (verbosely) prints top hits. */
  private void dumpResults(String qType, Query q, int numDocs) throws IOException {
    if (VERBOSE) {
      System.out.println("TEST: query=" + q);
    }
    TopDocs hits = searcher.search(q, null, numDocs);
    assertTrue(qType + " should produce results ", hits.totalHits > 0);
    if (VERBOSE) {
      System.out.println("=========" + qType + "============");
      ScoreDoc[] scoreDocs = hits.scoreDocs;
      for (int i = 0; i < Math.min(numDocs, hits.totalHits); i++) {
        Document ldoc = searcher.doc(scoreDocs[i].doc);
        System.out.println("[" + ldoc.get("date") + "]" + ldoc.get("contents"));
      }
      System.out.println();
    }
  }
}
| |
/*
* Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.resources.sqs;
import java.util.List;
import java.util.Map;
import com.amazonaws.resources.ResultCapture;
import com.amazonaws.services.sqs.model.AddPermissionRequest;
import com.amazonaws.services.sqs.model.ChangeMessageVisibilityBatchRequest;
import com.amazonaws.services.sqs.model.ChangeMessageVisibilityBatchRequestEntry
;
import com.amazonaws.services.sqs.model.ChangeMessageVisibilityBatchResult;
import com.amazonaws.services.sqs.model.DeleteMessageBatchRequest;
import com.amazonaws.services.sqs.model.DeleteMessageBatchRequestEntry;
import com.amazonaws.services.sqs.model.DeleteMessageBatchResult;
import com.amazonaws.services.sqs.model.DeleteQueueRequest;
import com.amazonaws.services.sqs.model.GetQueueAttributesRequest;
import com.amazonaws.services.sqs.model.GetQueueAttributesResult;
import com.amazonaws.services.sqs.model.ListDeadLetterSourceQueuesRequest;
import com.amazonaws.services.sqs.model.ReceiveMessageRequest;
import com.amazonaws.services.sqs.model.ReceiveMessageResult;
import com.amazonaws.services.sqs.model.RemovePermissionRequest;
import com.amazonaws.services.sqs.model.SendMessageBatchRequest;
import com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry;
import com.amazonaws.services.sqs.model.SendMessageBatchResult;
import com.amazonaws.services.sqs.model.SendMessageRequest;
import com.amazonaws.services.sqs.model.SendMessageResult;
import com.amazonaws.services.sqs.model.SetQueueAttributesRequest;
/**
 * The <code>Queue</code> resource. Each <code>Queue</code> object is uniquely
 * identified by its <code>Url</code> identifier.
 *
 * <p>For every action, the <code>QueueUrl</code> request parameter is
 * populated from the <code>Url</code> identifier of this resource, overriding
 * any conflicting value already set on the request.
 */
public interface Queue {
    /**
     * Returns true if this resource's attributes have been loaded. When this
     * returns {@code false}, attribute getters implicitly invoke
     * {@code load()} to retrieve their values.
     */
    boolean isLoaded();
    /**
     * Loads this resource's attributes if they are not loaded yet.
     *
     * @return {@code true} if the resource was not yet loaded when invoked,
     *         i.e. a service call was made to retrieve the attributes.
     * @see #load(GetQueueAttributesRequest)
     */
    boolean load();
    /**
     * Loads this resource's attributes if they are not loaded yet. Besides
     * <code>QueueUrl</code>, the <code>AttributeNames.0</code> parameter is
     * fixed to the constant value <code>All</code>.
     *
     * @return {@code true} if the resource was not yet loaded when invoked,
     *         i.e. a service call was made to retrieve the attributes.
     * @see GetQueueAttributesRequest
     */
    boolean load(GetQueueAttributesRequest request);
    /**
     * Same as {@link #load(GetQueueAttributesRequest)}, additionally capturing
     * the low-level client response via the given {@code ResultCapture}.
     *
     * @return {@code true} if the resource was not yet loaded when invoked,
     *         i.e. a service call was made to retrieve the attributes.
     * @see GetQueueAttributesRequest
     */
    boolean load(GetQueueAttributesRequest request,
            ResultCapture<GetQueueAttributesResult> extractor);
    /**
     * Gets the value of the Url identifier. Always returns the identifier
     * directly and never makes a service call.
     */
    String getUrl();
    /**
     * Gets the value of the Attributes attribute, implicitly calling
     * {@code load()} first if this resource is not yet loaded.
     */
    Map<String, String> getAttributes();
    /**
     * Gets the <code>Message</code> subresource for the given receipt handle.
     */
    Message getMessage(String receiptHandle);
    /**
     * Retrieves the DeadLetterSourceQueues collection referenced by this
     * resource.
     */
    QueueCollection getDeadLetterSourceQueues();
    /**
     * Retrieves the DeadLetterSourceQueues collection referenced by this
     * resource, using the given request.
     */
    QueueCollection getDeadLetterSourceQueues(ListDeadLetterSourceQueuesRequest
            request);
    /**
     * Performs the <code>SetAttributes</code> action.
     *
     * @see SetQueueAttributesRequest
     */
    void setAttributes(SetQueueAttributesRequest request);
    /**
     * Same as {@link #setAttributes(SetQueueAttributesRequest)}, additionally
     * capturing the low-level client response.
     *
     * @see SetQueueAttributesRequest
     */
    void setAttributes(SetQueueAttributesRequest request, ResultCapture<Void>
            extractor);
    /**
     * Convenient form of the <code>SetAttributes</code> action.
     *
     * @see #setAttributes(SetQueueAttributesRequest)
     */
    void setAttributes(Map<String, String> attributes);
    /**
     * Convenient form of the <code>SetAttributes</code> action.
     *
     * @see #setAttributes(SetQueueAttributesRequest, ResultCapture)
     */
    void setAttributes(Map<String, String> attributes, ResultCapture<Void>
            extractor);
    /**
     * Performs the <code>AddPermission</code> action.
     *
     * @see AddPermissionRequest
     */
    void addPermission(AddPermissionRequest request);
    /**
     * Same as {@link #addPermission(AddPermissionRequest)}, additionally
     * capturing the low-level client response.
     *
     * @see AddPermissionRequest
     */
    void addPermission(AddPermissionRequest request, ResultCapture<Void>
            extractor);
    /**
     * Convenient form of the <code>AddPermission</code> action.
     *
     * @see #addPermission(AddPermissionRequest)
     */
    void addPermission(List<String> actions, String label, List<String>
            aWSAccountIds);
    /**
     * Convenient form of the <code>AddPermission</code> action.
     *
     * @see #addPermission(AddPermissionRequest, ResultCapture)
     */
    void addPermission(List<String> actions, String label, List<String>
            aWSAccountIds, ResultCapture<Void> extractor);
    /**
     * Performs the <code>SendMessage</code> action.
     *
     * @return The response of the associated low-level client operation.
     * @see SendMessageRequest
     */
    SendMessageResult sendMessage(SendMessageRequest request);
    /**
     * Same as {@link #sendMessage(SendMessageRequest)}, additionally capturing
     * the low-level client response.
     *
     * @return The response of the associated low-level client operation.
     * @see SendMessageRequest
     */
    SendMessageResult sendMessage(SendMessageRequest request,
            ResultCapture<SendMessageResult> extractor);
    /**
     * Convenient form of the <code>SendMessage</code> action.
     *
     * @see #sendMessage(SendMessageRequest)
     */
    SendMessageResult sendMessage(String messageBody);
    /**
     * Convenient form of the <code>SendMessage</code> action.
     *
     * @see #sendMessage(SendMessageRequest, ResultCapture)
     */
    SendMessageResult sendMessage(String messageBody,
            ResultCapture<SendMessageResult> extractor);
    /**
     * Performs the <code>DeleteMessages</code> action.
     *
     * @return The response of the associated low-level client operation.
     * @see DeleteMessageBatchRequest
     */
    DeleteMessageBatchResult deleteMessages(DeleteMessageBatchRequest request);
    /**
     * Same as {@link #deleteMessages(DeleteMessageBatchRequest)}, additionally
     * capturing the low-level client response.
     *
     * @return The response of the associated low-level client operation.
     * @see DeleteMessageBatchRequest
     */
    DeleteMessageBatchResult deleteMessages(DeleteMessageBatchRequest request,
            ResultCapture<DeleteMessageBatchResult> extractor);
    /**
     * Convenient form of the <code>DeleteMessages</code> action.
     *
     * @see #deleteMessages(DeleteMessageBatchRequest)
     */
    DeleteMessageBatchResult deleteMessages();
    /**
     * Convenient form of the <code>DeleteMessages</code> action.
     *
     * @see #deleteMessages(DeleteMessageBatchRequest, ResultCapture)
     */
    DeleteMessageBatchResult deleteMessages(
            ResultCapture<DeleteMessageBatchResult> extractor);
    /**
     * Convenient form of the <code>DeleteMessages</code> action.
     *
     * @see #deleteMessages(DeleteMessageBatchRequest)
     */
    DeleteMessageBatchResult deleteMessages(List<DeleteMessageBatchRequestEntry>
            entries);
    /**
     * Convenient form of the <code>DeleteMessages</code> action.
     *
     * @see #deleteMessages(DeleteMessageBatchRequest, ResultCapture)
     */
    DeleteMessageBatchResult deleteMessages(List<DeleteMessageBatchRequestEntry>
            entries, ResultCapture<DeleteMessageBatchResult> extractor);
    /**
     * Performs the <code>ReceiveMessages</code> action.
     *
     * @return A list of <code>Message</code> resource objects associated with
     *         the result of this action.
     * @see ReceiveMessageRequest
     */
    List<com.amazonaws.resources.sqs.Message> receiveMessages(
            ReceiveMessageRequest request);
    /**
     * Same as {@link #receiveMessages(ReceiveMessageRequest)}, additionally
     * capturing the low-level client response.
     *
     * @return A list of <code>Message</code> resource objects associated with
     *         the result of this action.
     * @see ReceiveMessageRequest
     */
    List<com.amazonaws.resources.sqs.Message> receiveMessages(
            ReceiveMessageRequest request, ResultCapture<ReceiveMessageResult>
            extractor);
    /**
     * Convenient form of the <code>ReceiveMessages</code> action.
     *
     * @see #receiveMessages(ReceiveMessageRequest)
     */
    List<com.amazonaws.resources.sqs.Message> receiveMessages();
    /**
     * Convenient form of the <code>ReceiveMessages</code> action.
     *
     * @see #receiveMessages(ReceiveMessageRequest, ResultCapture)
     */
    List<com.amazonaws.resources.sqs.Message> receiveMessages(
            ResultCapture<ReceiveMessageResult> extractor);
    /**
     * Performs the <code>Delete</code> action.
     *
     * @see DeleteQueueRequest
     */
    void delete(DeleteQueueRequest request);
    /**
     * Same as {@link #delete(DeleteQueueRequest)}, additionally capturing the
     * low-level client response.
     *
     * @see DeleteQueueRequest
     */
    void delete(DeleteQueueRequest request, ResultCapture<Void> extractor);
    /**
     * Convenient form of the <code>Delete</code> action.
     *
     * @see #delete(DeleteQueueRequest)
     */
    void delete();
    /**
     * Convenient form of the <code>Delete</code> action.
     *
     * @see #delete(DeleteQueueRequest, ResultCapture)
     */
    void delete(ResultCapture<Void> extractor);
    /**
     * Performs the <code>ChangeMessageVisibilityBatch</code> action.
     *
     * @return The response of the associated low-level client operation.
     * @see ChangeMessageVisibilityBatchRequest
     */
    ChangeMessageVisibilityBatchResult changeMessageVisibilityBatch(
            ChangeMessageVisibilityBatchRequest request);
    /**
     * Same as
     * {@link #changeMessageVisibilityBatch(ChangeMessageVisibilityBatchRequest)},
     * additionally capturing the low-level client response.
     *
     * @return The response of the associated low-level client operation.
     * @see ChangeMessageVisibilityBatchRequest
     */
    ChangeMessageVisibilityBatchResult changeMessageVisibilityBatch(
            ChangeMessageVisibilityBatchRequest request,
            ResultCapture<ChangeMessageVisibilityBatchResult> extractor);
    /**
     * Convenient form of the <code>ChangeMessageVisibilityBatch</code> action.
     *
     * @see #changeMessageVisibilityBatch(ChangeMessageVisibilityBatchRequest)
     */
    ChangeMessageVisibilityBatchResult changeMessageVisibilityBatch(
            List<ChangeMessageVisibilityBatchRequestEntry> entries);
    /**
     * Convenient form of the <code>ChangeMessageVisibilityBatch</code> action.
     *
     * @see #changeMessageVisibilityBatch(ChangeMessageVisibilityBatchRequest,
     *      ResultCapture)
     */
    ChangeMessageVisibilityBatchResult changeMessageVisibilityBatch(
            List<ChangeMessageVisibilityBatchRequestEntry> entries,
            ResultCapture<ChangeMessageVisibilityBatchResult> extractor);
    /**
     * Performs the <code>SendMessages</code> action.
     *
     * @return The response of the associated low-level client operation.
     * @see SendMessageBatchRequest
     */
    SendMessageBatchResult sendMessages(SendMessageBatchRequest request);
    /**
     * Same as {@link #sendMessages(SendMessageBatchRequest)}, additionally
     * capturing the low-level client response.
     *
     * @return The response of the associated low-level client operation.
     * @see SendMessageBatchRequest
     */
    SendMessageBatchResult sendMessages(SendMessageBatchRequest request,
            ResultCapture<SendMessageBatchResult> extractor);
    /**
     * Convenient form of the <code>SendMessages</code> action.
     *
     * @see #sendMessages(SendMessageBatchRequest)
     */
    SendMessageBatchResult sendMessages();
    /**
     * Convenient form of the <code>SendMessages</code> action.
     *
     * @see #sendMessages(SendMessageBatchRequest, ResultCapture)
     */
    SendMessageBatchResult sendMessages(ResultCapture<SendMessageBatchResult>
            extractor);
    /**
     * Convenient form of the <code>SendMessages</code> action.
     *
     * @see #sendMessages(SendMessageBatchRequest)
     */
    SendMessageBatchResult sendMessages(List<SendMessageBatchRequestEntry>
            entries);
    /**
     * Convenient form of the <code>SendMessages</code> action.
     *
     * @see #sendMessages(SendMessageBatchRequest, ResultCapture)
     */
    SendMessageBatchResult sendMessages(List<SendMessageBatchRequestEntry>
            entries, ResultCapture<SendMessageBatchResult> extractor);
    /**
     * Performs the <code>RemovePermission</code> action.
     *
     * @see RemovePermissionRequest
     */
    void removePermission(RemovePermissionRequest request);
    /**
     * Same as {@link #removePermission(RemovePermissionRequest)}, additionally
     * capturing the low-level client response.
     *
     * @see RemovePermissionRequest
     */
    void removePermission(RemovePermissionRequest request, ResultCapture<Void>
            extractor);
    /**
     * Convenient form of the <code>RemovePermission</code> action.
     *
     * @see #removePermission(RemovePermissionRequest)
     */
    void removePermission(String label);
    /**
     * Convenient form of the <code>RemovePermission</code> action.
     *
     * @see #removePermission(RemovePermissionRequest, ResultCapture)
     */
    void removePermission(String label, ResultCapture<Void> extractor);
}
| |
package harmony.mastermind.memory;
import java.util.Calendar;
import java.util.GregorianCalendar;
import harmony.mastermind.memory.GenericMemory;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestGenericMemory {
private Calendar start = new GregorianCalendar();
private Calendar end = new GregorianCalendar();
@Test
//@@author A0143378Y
public void test() {
    // Calendar.set months are 0-based (3 = April). The original used
    // leading-zero literals (03, 05, 07), which Java parses as octal —
    // harmless for these values but a trap (08/09 would not compile) —
    // so plain decimals are used instead.
    start.set(2014, 3, 27, 3, 14, 20);
    end.set(2015, 2, 5, 7, 11, 13);
    addEvent();
    addDeadline();
    addTask();
    testOtherMethods();
}
//@@author A0143378Y
// Creates an event and verifies all its fields round-trip.
private void addEvent() {
    GenericMemory testEvent = new GenericMemory("event", "name", "description", start, end, 0);
    // JUnit convention is assertEquals(message, expected, actual); the
    // original passed the actual value first, garbling failure messages.
    assertEquals("Type", "event", testEvent.getType());
    assertEquals("Name", "name", testEvent.getName());
    assertEquals("Description", "description", testEvent.getDescription());
    assertEquals("start time and date", start, testEvent.getStart());
    assertEquals("end time and date", end, testEvent.getEnd());
}
//@@author A0143378Y
// Creates a deadline (no start date) and verifies its fields round-trip.
private void addDeadline() {
    GenericMemory testDeadline = new GenericMemory("deadline", "name1", "description1", end);
    // expected value goes first per the JUnit assertEquals convention
    assertEquals("Type", "deadline", testDeadline.getType());
    assertEquals("Name", "name1", testDeadline.getName());
    assertEquals("Description", "description1", testDeadline.getDescription());
    // a deadline carries only an end date; start stays null
    assertEquals("start time and date", null, testDeadline.getStart());
    assertEquals("end time and date", end, testDeadline.getEnd());
}
//@@author A0143378Y
// Creates a floating task (no dates) and verifies its fields round-trip.
private void addTask() {
    GenericMemory testTask = new GenericMemory("task", "name2", "description2");
    // expected value goes first per the JUnit assertEquals convention
    assertEquals("Type", "task", testTask.getType());
    assertEquals("Name", "name2", testTask.getName());
    assertEquals("Description", "description2", testTask.getDescription());
    // a floating task has neither a start nor an end date
    assertEquals("start time and date", null, testTask.getStart());
    assertEquals("end time and date", null, testTask.getEnd());
}
//@@author A0143378Y
// Exercises init/setter behaviour, then drives the remaining sub-tests.
private void testOtherMethods() {
    GenericMemory testMethod = new GenericMemory("Method", "name3", "description3");
    // start/end remain null until explicitly initialised
    // (expected value first, per the JUnit assertEquals convention)
    assertEquals("start time and date", null, testMethod.getStart());
    testMethod.initStart();
    assertFalse("start time and date", testMethod.getStart() == null);
    assertEquals("end time and date", null, testMethod.getEnd());
    testMethod.initEnd();
    // the original message said "start" here although this checks the end date
    assertFalse("end time and date", testMethod.getEnd() == null);
    testSetType(testMethod);
    testSetName(testMethod);
    testSetDescription(testMethod);
    testTime(testMethod);
    testDate(testMethod);
    testNewState(testMethod);
    testToString();
    testDays();
    testCompare();
    testStateType();
}
//@@author A0143378Y
private void testSetType(GenericMemory testMethod) {
    // setType should overwrite the type given at construction.
    testMethod.setType("cat");
    assertEquals("type", "cat", testMethod.getType());
}

//@@author A0143378Y
private void testSetName(GenericMemory testMethod) {
    testMethod.setName("Animals");
    assertEquals("Name", "Animals", testMethod.getName());
}

//@@author A0143378Y
private void testSetDescription(GenericMemory testMethod) {
    testMethod.setDescription("An organism");
    assertEquals("Description", "An organism", testMethod.getDescription());
}

//@@author A0143378Y
private void testTime(GenericMemory testMethod) {
    // Decimal literals used deliberately: a leading zero (e.g. 03) makes a Java
    // int literal octal, which breaks as soon as a digit 8 or 9 appears.
    // 2 parameters
    testMethod.setStartTime(3, 14);
    testMethod.setEndTime(7, 11);
    // 3 parameters
    testMethod.setStartTime(3, 14, 19);
    testMethod.setEndTime(7, 11, 20);
}

//@@author A0143378Y
private void testDate(GenericMemory testMethod) {
    testMethod.setStartDate(2014, 3, 27);
    testMethod.setEndDate(2015, 2, 5);
}

//@@author A0143378Y
private void testNewState(GenericMemory testMethod) {
    testMethod.setState(0);
    assertEquals("State", 0, testMethod.getState());
}
//@@author A0143378Y
private void testToString() {
    // Delegates toString() verification to one helper per memory type.
    taskToString();
    deadlineToString();
    eventToString();
}
//@@author A0143378Y
private void taskToString() {
    // Task: toString() must render type, name, description and default state.
    // All assertEquals calls use the (message, expected, actual) order.
    GenericMemory testTask = new GenericMemory("Task", "name2", "description2");
    String taskResult = "Type: Task" + "\n" + "Name: name2" + "\n" + "Description : description2" + "\n" + "Status: Incomplete";
    assertEquals("Task to String", taskResult, testTask.toString());
    // Task description
    String descriptionResult = "description2" + "\n" + "Description : description2";
    assertEquals("Description", descriptionResult, testTask.descriptionToString(testTask.getDescription()));
    // taskDeadlineStateToString appends the current status line to its argument.
    String taskDeadline = "Type: Task" + "\n" + "Name: name2" + "\n" + "Description : description2"
        + "\n" + "Status: Incomplete" + "\n" + "Status: Incomplete";
    assertEquals("Deadline", taskDeadline, testTask.taskDeadlineStateToString(testTask.toString()));
    // State 1 -> Completed
    testTask.setState(1);
    String taskDeadline2 = "Type: Task" + "\n" + "Name: name2" + "\n" + "Description : description2"
        + "\n" + "Status: Completed" + "\n" + "Status: Completed";
    assertEquals("Deadline", taskDeadline2, testTask.taskDeadlineStateToString(testTask.toString()));
    // State 2 -> Overdue
    testTask.setState(2);
    String taskDeadline3 = "Type: Task" + "\n" + "Name: name2" + "\n" + "Description : description2"
        + "\n" + "Status: Overdue" + "\n" + "Status: Overdue";
    assertEquals("Deadline", taskDeadline3, testTask.taskDeadlineStateToString(testTask.toString()));
}
//@@author A0143378Y
private void deadlineToString() {
    // Deadline: toString() must include the formatted due date.
    GenericMemory testDeadline = new GenericMemory("Deadline", "name1", "description1", end);
    String deadlineResult = "Type: Deadline" + "\n" + "Name: name1" + "\n" + "Description : description1"
        + "\n" + "Due by: 5/3/15 Thurs 07:11 AM" + "\n" + "Status: Incomplete";
    assertEquals("Deadline to String", deadlineResult, testDeadline.toString());
    // TODO(review): this assertion compares deadlineDateToString(...) against a
    // value produced by the very same call, so it can never fail; it needs a
    // hand-written expected string to be meaningful. Kept as-is to preserve
    // current pass/fail behavior.
    String deadlineEndDateResult = testDeadline.deadlineDateToString(testDeadline.getEnd().toString());
    assertEquals("Deadline End Date", deadlineEndDateResult, testDeadline.deadlineDateToString(testDeadline.getEnd().toString()));
}
//@@author A0143378Y
private void eventToString() {
    // Event: toString() must render both formatted dates plus the state.
    // All assertEquals calls use the (message, expected, actual) order.
    GenericMemory testEvent = new GenericMemory("Event", "name", "description", start, end, 0);
    String eventResult = "Type: Event" + "\n" + "Name: name" + "\n" + "Description : description"
        + "\n" + "Start: 27/4/14 Sun 03:14 AM" + "\n" + "End: 5/3/15 Thurs 07:11 AM"
        + "\n" + "Status: Upcoming";
    assertEquals("Event to String", eventResult, testEvent.toString());
    // eventDatesToString appends the formatted start/end lines to its argument.
    String eventDates = "Type: Event" + "\n" + "Name: name" + "\n" + "Description : description"
        + "\n" + "Start: 27/4/14 Sun 03:14 AM" + "\n" + "End: 5/3/15 Thurs 07:11 AM"
        + "\n" + "Status: Upcoming"
        + "\n" + "Start: 27/4/14 Sun 03:14 AM" + "\n" + "End: 5/3/15 Thurs 07:11 AM";
    assertEquals("Event dates to String", eventDates, testEvent.eventDatesToString(testEvent.toString()));
    // eventStateToString appends the current status line to its argument.
    String eventState = "Type: Event" + "\n" + "Name: name" + "\n" + "Description : description"
        + "\n" + "Start: 27/4/14 Sun 03:14 AM" + "\n" + "End: 5/3/15 Thurs 07:11 AM"
        + "\n" + "Status: Upcoming" + "\n" + "Status: Upcoming";
    assertEquals("Event state to String", eventState, testEvent.eventStateToString(testEvent.toString()));
    // State 1 -> Over
    testEvent.setState(1);
    String eventState2 = "Type: Event" + "\n" + "Name: name" + "\n" + "Description : description"
        + "\n" + "Start: 27/4/14 Sun 03:14 AM" + "\n" + "End: 5/3/15 Thurs 07:11 AM"
        + "\n" + "Status: Over" + "\n" + "Status: Over";
    assertEquals("Event state to String", eventState2, testEvent.eventStateToString(testEvent.toString()));
    // State 2 -> Ongoing
    testEvent.setState(2);
    String eventState3 = "Type: Event" + "\n" + "Name: name" + "\n" + "Description : description"
        + "\n" + "Start: 27/4/14 Sun 03:14 AM" + "\n" + "End: 5/3/15 Thurs 07:11 AM"
        + "\n" + "Status: Ongoing" + "\n" + "Status: Ongoing";
    assertEquals("Event state to String", eventState3, testEvent.eventStateToString(testEvent.toString()));
    // getDate / getTime formatting, including null handling. The original
    // allocated a GregorianCalendar and immediately overwrote it with null
    // (dead store); a typed null local is all the overload resolution needs.
    assertEquals("Event date", "27/4/14 Sun", testEvent.getDate(start));
    Calendar invalidDate = null;
    assertEquals("Invalid event Date", null, testEvent.getDate(invalidDate));
    assertEquals("Event end time", "07:11 AM", testEvent.getTime(end));
    Calendar invalidTime = null;
    assertEquals("Invalid event time", null, testEvent.getTime(invalidTime));
}
//@@author A0143378Y
private void testDays() {
    // Calendar.set takes a 0-based month, so 3 == April 2014, where the
    // 20th..26th fall on Sun..Sat. Decimal literals replace the original
    // leading-zero forms (a leading zero makes a Java int literal octal).
    // Locals renamed to lowerCamelCase; assertEquals uses (message, expected, actual).
    Calendar sunday = new GregorianCalendar();
    sunday.set(2014, 3, 20, 3, 14, 20);
    assertEquals("Sunday", "Sun", GenericMemory.dayOfTheWeek(sunday));
    // test AM_PM
    assertEquals("AM", "AM", GenericMemory.AM_PM(sunday));
    Calendar monday = new GregorianCalendar();
    monday.set(2014, 3, 21, 13, 1, 20);
    assertEquals("Monday", "Mon", GenericMemory.dayOfTheWeek(monday));
    // test AM_PM
    assertEquals("PM", "PM", GenericMemory.AM_PM(monday));
    // Test HH — hour() zero-pads and renders 13:01 as "01" (12-hour clock)
    assertEquals("Hour", "01", GenericMemory.hour(monday));
    // Test MM
    assertEquals("Minute", "01", GenericMemory.min(monday));
    Calendar tuesday = new GregorianCalendar();
    tuesday.set(2014, 3, 22, 3, 14, 20);
    assertEquals("Tuesday", "Tues", GenericMemory.dayOfTheWeek(tuesday));
    // Test HH
    assertEquals("Hour", "03", GenericMemory.hour(tuesday));
    // Test MM
    assertEquals("Minute", "14", GenericMemory.min(tuesday));
    Calendar wednesday = new GregorianCalendar();
    wednesday.set(2014, 3, 23, 12, 14, 20);
    assertEquals("Wednesday", "Wed", GenericMemory.dayOfTheWeek(wednesday));
    // Test HH
    assertEquals("Hour", "12", GenericMemory.hour(wednesday));
    Calendar thursday = new GregorianCalendar();
    thursday.set(2014, 3, 24, 3, 14, 20);
    assertEquals("Thursday", "Thurs", GenericMemory.dayOfTheWeek(thursday));
    Calendar friday = new GregorianCalendar();
    friday.set(2014, 3, 25, 3, 14, 20);
    assertEquals("Friday", "Fri", GenericMemory.dayOfTheWeek(friday));
    Calendar saturday = new GregorianCalendar();
    saturday.set(2014, 3, 26, 3, 14, 20);
    assertEquals("Saturday", "Sat", GenericMemory.dayOfTheWeek(saturday));
}
//@@author A0143378Y
private void testCompare() {
    // compareTo/eventCompare are exercised once per memory type.
    testCompareTask();
    testCompareDeadline();
    testCompareEvent();
}

//@@author A0143378Y
private void testCompareTask() {
    // -1 expected: the tasks differ only by name ("name2" vs "name3").
    GenericMemory testTask1 = new GenericMemory("task", "name2", "description2");
    GenericMemory testTask2 = new GenericMemory("task", "name3", "description3");
    assertEquals("Tasks", -1, testTask1.compareTo(testTask2));
}

//@@author A0143378Y
private void testCompareDeadline() {
    // 0 expected: both deadlines share the same end date.
    GenericMemory testDeadline1 = new GenericMemory("deadline", "name1", "description1", end);
    GenericMemory testDeadline2 = new GenericMemory("deadline2", "name2", "description2", end);
    assertEquals("Deadlines", 0, testDeadline1.compareTo(testDeadline2));
}

//@@author A0143378Y
private void testCompareEvent() {
    // 0 expected: both events share the same start and end dates.
    GenericMemory testEvent1 = new GenericMemory("event", "name", "description", start, end, 0);
    GenericMemory testEvent2 = new GenericMemory("event2", "name2", "description2", start, end, 0);
    assertEquals("Events", 0, testEvent1.compareTo(testEvent2));
    assertEquals("Event Comparison", 0, testEvent1.eventCompare(testEvent2));
}
//@@author A0143378Y
private void testStateType() {
    // getStateType() mapping is verified per memory type, plus the failure case.
    testTaskState();
    testDeadlineState();
    testEventState();
    testIncorrectState();
}

//@@author A0143378Y
private void testTaskState() {
    // A fresh task reports "Incomplete" before any state is set.
    GenericMemory testTask1 = new GenericMemory("Task", "name2", "description2");
    assertEquals("No state yet", "Incomplete", testTask1.getStateType());
}

//@@author A0143378Y
private void testDeadlineState() {
    // Deadline states: 0 -> Incomplete, 1 -> Completed, 2 -> Overdue.
    GenericMemory testDeadline = new GenericMemory("Deadline", "name1", "description1", end);
    testDeadline.setState(0);
    assertEquals("Incomplete", "Incomplete", testDeadline.getStateType());
    testDeadline.setState(1);
    assertEquals("Completed", "Completed", testDeadline.getStateType());
    testDeadline.setState(2);
    assertEquals("Overdue", "Overdue", testDeadline.getStateType());
}

//@@author A0143378Y
private void testEventState() {
    // Event states: 0 -> Upcoming, 1 -> Over, 2 -> Ongoing.
    GenericMemory testEvent = new GenericMemory("Event", "name", "description", start, end, 0);
    assertEquals("Upcoming", "Upcoming", testEvent.getStateType());
    testEvent.setState(1);
    assertEquals("Over", "Over", testEvent.getStateType());
    testEvent.setState(2);
    assertEquals("Ongoing", "Ongoing", testEvent.getStateType());
}

//@@author A0143378Y
private void testIncorrectState() {
    // An unrecognized type ("aask") yields a null state description.
    GenericMemory wrongState = new GenericMemory("aask", "name2", "description2");
    assertEquals("Wrong", null, wrongState.getStateType());
}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.processors.sleigh;
import java.io.*;
import java.math.BigInteger;
import java.util.*;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.antlr.runtime.RecognitionException;
import org.jdom.JDOMException;
import org.xml.sax.*;
import generic.jar.ResourceFile;
import generic.stl.Pair;
import ghidra.app.plugin.processors.generic.MemoryBlockDefinition;
import ghidra.app.plugin.processors.sleigh.expression.ContextField;
import ghidra.app.plugin.processors.sleigh.expression.PatternValue;
import ghidra.app.plugin.processors.sleigh.symbol.*;
import ghidra.framework.Application;
import ghidra.pcodeCPort.sleighbase.SleighBase;
import ghidra.pcodeCPort.slgh_compile.SleighCompileLauncher;
import ghidra.program.model.address.*;
import ghidra.program.model.lang.*;
import ghidra.program.model.listing.DefaultProgramContext;
import ghidra.program.model.mem.MemBuffer;
import ghidra.program.model.mem.MemoryAccessException;
import ghidra.program.model.symbol.SourceType;
import ghidra.program.model.util.AddressLabelInfo;
import ghidra.program.model.util.ProcessorSymbolType;
import ghidra.sleigh.grammar.SleighPreprocessor;
import ghidra.sleigh.grammar.SourceFileIndexer;
import ghidra.util.*;
import ghidra.util.task.TaskMonitor;
import ghidra.util.xml.SpecXmlUtils;
import ghidra.xml.*;
import utilities.util.FileResolutionResult;
import utilities.util.FileUtilities;
public class SleighLanguage implements Language {
/**
 * SLA_FORMAT_VERSION will be incremented whenever the format of the .sla
 * files change.
 * <p>
 * Version 3: January 2021: added source file information for each constructor. <br>
 * Version 2: April 2019: Changed numbering of Overlay spaces.<br>
 * Version 1: Initial version.<br>
 */
public static final int SLA_FORMAT_VERSION = SleighBase.SLA_FORMAT_VERSION;

// Compiler specs compatible with this language, keyed by compiler-spec id.
private Map<CompilerSpecID, SleighCompilerSpecDescription> compilerSpecDescriptions;
// Lazily-populated compiler spec instances.
private HashMap<CompilerSpecID, BasicCompilerSpec> compilerSpecs;
// p-code injection payloads contributed by the pspec (jumpassist / segmentop);
// remains null when the pspec declares none.
private List<InjectPayloadSleigh> additionalInject = null;
private AddressFactory addressFactory;
// Space used by default for data; set from the pspec <data_space> tag.
private AddressSpace defaultDataSpace;
private RegisterBuilder registerBuilder;
// Blocks declared by the pspec <default_memory_blocks> tag.
private MemoryBlockDefinition[] defaultMemoryBlocks;
private Register programCounter;
// Symbols declared by the pspec <default_symbols> tag.
private List<AddressLabelInfo> defaultSymbols;
private long uniqueBase; // First free offset within the unique space
private int uniqueAllocateMask = 0; // Number of bytes between allocations within the unique space
private int numSections = 0; // Number of named sections for this language
private int alignment = 1;
private int defaultPointerWordSize = 1; // Default wordsize to send down with pointer data-types
private SleighLanguageDescription description;
private ParallelInstructionLanguageHelper parallelHelper;
private SourceFileIndexer indexer; //used to provide source file info for constructors

/**
 * Symbols used by sleigh
 */
private SymbolTable symtab = null;
/**
 * Non-empty if a space uses segmented addressing (set from the pspec
 * segmented_address tag; see readInitialDescription())
 */
String segmentedspace = "";
String segmentType = "";
// Ranges the pspec marks volatile; null when none were declared.
AddressSet volatileAddresses;
private ContextCache contextcache = null;
/**
 * Cached instruction prototypes
 */
private LinkedHashMap<Integer, SleighInstructionPrototype> instructProtoMap;
private DecisionNode root = null;
/**
 * table of AddressSpaces
 */
LinkedHashMap<String, AddressSpace> spacetable;
private AddressSpace default_space;
// Default context-register values parsed from the pspec <context_data> tag.
private List<ContextSetting> ctxsetting = new ArrayList<>();
// Free-form key/value pairs from the pspec <properties> tag.
private LinkedHashMap<String, String> properties = new LinkedHashMap<>();
SortedMap<String, ManualEntry> manual = null;
/**
 * Constructs the language from its description, delegating all loading —
 * including recompiling the .sla file when missing or stale — to initialize().
 * @param description language description from the language definition file
 * @throws SAXException if a specification file cannot be parsed
 * @throws IOException if a specification file cannot be read or recompiled
 * @throws UnknownInstructionException propagated from specification loading
 */
SleighLanguage(SleighLanguageDescription description)
        throws SAXException, IOException, UnknownInstructionException {
    initialize(description);
}
private void addAdditionInject(InjectPayloadSleigh payload) {
    // Lazily create the backing list on first use, then record the payload.
    List<InjectPayloadSleigh> injects = additionalInject;
    if (injects == null) {
        injects = new ArrayList<>();
        additionalInject = injects;
    }
    injects.add(payload);
}
/**
 * Loads this language from its specification files: validates and scans the
 * .pspec, (re)compiles the .sla if needed, reads the compiled .sla, then
 * builds registers, context settings and the instruction prototype cache.
 * Statement order matters: the second pspec pass needs registers and spaces.
 */
private void initialize(SleighLanguageDescription langDescription)
        throws SAXException, IOException, UnknownInstructionException {
    this.defaultSymbols = new ArrayList<>();
    this.compilerSpecDescriptions = new LinkedHashMap<>();
    for (CompilerSpecDescription compilerSpecDescription : langDescription
            .getCompatibleCompilerSpecDescriptions()) {
        this.compilerSpecDescriptions.put(compilerSpecDescription.getCompilerSpecID(),
            (SleighCompilerSpecDescription) compilerSpecDescription);
    }
    compilerSpecs = new HashMap<>();
    this.description = langDescription;
    additionalInject = null;
    SleighLanguageValidator.validatePspecFile(langDescription.getSpecFile());
    // First pass over the pspec only picks up segmented_address info.
    readInitialDescription();
    // should addressFactory and registers initialization be done at
    // construction time?
    // for now we'll assume yes.
    contextcache = new ContextCache();
    ResourceFile slaFile = langDescription.getSlaFile();
    // Recompile when the .sla is absent, or writable and either in the wrong
    // format version or older than its .slaspec source.
    if (!slaFile.exists() ||
        (slaFile.canWrite() && (isSLAWrongVersion(slaFile) || isSLAStale(slaFile)))) {
        reloadLanguage(TaskMonitor.DUMMY, true);
    }
    // Read in the sleigh specification
    readSpecification(slaFile);
    registerBuilder = new RegisterBuilder();
    loadRegisters(registerBuilder);
    // Second pass over the pspec; requires registers/spaces to exist.
    readRemainingSpecification();
    xrefRegisters();
    instructProtoMap = new LinkedHashMap<>();
    initParallelHelper();
}
/**
 * Checks whether the compiled .sla file was written with a different
 * {@link #SLA_FORMAT_VERSION} than this code expects.
 * @param slaFile compiled sleigh specification file
 * @return true if the file should be recompiled: wrong root tag, wrong
 * version attribute, or unreadable/unparseable content
 */
private boolean isSLAWrongVersion(ResourceFile slaFile) {
    XmlPullParser parser = null;
    try {
        parser = XmlPullParserFactory.create(slaFile, new ErrorHandler() {
            @Override
            public void warning(SAXParseException exception) throws SAXException {
                // ignore
            }

            @Override
            public void fatalError(SAXParseException exception) throws SAXException {
                throw exception;
            }

            @Override
            public void error(SAXParseException exception) throws SAXException {
                throw exception;
            }
        }, false);

        // Only the root element is inspected; the rest of the file is untouched.
        XmlElement e = parser.peek();
        if (!"sleigh".equals(e.getName())) {
            return true;
        }
        int version = SpecXmlUtils.decodeInt(e.getAttribute("version"));
        return (version != SLA_FORMAT_VERSION);
    }
    catch (SAXException | IOException e) {
        // Any parse/read failure is treated as a wrong version -> recompile.
        return true;
    }
    finally {
        if (parser != null) {
            parser.dispose();
        }
    }
}
/**
 * Determines whether the .slaspec source (including files pulled in by the
 * preprocessor) is newer than the compiled .sla file.
 * @param slaFile compiled sleigh file
 * @return true if the source appears newer and a recompile is needed
 */
private boolean isSLAStale(ResourceFile slaFile) {
    // Derive the sibling .slaspec name from the .sla file name.
    String slafilename = slaFile.getName();
    int index = slafilename.lastIndexOf('.');
    String slabase = slafilename.substring(0, index);
    String slaspecfilename = slabase + ".slaspec";
    ResourceFile slaspecFile = new ResourceFile(slaFile.getParentFile(), slaspecfilename);
    File resourceAsFile = slaspecFile.getFile(true);
    SleighPreprocessor preprocessor =
        new SleighPreprocessor(new ModuleDefinitionsAdapter(), resourceAsFile);
    // MAX_VALUE default guarantees "stale" (forces recompilation) if the
    // timestamp scan below fails.
    long sourceTimestamp = Long.MAX_VALUE;
    try {
        sourceTimestamp = preprocessor.scanForTimestamp();
    }
    catch (Exception e) {
        // squash the error because we will force recompilation and errors
        // will propagate elsewhere
    }
    long compiledTimestamp = slaFile.lastModified();
    return (sourceTimestamp > compiledTimestamp);
}
/**
 * Returns the unique base offset from which additional temporary variables
 * may be created.
 * @return unique base offset
 */
public long getUniqueBase() {
    return uniqueBase;
}

/**
 * @return the allocation mask for the unique space (see uniqueAllocateMask)
 */
public int getUniqueAllocationMask() {
    return uniqueAllocateMask;
}

/**
 * @return (maximum) number of named p-code sections
 */
public int numSections() {
    return numSections;
}
@Override
public String toString() {
    // Delegates to the language description (id, processor, etc.).
    return description.toString();
}

// Lazily-built from registerBuilder on first access.
// NOTE(review): initialization is not synchronized; appears to assume the
// first access happens on a single thread — confirm before relying on
// concurrent callers.
private RegisterManager registerManager = null;

private RegisterManager getRegisterManager() {
    if (registerManager == null) {
        registerManager = registerBuilder.getRegisterManager();
    }
    return registerManager;
}
@Override
public void applyContextSettings(DefaultProgramContext programContext) {
    // Push every context_set entry parsed from the pspec into the program's
    // default context, over that entry's address range.
    for (ContextSetting setting : ctxsetting) {
        programContext.setDefaultValue(
            new RegisterValue(setting.getRegister(), setting.getValue()),
            setting.getStartAddress(), setting.getEndAddress());
    }
}
// ------------------------------------------------------------------
// Simple accessors: thin delegations to the description, the symbol
// table, or the lazily-built register manager.
// ------------------------------------------------------------------

@Override
public AddressFactory getAddressFactory() {
    return addressFactory;
}

// May return null when the pspec declared no injection payloads.
public List<InjectPayloadSleigh> getAdditionalInject() {
    return additionalInject;
}

@Override
public Register getContextBaseRegister() {
    return getRegisterManager().getContextBaseRegister();
}

@Override
public List<Register> getContextRegisters() {
    return getRegisterManager().getContextRegisters();
}

@Override
public MemoryBlockDefinition[] getDefaultMemoryBlocks() {
    return defaultMemoryBlocks;
}

@Override
public Register getProgramCounter() {
    return programCounter;
}

@Override
public List<AddressLabelInfo> getDefaultSymbols() {
    return defaultSymbols;
}

@Override
public int getInstructionAlignment() {
    return alignment;
}

@Override
public int getMinorVersion() {
    return description.getMinorVersion();
}

@Override
public LanguageID getLanguageID() {
    return description.getLanguageID();
}

@Override
public String getUserDefinedOpName(int index) {
    return symtab.getUserDefinedOpName(index);
}

@Override
public int getNumberOfUserDefinedOpNames() {
    return symtab.getNumberOfUserDefinedOpNames();
}

@Override
public Processor getProcessor() {
    return description.getProcessor();
}

@Override
public Register getRegister(AddressSpace addrspc, long offset, int size) {
    return getRegister(addrspc.getAddress(offset), size);
}

@Override
public Register getRegister(String name) {
    return getRegisterManager().getRegister(name);
}

@Override
public Register getRegister(Address addr, int size) {
    return getRegisterManager().getRegister(addr, size);
}

@Override
public Register[] getRegisters(Address address) {
    return getRegisterManager().getRegisters(address);
}

@Override
public List<Register> getRegisters() {
    return getRegisterManager().getRegisters();
}

@Override
public List<String> getRegisterNames() {
    return getRegisterManager().getRegisterNames();
}

@Override
public String getSegmentedSpace() {
    return segmentedspace;
}

@Override
public int getVersion() {
    return description.getVersion();
}

@Override
public AddressSetView getVolatileAddresses() {
    return volatileAddresses;
}

@Override
public boolean isBigEndian() {
    return description.getEndian().isBigEndian();
}

@Override
public boolean isVolatile(Address addr) {
    // volatileAddresses stays null when the pspec declared no volatile ranges.
    if (volatileAddresses != null) {
        return volatileAddresses.contains(addr);
    }
    return false;
}
/**
 * Parses a single instruction at the buffer's address, returning a (possibly
 * cached) prototype. Prototypes are shared via a map keyed on the parsed
 * form's hash code.
 * @param buf bytes starting at the instruction address
 * @param context processor context at that address
 * @param inDelaySlot true if the instruction occupies a delay slot
 * @return the instruction prototype
 * @throws InsufficientBytesException if the buffer ran out of bytes
 * @throws UnknownInstructionException on alignment violation, failed context
 * application, or a delay-slot instruction that itself has delay slots
 */
@Override
public InstructionPrototype parse(MemBuffer buf, ProcessorContext context, boolean inDelaySlot)
        throws InsufficientBytesException, UnknownInstructionException {
    if (alignment != 1) {
        if (buf.getAddress().getOffset() % alignment != 0) {
            throw new UnknownInstructionException(
                "Instructions must be aligned on " + alignment + "byte boundary.");
        }
    }
    SleighInstructionPrototype res = null;
    try {
        SleighInstructionPrototype newProto =
            new SleighInstructionPrototype(this, buf, context, contextcache, inDelaySlot, null);
        Integer hashcode = newProto.hashCode();
        // Pre-compute the expensive info outside the lock when the prototype
        // looks uncached. NOTE(review): this containsKey read is outside the
        // synchronized block; worst case two threads both run cacheInfo —
        // presumably benign, but confirm.
        if (!instructProtoMap.containsKey(hashcode)) {
            newProto.cacheInfo(buf, context, true);
        }
        synchronized (instructProtoMap) {
            res = instructProtoMap.get(hashcode);
            if (res == null) { // We have a prototype we have never seen
                // before, build it fully
                instructProtoMap.put(hashcode, newProto);
                res = newProto;
            }
            if (inDelaySlot && res.hasDelaySlots()) {
                throw new NestedDelaySlotException();
            }
        }
    }
    catch (MemoryAccessException e) {
        throw new InsufficientBytesException(e.getMessage());
    }
    try {
        // Apply any context commits recorded during the parse.
        SleighParserContext protoContext = res.getParserContext(buf, context);
        protoContext.applyCommits(context);
    }
    catch (Exception e) {
        // Any failure applying context is reported as an unknown instruction.
        throw new UnknownInstructionException();
    }
    return res;
}
/**
 * @return the root of the instruction decision tree read from the .sla file
 */
public DecisionNode getRootDecisionNode() {
    return root;
}

/**
 * @return the sleigh symbol table read from the .sla file
 */
public SymbolTable getSymbolTable() {
    return symtab;
}

/**
 * Returns the source file indexer
 * @return indexer
 */
public SourceFileIndexer getSourceFileIndexer() {
    return indexer;
}
@Override
public void reloadLanguage(TaskMonitor monitor) throws IOException {
    reloadLanguage(monitor, false);
}

/**
 * Recompiles the .slaspec source into the .sla file and, unless called as
 * part of construction, re-initializes this language from the new output.
 * @param monitor progress monitor (null becomes TaskMonitor.DUMMY)
 * @param calledFromInitialize true when invoked from initialize(), in which
 * case initialize() itself continues loading and must not be re-entered
 * @throws IOException on compile failure or when re-initialization fails
 */
private void reloadLanguage(TaskMonitor monitor, boolean calledFromInitialize)
        throws IOException {
    if (monitor == null) {
        monitor = TaskMonitor.DUMMY;
    }
    monitor.setMessage("Compiling Language File...");
    ResourceFile slaFile = description.getSlaFile();
    // Derive the .slaspec source path from the .sla output path.
    String slaName = slaFile.getName();
    int index = slaName.lastIndexOf('.');
    String specName = slaName.substring(0, index);
    String languageName = specName + ".slaspec";
    ResourceFile languageFile = new ResourceFile(slaFile.getParentFile(), languageName);
    // see gradle/processorUtils.gradle for sleighArgs.txt generation
    ResourceFile sleighArgsFile = null;
    ResourceFile languageModule = Application.getModuleContainingResourceFile(languageFile);
    if (languageModule != null) {
        if (SystemUtilities.isInReleaseMode()) {
            sleighArgsFile = new ResourceFile(languageModule, "data/sleighArgs.txt");
        }
        else {
            sleighArgsFile = new ResourceFile(languageModule, "build/tmp/sleighArgs.txt");
        }
    }
    String[] args;
    if (sleighArgsFile != null && sleighArgsFile.isFile()) {
        // Pass the installation root (forward slashes, trailing slash) so
        // sleighArgs.txt can reference it via the BaseDir define.
        String baseDir = Application.getInstallationDirectory()
                .getAbsolutePath()
                .replace(File.separatorChar, '/');
        if (!baseDir.endsWith("/")) {
            baseDir += "/";
        }
        args = new String[] { "-DBaseDir=" + baseDir, "-i", sleighArgsFile.getAbsolutePath(),
            languageFile.getAbsolutePath(), description.getSlaFile().getAbsolutePath() };
    }
    else {
        args = new String[] { languageFile.getAbsolutePath(),
            description.getSlaFile().getAbsolutePath() };
    }
    try {
        // Log the full command line before launching the compiler.
        StringBuilder buf = new StringBuilder();
        for (String str : args) {
            buf.append(str);
            buf.append(" ");
        }
        Msg.debug(this, "Sleigh compile: " + buf);
        int returnCode = SleighCompileLauncher.runMain(args);
        if (returnCode != 0) {
            throw new SleighException("Errors compiling " + languageFile.getAbsolutePath() +
                " -- please check log messages for details");
        }
    }
    catch (JDOMException e) {
        throw new IOException("JDOMException error recompiling: " + e.getMessage());
    }
    catch (RecognitionException e) {
        throw new IOException("RecognitionException error recompiling: " + e.getMessage());
    }
    if (!calledFromInitialize) {
        monitor.setMessage("Reloading Language...");
        try {
            initialize(description);
        }
        catch (SAXException e) {
            throw new IOException(e.getMessage());
        }
        catch (UnknownInstructionException e) {
            throw new IOException(e.getMessage());
        }
    }
}
@Override
public boolean supportsPcode() {
    // Every sleigh-based language provides p-code semantics.
    return true;
}

// Error handler used while parsing the .pspec file: problems are logged but
// NOT rethrown, so parsing continues past errors.
private ErrorHandler SPEC_ERR_HANDLER = new ErrorHandler() {
    @Override
    public void error(SAXParseException exception) throws SAXException {
        Msg.error(SleighLanguage.this, "Error parsing " + description.getSpecFile(), exception);
    }

    @Override
    public void fatalError(SAXParseException exception) throws SAXException {
        Msg.error(SleighLanguage.this, "Fatal error parsing " + description.getSpecFile(),
            exception);
    }

    @Override
    public void warning(SAXParseException exception) throws SAXException {
        Msg.warn(SleighLanguage.this, "Warning parsing " + description.getSpecFile(),
            exception);
    }
};
/**
 * First pass over the .pspec file: scans only for an optional
 * segmented_address element and records its space/type attributes in
 * {@code segmentedspace}/{@code segmentType}. All other elements are skipped.
 */
private void readInitialDescription() throws SAXException, IOException {
    ResourceFile specFile = description.getSpecFile();
    XmlPullParser parser = XmlPullParserFactory.create(specFile, SPEC_ERR_HANDLER, false);
    try {
        XmlElement nextElement = parser.peek();
        while (nextElement != null && !nextElement.getName().equals("segmented_address")) {
            parser.next(); // skip element
            nextElement = parser.peek();
        }
        if (nextElement != null) {
            XmlElement element = parser.start(); // segmented_address element
            segmentedspace = element.getAttribute("space");
            segmentType = element.getAttribute("type");
            if (segmentType == null) {
                segmentType = "";
            }
        }
    }
    finally {
        parser.dispose();
    }
}
/**
 * Applies the pspec data_space setting: records the named space as the
 * default data space and adopts its addressable unit size as the default
 * pointer wordsize. Unknown or non-loaded spaces are logged and ignored.
 * @param spaceName space attribute value; null is a no-op
 */
private void setDefaultDataSpace(String spaceName) {
    if (spaceName == null) {
        return;
    }
    AddressSpace addressSpace = addressFactory.getAddressSpace(spaceName);
    if (addressSpace == null || !addressSpace.isLoadedMemorySpace()) {
        Msg.error(this,
            "unknown/invalid BSS space " + spaceName + ": " + description.getSpecFile());
        return;
    }
    defaultDataSpace = addressSpace;
    defaultPointerWordSize = defaultDataSpace.getAddressableUnitSize();
}

/**
 * Applies the pspec programcounter setting: flags the named register with
 * TYPE_PC and remembers it. Unknown register names are logged and ignored.
 * @param programCounterName register attribute value; null is a no-op
 */
private void setProgramCounter(String programCounterName) {
    if (programCounterName == null) {
        return;
    }
    Register reg = registerBuilder.getRegister(programCounterName);
    if (reg == null) {
        Msg.error(this, "unknown program counter register " + programCounterName + ": " +
            description.getSpecFile());
        return;
    }
    registerBuilder.setFlag(programCounterName, Register.TYPE_PC);
    programCounter = reg;
}
// Records a default context-register value over an address range; applied
// later by applyContextSettings().
private void addContextSetting(Register reg, BigInteger value, Address begad, Address endad) {
    ctxsetting.add(new ContextSetting(reg, value, begad, endad));
}

/**
 * Parses the space/first/last attributes of a pspec range element. A missing
 * "first" defaults to 0 and a missing "last" defaults to the space's maximum
 * offset.
 * @param element element carrying the range attributes
 * @return (first, last) address pair within the named space
 * @throws SleighException if the space attribute names an unknown space
 */
private Pair<Address, Address> parseRange(XmlElement element) {
    String space = element.getAttribute("space");
    AddressSpace addrspace = spacetable.get(space);
    if (addrspace == null) {
        throw new SleighException("Invalid address space name: " + space);
    }
    long first = 0;
    long last = addrspace.getMaxAddress().getOffset();
    String valstring = element.getAttribute("first");
    if (valstring != null) {
        first = SpecXmlUtils.decodeLong(valstring);
    }
    valstring = element.getAttribute("last");
    if (valstring != null) {
        last = SpecXmlUtils.decodeLong(valstring);
    }
    return new Pair<>(addrspace.getAddress(first), addrspace.getAddress(last));
}
/**
 * Second-pass pspec reader: consumes the whole processor_spec element and
 * dispatches on each child tag. Requires address spaces and registers to be
 * loaded already (called via readRemainingSpecification()).
 * @param parser pull parser positioned at the processor_spec element
 * @throws XmlParseException on an unrecognized pspec tag
 */
private void read(XmlPullParser parser) throws XmlParseException {
    // Register names already seen under register_data, used to flag duplicates.
    Set<String> registerDataSet = new HashSet<>();
    XmlElement el = parser.start("processor_spec");
    while (parser.peek().isStart()) {
        String elName = parser.peek().getName();
        if (elName.equals("properties")) {
            // Free-form key/value properties.
            XmlElement subel = parser.start();
            while (!parser.peek().isEnd()) {
                XmlElement next = parser.start("property");
                String key = next.getAttribute("key");
                String value = next.getAttribute("value");
                properties.put(key, value);
                parser.end(next);
            }
            parser.end(subel);
        }
        else if (elName.equals("programcounter")) {
            XmlElement subel = parser.start();
            setProgramCounter(subel.getAttribute("register"));
            parser.end(subel);
        }
        else if (elName.equals("data_space")) {
            XmlElement subel = parser.start();
            setDefaultDataSpace(subel.getAttribute("space"));
            // Optional override of the pointer wordsize derived from the space.
            String overrideString = subel.getAttribute("ptr_wordsize");
            if (overrideString != null) {
                int val = SpecXmlUtils.decodeInt(overrideString);
                if (val <= 0 || val >= 32) {
                    throw new SleighException("Bad ptr_wordsize attribute");
                }
                defaultPointerWordSize = val;
            }
            parser.end(subel);
        }
        else if (elName.equals("context_data")) {
            // Default register values applied over address ranges.
            XmlElement subel = parser.start();
            while (!parser.peek().isEnd()) {
                XmlElement next = parser.start();
                boolean isContext = next.getName().equals("context_set");
                Pair<Address, Address> range = parseRange(next);
                while (parser.peek().getName().equals("set")) {
                    XmlElement set = parser.start();
                    String name = set.getAttribute("name");
                    String sValue = set.getAttribute("val");
                    // Values may be decimal or 0x-prefixed hex.
                    int radix = 10;
                    if (sValue.startsWith("0x") || sValue.startsWith("0X")) {
                        sValue = sValue.substring(2);
                        radix = 16;
                    }
                    BigInteger val;
                    try {
                        val = new BigInteger(sValue, radix);
                    }
                    catch (Exception e) {
                        // Unparseable values silently default to zero.
                        val = BigInteger.valueOf(0);
                    }
                    Register reg = registerBuilder.getRegister(name);
                    // context_set entries must name a context register; other
                    // entries must name a non-context register.
                    boolean test;
                    if (isContext) {
                        test = reg == null || !reg.isProcessorContext();
                    }
                    else {
                        test = reg == null || reg.isProcessorContext();
                    }
                    if (test) {
                        throw new SleighException("Bad register name: " + name);
                    }
                    addContextSetting(reg, val, range.first, range.second);
                    // skip the end tag
                    parser.end(set);
                }
                // skip the end tag
                parser.end(next);
            }
            parser.end(subel);
        }
        else if (elName.equals("volatile")) {
            XmlElement subel = parser.start();
            // Accumulate ranges until the matching </volatile> tag appears.
            while (!parser.peek().getName().equals("volatile")) {
                XmlElement next = parser.start();
                if (next.getName().equals("register")) {
                    throw new SleighException("no support for volatile registers yet");
                }
                Pair<Address, Address> range = parseRange(next);
                if (volatileAddresses == null) {
                    volatileAddresses = new AddressSet();
                }
                volatileAddresses.addRange(range.first, range.second);
                // skip the end tag
                parser.end(next);
            }
            parser.end(subel);
        }
        else if (elName.equals("jumpassist")) {
            // Each child becomes a jump-assist injection payload; all share
            // the same name attribute from the enclosing tag.
            XmlElement subel = parser.start();
            String source = "pspec: " + getLanguageID().getIdAsString();
            String name = subel.getAttribute("name");
            while (parser.peek().isStart()) {
                InjectPayloadSleigh payload = new InjectPayloadJumpAssist(name, source);
                payload.restoreXml(parser, this);
                addAdditionInject(payload);
            }
            parser.end(subel);
        }
        else if (elName.equals("register_data")) {
            // Per-register adjustments: rename, alias, group, hidden flag,
            // vector lane sizes.
            XmlElement subel = parser.start();
            while (parser.peek().getName().equals("register")) {
                XmlElement reg = parser.start();
                String registerName = reg.getAttribute("name");
                String registerRename = reg.getAttribute("rename");
                String registerAlias = reg.getAttribute("alias");
                String groupName = reg.getAttribute("group");
                boolean isHidden = SpecXmlUtils.decodeBoolean(reg.getAttribute("hidden"));
                if (registerRename != null) {
                    if (!registerBuilder.renameRegister(registerName, registerRename)) {
                        throw new SleighException(
                            "error renaming " + registerName + " to " + registerRename);
                    }
                    // Subsequent attributes apply under the new name.
                    registerName = registerRename;
                }
                Register register = registerBuilder.getRegister(registerName);
                if (register != null) {
                    if (!registerDataSet.add(registerName)) {
                        Msg.error(this, "duplicate register " + registerName + ": " +
                            description.getSpecFile());
                    }
                    if (registerAlias != null) {
                        registerBuilder.addAlias(registerName, registerAlias);
                    }
                    if (groupName != null) {
                        registerBuilder.setGroup(registerName, groupName);
                    }
                    if (isHidden) {
                        registerBuilder.setFlag(registerName, Register.TYPE_HIDDEN);
                    }
                    String sizes = reg.getAttribute("vector_lane_sizes");
                    if (sizes != null) {
                        String[] lanes = sizes.split(",");
                        for (String lane : lanes) {
                            int laneSize = SpecXmlUtils.decodeInt(lane.trim());
                            registerBuilder.addLaneSize(registerName, laneSize);
                        }
                    }
                }
                else {
                    Msg.error(this,
                        "unknown register " + registerName + ": " + description.getSpecFile());
                }
                // skip the end tag
                parser.end(reg);
            }
            parser.end(subel);
        }
        else if (elName.equals("default_symbols")) {
            XmlElement subel = parser.start();
            while (parser.peek().getName().equals("symbol")) {
                XmlElement symbol = parser.start();
                String labelName = symbol.getAttribute("name");
                String addressString = symbol.getAttribute("address");
                String typeString = symbol.getAttribute("type");
                ProcessorSymbolType type = ProcessorSymbolType.getType(typeString);
                boolean isEntry = SpecXmlUtils.decodeBoolean(symbol.getAttribute("entry"));
                Address address = addressFactory.getAddress(addressString);
                if (address == null) {
                    // Bad addresses are logged and the symbol is dropped.
                    Msg.error(this, "invalid symbol address \"" + addressString + "\": " +
                        description.getSpecFile());
                }
                else {
                    AddressLabelInfo info = new AddressLabelInfo(address, labelName, false,
                        null, SourceType.IMPORTED, isEntry, type);
                    defaultSymbols.add(info);
                }
                // skip the end tag
                parser.end(symbol);
            }
            parser.end(subel);
        }
        else if (elName.equals("default_memory_blocks")) {
            XmlElement subel = parser.start();
            List<MemoryBlockDefinition> list = new ArrayList<>();
            while (parser.peek().getName().equals("memory_block")) {
                XmlElement mblock = parser.start();
                list.add(new MemoryBlockDefinition(mblock));
                // skip the end tag
                parser.end(mblock);
            }
            parser.end(subel);
            defaultMemoryBlocks = new MemoryBlockDefinition[list.size()];
            list.toArray(defaultMemoryBlocks);
        }
        else if (elName.equals("incidentalcopy")) {
            // Content not used here; subtree discarded.
            XmlElement subel = parser.start();
            while (parser.peek().isStart()) {
                parser.discardSubTree();
            }
            parser.end(subel);
        }
        else if (elName.equals("inferptrbounds")) {
            // Content not used here; subtree discarded.
            XmlElement subel = parser.start();
            while (parser.peek().isStart()) {
                parser.discardSubTree();
            }
            parser.end(subel);
        }
        else if (elName.equals("segmentop")) {
            String source = "pspec: " + getLanguageID().getIdAsString();
            InjectPayloadSleigh payload = new InjectPayloadSegment(source);
            payload.restoreXml(parser, this);
            addAdditionInject(payload);
        }
        else if (elName.equals("segmented_address")) {
            // Already handled by readInitialDescription(); just consume it.
            XmlElement subel = parser.start();
            parser.end(subel);
        }
        else {
            throw new XmlParseException("Unknown pspec tag: " + elName);
        }
    }
    parser.end(el);
}
/**
 * Reads the remainder of the processor specification (.pspec) file.
 * XML parse problems are logged via Msg and swallowed; SAX and I/O
 * failures propagate to the caller.
 */
private void readRemainingSpecification() throws SAXException, IOException {
	XmlPullParser pspecParser =
		XmlPullParserFactory.create(description.getSpecFile(), SPEC_ERR_HANDLER, false);
	try {
		read(pspecParser);
	}
	catch (XmlParseException e) {
		// Parse errors are reported but do not abort language construction
		Msg.error(this, e.getMessage());
	}
	finally {
		pspecParser.dispose();
	}
}
/**
 * Parses the compiled .sla file for this language.
 * All SAX diagnostics are routed to the application log; none of them
 * abort the parse by themselves.
 *
 * @param sleighfile the compiled sleigh (.sla) file
 */
private void readSpecification(final ResourceFile sleighfile)
		throws SAXException, IOException, UnknownInstructionException {
	ErrorHandler logHandler = new ErrorHandler() {
		@Override
		public void warning(SAXParseException exception) throws SAXException {
			Msg.warn(SleighLanguage.this, "Warning parsing " + sleighfile, exception);
		}

		@Override
		public void error(SAXParseException exception) throws SAXException {
			Msg.error(SleighLanguage.this, "Error parsing " + sleighfile, exception);
		}

		@Override
		public void fatalError(SAXParseException exception) throws SAXException {
			Msg.error(SleighLanguage.this, "Fatal error parsing " + sleighfile, exception);
		}
	};
	XmlPullParser parser = XmlPullParserFactory.create(sleighfile, logHandler, false);
	try {
		restoreXml(parser);
	}
	finally {
		parser.dispose();
	}
}
/**
 * Restores this language's compiled state from the .sla document.
 * Validates the file format version and endianness, then reads — in this
 * fixed order — the source file indexer, the address spaces and the
 * symbol table.
 *
 * @param parser pull parser positioned at the root &lt;sleigh&gt; element
 * @throws UnknownInstructionException propagated from symbol table restore
 */
private void restoreXml(XmlPullParser parser) throws UnknownInstructionException {
	XmlElement el = parser.start("sleigh");
	int version = SpecXmlUtils.decodeInt(el.getAttribute("version"));
	if (version != SLA_FORMAT_VERSION) {
		throw new SleighException(".sla file for " + getLanguageID() + " has the wrong format");
	}
	String endianAttr = el.getAttribute("bigendian");
	Endian slaEndian = SpecXmlUtils.decodeBoolean(endianAttr) ? Endian.BIG : Endian.LITTLE;
	Endian ldefEndian = description.getEndian();
	Endian instEndian = description.getInstructionEndian();
	// A mismatch with the .ldefs endianness is tolerated only when the
	// instruction endianness already differs from the data endianness —
	// i.e. the .sla is describing the instruction byte order.
	if (slaEndian != ldefEndian && instEndian == ldefEndian) {
		throw new SleighException(".ldefs says " + getLanguageID() + " is " + ldefEndian +
			" but .sla says " + slaEndian);
	}
	uniqueBase = SpecXmlUtils.decodeLong(el.getAttribute("uniqbase"));
	alignment = SpecXmlUtils.decodeInt(el.getAttribute("align"));
	uniqueAllocateMask = 0; // Default mask is 0
	String uniqmaskstr = el.getAttribute("uniqmask");
	if (uniqmaskstr != null) {
		uniqueAllocateMask = SpecXmlUtils.decodeInt(uniqmaskstr);
	}
	String numsecstr = el.getAttribute("numsections");
	if (numsecstr != null) {
		numSections = SpecXmlUtils.decodeInt(numsecstr);
	}
	indexer = new SourceFileIndexer();
	indexer.restoreXml(parser);
	parseSpaces(parser);
	symtab = new SymbolTable();
	symtab.restoreXml(parser, this);
	// The "instruction" subtable's decision node is the root used to match
	// instructions during disassembly
	root =
		((SubtableSymbol) symtab.getGlobalScope().findSymbol("instruction")).getDecisionNode();
	parser.end(el);
}
/**
 * Parses the &lt;spaces&gt; section of the .sla file, populating the space
 * table, the default space references and the address factory.
 * Applies any space truncations requested by the language definition and
 * verifies that all requested truncations were consumed.
 *
 * @param parser pull parser positioned at the &lt;spaces&gt; element
 */
private void parseSpaces(XmlPullParser parser) {
	Set<String> truncatedSpaceNames = description.getTruncatedSpaceNames();
	int truncatedSpaceCnt = truncatedSpaceNames.size();
	XmlElement el = parser.start("spaces");
	String defname = el.getAttribute("defaultspace");
	spacetable = new LinkedHashMap<>();
	// Slot zero is always the constant space
	AddressSpace constspc = new GenericAddressSpace(SpaceNames.CONSTANT_SPACE_NAME, 64,
		AddressSpace.TYPE_CONSTANT, SpaceNames.CONSTANT_SPACE_INDEX);
	spacetable.put(SpaceNames.CONSTANT_SPACE_NAME, constspc);
	default_space = null;
	XmlElement subel = parser.peek();
	if (subel.getName().equals("space_other")) { // tag must be present
		parser.discardSubTree(); // We don't process it
		// Instead the ProgramAddressFactory maps in the static OTHER_SPACE automatically
	}
	else {
		throw new SleighException(".sla file missing required OTHER space tag");
	}
	while ((subel = parser.softStart("space", "space_unique")) != null) {
		String name = subel.getAttribute("name");
		int index = SpecXmlUtils.decodeInt(subel.getAttribute("index"));
		String typename = subel.getName();
		int delay = SpecXmlUtils.decodeInt(subel.getAttribute("delay"));
		int size = SpecXmlUtils.decodeInt(subel.getAttribute("size"));
		int type = AddressSpace.TYPE_UNKNOWN;
		if (typename.equals("space")) {
			// A "space" with a pipeline delay is RAM; without one it is a
			// register space
			if (delay > 0) {
				type = AddressSpace.TYPE_RAM;
			}
			else {
				type = AddressSpace.TYPE_REGISTER;
			}
		}
		else if (typename.equals("space_unique")) {
			type = AddressSpace.TYPE_UNIQUE;
		}
		if (type == AddressSpace.TYPE_UNKNOWN) {
			throw new SleighException("Sleigh cannot match new space definition to old type");
		}
		String wSizeString = subel.getAttribute("wordsize");
		int wordsize = 1;
		if (wSizeString != null) {
			wordsize = SpecXmlUtils.decodeInt(wSizeString);
		}
		boolean truncateSpace = truncatedSpaceNames.contains(name);
		// Truncation is only supported for RAM spaces (this also covers the
		// segmented space below, which made the same check redundantly)
		if (truncateSpace && type != AddressSpace.TYPE_RAM) {
			throw new SleighException("Non-ram space does not support truncation: " + name);
		}
		AddressSpace spc;
		if (getSegmentedSpace().equals(name)) {
			if (segmentType.equals("protected")) {
				spc = new ProtectedAddressSpace(name, index);
			}
			else {
				spc = new SegmentedAddressSpace(name, index);
			}
		}
		else {
			if (truncateSpace) {
				int truncatedSize = description.getTruncatedSpaceSize(name);
				if (truncatedSize <= 0 || truncatedSize >= size) {
					throw new SleighException("Invalid space truncation: " + name + ":" + size +
						" -> " + truncatedSize);
				}
				size = truncatedSize;
				--truncatedSpaceCnt;
			}
			spc = new GenericAddressSpace(name, 8 * size, wordsize, type, index);
		}
		spacetable.put(name, spc);
		parser.end(subel);
	}
	if (truncatedSpaceCnt > 0) {
		// message typo fixed: "spaced" -> "spaces"
		throw new SleighException(
			"One or more truncated spaces not applied: " + description.getLanguageID());
	}
	default_space = spacetable.get(defname);
	defaultDataSpace = default_space;
	defaultPointerWordSize = defaultDataSpace.getAddressableUnitSize();
	buildAddressSpaceFactory();
	parser.end(el);
}
/**
 * (Re)builds {@link DefaultAddressFactory} from the current contents of
 * the space table, preserving the spaces' insertion order.
 */
void buildAddressSpaceFactory() {
	GenericAddressSpace[] spaceArray = new GenericAddressSpace[spacetable.size()];
	spacetable.values().toArray(spaceArray);
	addressFactory = new DefaultAddressFactory(spaceArray, default_space);
}
/**
 * Populates the register builder from the sleigh symbol table.
 * Fixed varnodes in register or RAM space become registers (RAM-backed
 * ones additionally flag their space as having mapped registers);
 * varnode-list symbols backed by a context field, and context symbols,
 * become context registers.
 *
 * @param builder receives the discovered registers
 */
private void loadRegisters(RegisterBuilder builder) {
	Symbol[] symbollist = symtab.getSymbolList();
	for (Symbol element : symbollist) {
		if (element instanceof VarnodeSymbol) {
			VarnodeData vn = ((VarnodeSymbol) element).getFixedVarnode();
			int spaceType = vn.space.getType();
			// Register-space and RAM-space varnodes share identical register
			// creation logic (previously duplicated in two branches)
			if (spaceType == AddressSpace.TYPE_REGISTER ||
				spaceType == AddressSpace.TYPE_RAM) {
				Address a = vn.space.getAddress(vn.offset);
				builder.addRegister(element.getName(), null, a, vn.size,
					description.getEndian().isBigEndian(), 0);
				// Memory-backed registers require the space to be marked as
				// containing mapped registers
				if (spaceType == AddressSpace.TYPE_RAM && vn.space.isMemorySpace()) {
					setHasMappedRegisters(vn.space);
				}
			}
		}
		else if (element instanceof VarnodeListSymbol) {
			VarnodeListSymbol sym = (VarnodeListSymbol) element;
			PatternValue patternValue = sym.getPatternValue();
			if (patternValue instanceof ContextField) {
				registerContext(sym.getName(), (ContextField) patternValue, builder);
			}
		}
		else if (element instanceof ContextSymbol) {
			ContextSymbol sym = (ContextSymbol) element;
			registerContext(sym, builder);
		}
	}
}
/**
 * Marks the given space as containing memory-mapped registers.
 * Only {@link GenericAddressSpace} carries this flag; other space types
 * are ignored.
 */
private void setHasMappedRegisters(AddressSpace space) {
	if (!(space instanceof GenericAddressSpace)) {
		return;
	}
	((GenericAddressSpace) space).setHasMappedRegisters(true);
}
/**
 * Registers a context field (from a varnode-list symbol) as a transient
 * context register located within the processor context.
 *
 * @param name register name
 * @param field bit range of the field within the context value
 * @param builder receives the new register
 */
private void registerContext(String name, ContextField field, RegisterBuilder builder) {
	int startbit = field.getStartBit();
	int endbit = field.getEndBit();
	int bitLength = endbit - startbit + 1;
	// Smallest whole number of bytes covering the field's end bit
	int contextByteLength = (endbit / 8) + 1;
	int contextBitLength = contextByteLength * 8;
	int flags = Register.TYPE_CONTEXT; // assume transient context
	// contextBitLength - endbit - 1 presumably converts the msb-based end
	// position to the lsb offset expected by addRegister — TODO confirm
	// against RegisterBuilder.addRegister
	builder.addRegister(name, name, builder.getProcessContextAddress(), contextByteLength,
		contextBitLength - endbit - 1, bitLength, true, flags);
}
/**
 * Registers a context symbol as a context register backed by the symbol's
 * fixed varnode. Symbols that do not follow flow additionally get the
 * DOES_NOT_FOLLOW_FLOW flag.
 *
 * @param sym context symbol from the sleigh symbol table
 * @param builder receives the new register
 */
private void registerContext(ContextSymbol sym, RegisterBuilder builder) {
	ContextField field = (ContextField) sym.getPatternValue();
	int startbit = field.getStartBit();
	int endbit = field.getEndBit();
	int bitLength = endbit - startbit + 1;
	VarnodeData vn = sym.getVarnode().getFixedVarnode();
	int contextBitLength = vn.size * 8;
	Address a = vn.space.getAddress(vn.offset);
	int flags = Register.TYPE_CONTEXT;
	if (!sym.followsFlow()) {
		flags |= Register.TYPE_DOES_NOT_FOLLOW_FLOW;
	}
	// contextBitLength - endbit - 1: same msb-to-lsb conversion as the
	// ContextField overload above — TODO confirm against RegisterBuilder
	builder.addRegister(sym.getName(), sym.getName(), a, vn.size, contextBitLength - endbit - 1,
		bitLength, true, flags);
}
/**
 * Makes every context register known to the context cache.
 */
private void xrefRegisters() {
	getRegisterManager().getContextRegisters().forEach(contextcache::registerVariable);
}
/** @return the default address space read from the .sla spaces section */
@Override
public AddressSpace getDefaultSpace() {
	return default_space;
}

/** @return the default data space (same as the default space for sleigh) */
@Override
public AddressSpace getDefaultDataSpace() {
	return defaultDataSpace;
}

/**
 * @deprecated Will be removed once we have better way to attach address spaces to pointer data-types
 * @return the default wordsize to use when analyzing pointer offsets
 */
@Deprecated
public int getDefaultPointerWordSize() {
	return defaultPointerWordSize;
}

/** @return compiler spec descriptions compatible with this language */
@Override
public List<CompilerSpecDescription> getCompatibleCompilerSpecDescriptions() {
	return description.getCompatibleCompilerSpecDescriptions();
}
/**
 * Returns the (lazily constructed and cached) compiler spec for the given
 * ID.
 *
 * @param compilerSpecID the compiler spec identifier
 * @return the compiler spec
 * @throws CompilerSpecNotFoundException if the ID is unknown for this
 *         language
 */
@Override
public CompilerSpec getCompilerSpecByID(CompilerSpecID compilerSpecID)
		throws CompilerSpecNotFoundException {
	// Single lookup instead of containsKey-then-get
	SleighCompilerSpecDescription compilerSpecDescription =
		compilerSpecDescriptions.get(compilerSpecID);
	if (compilerSpecDescription == null) {
		throw new CompilerSpecNotFoundException(getLanguageID(), compilerSpecID);
	}
	BasicCompilerSpec compilerSpec = compilerSpecs.get(compilerSpecID);
	if (compilerSpec == null) {
		compilerSpec = new BasicCompilerSpec(compilerSpecDescription, this,
			compilerSpecDescription.getFile());
		compilerSpecs.put(compilerSpecID, compilerSpec);
	}
	return compilerSpec;
}
/** @return the language description this language was built from */
@Override
public LanguageDescription getLanguageDescription() {
	return description;
}

/**
 * Returns the compiler spec for the first compatible compiler spec
 * description.
 */
@Override
public CompilerSpec getDefaultCompilerSpec() {
	SleighCompilerSpecDescription compilerSpecDescription =
		(SleighCompilerSpecDescription) description.getCompatibleCompilerSpecDescriptions()
			.iterator()
			.next();
	try {
		return getCompilerSpecByID(compilerSpecDescription.getCompilerSpecID());
	}
	catch (CompilerSpecNotFoundException e) {
		// The default spec comes from our own description list, so a lookup
		// failure indicates a programming/configuration error
		throw new IllegalStateException(e);
	}
}
/** @return the raw property value, or null if not defined */
@Override
public String getProperty(String key) {
	return properties.get(key);
}

/** @return unmodifiable view of all defined property keys */
@Override
public Set<String> getPropertyKeys() {
	return Collections.unmodifiableSet(properties.keySet());
}

/** @return the property value, or defaultString if not defined */
@Override
public String getProperty(String key, String defaultString) {
	if (properties.containsKey(key)) {
		return properties.get(key);
	}
	return defaultString;
}

/** @return the property parsed as a boolean, or defaultBoolean if not defined */
@Override
public boolean getPropertyAsBoolean(String key, boolean defaultBoolean) {
	if (properties.containsKey(key)) {
		return Boolean.parseBoolean(properties.get(key));
	}
	return defaultBoolean;
}

/**
 * @return the property parsed as an int, or defaultInt if not defined.
 * NOTE(review): a present but non-numeric value throws
 * NumberFormatException rather than returning the default.
 */
@Override
public int getPropertyAsInt(String key, int defaultInt) {
	if (properties.containsKey(key)) {
		return Integer.parseInt(properties.get(key));
	}
	return defaultInt;
}

/** @return true if the property is defined */
@Override
public boolean hasProperty(String key) {
	return properties.containsKey(key);
}
/**
 * Finds the manual entry whose mnemonic is the longest case-insensitive
 * prefix of the given instruction string. Falls back to the default entry
 * (stored under the null key) for null/empty input or when nothing
 * matches.
 */
@Override
public ManualEntry getManualEntry(String instruction) {
	initManual();
	if (instruction == null || instruction.length() == 0) {
		return manual.get(null);
	}
	instruction = instruction.toUpperCase();
	// Restrict the search to keys sharing the instruction's first letter:
	// the half-open key range [firstKey, lastKey)
	String firstKey = instruction.substring(0, 1);
	String lastKey = Character.toString((char) (firstKey.charAt(0) + 1));
	SortedMap<String, ManualEntry> tail = manual.tailMap(firstKey);
	SortedMap<String, ManualEntry> subMap;
	try {
		subMap = tail.headMap(lastKey);
	}
	catch (IllegalArgumentException e) {
		// lastKey can fall outside the tail map's allowed range; use the
		// whole tail in that case
		subMap = tail;
	}
	// Longest matching prefix wins
	ManualEntry manualEntry = null;
	int maxInCommon = -1;
	Iterator<Entry<String, ManualEntry>> ii = subMap.entrySet().iterator();
	while (ii.hasNext()) {
		Entry<String, ManualEntry> mapEntry = ii.next();
		String key = mapEntry.getKey();
		if (instruction.startsWith(key) && key.length() > maxInCommon) {
			manualEntry = mapEntry.getValue();
			maxInCommon = key.length();
		}
	}
	if (manualEntry == null) {
		return manual.get(null);
	}
	return manualEntry;
}

/** @return unmodifiable view of all mnemonic keys (includes the null default key) */
@Override
public Set<String> getManualInstructionMnemonicKeys() {
	initManual();
	return Collections.unmodifiableSet(manual.keySet());
}
// Remembers any failure while loading the manual index so it can be
// reported later via getManualException()
private Exception manualException = null;

// Case-insensitive ordering with null first; equivalent to the previous
// hand-rolled comparator (compareToIgnoreCase with null handling).  The
// null key holds the default manual entry.
private static final Comparator<String> CASE_INSENSITIVE =
	Comparator.nullsFirst(String.CASE_INSENSITIVE_ORDER);
/**
 * Lazily loads the manual index the first time any manual query is made.
 * Load failures are recorded in manualException and logged, not thrown.
 * NOTE(review): not synchronized — assumes first access is effectively
 * single-threaded; confirm before relying on concurrent use.
 */
private void initManual() {
	if (manual == null) {
		manual = new TreeMap<>(CASE_INSENSITIVE);
		try {
			if (description.getManualIndexFile() != null) {
				loadIndex(description.getManualIndexFile());
			}
		}
		catch (Exception e) {
			manualException = e;
			Msg.error(this, "error loading manual index", e);
		}
	}
}
// Manual index file line formats:
//   "# ..."          — comment (ignored)
private static final Pattern COMMENT = Pattern.compile("^\\s*#(.*)");
//   "< path"         — include another index file
private static final Pattern FILE_INCLUDE = Pattern.compile("^\\s*<(.*)");
//   "@ manual"       — switch the current manual file
private static final Pattern FILE_SWITCH = Pattern.compile("^\\s*@(.*)");
//   "@ manual[desc]" — switch manual, with description used when missing
private static final Pattern FILE_SWITCH_WITH_DESCRIPTION =
	Pattern.compile("^\\s*@(.*)\\[(.*)\\]");
//   "MNEMONIC, page" — instruction entry in the current manual
private static final Pattern INSTRUCTION = Pattern.compile("\\s*([^,]+)\\s*,\\s*(.+)");
/**
 * Loads (recursively, via include lines) a manual index file, populating
 * the mnemonic-to-ManualEntry map. Line formats are described by the
 * Pattern constants above; the null map key receives the default entry
 * for the first manual referenced.
 *
 * @param processorFile the index file to read
 * @throws IOException if the file cannot be read, or an instruction line
 *         appears before any manual has been specified
 */
public void loadIndex(ResourceFile processorFile) throws IOException {
	ResourceFile manualDirectory = processorFile.getParentFile().getCanonicalFile();
	ResourceFile currentManual = null;
	ResourceFile defaultManual = null;
	String missingDescription = "(no information available)";
	// try-with-resources replaces the previous manual fr/buff close in finally
	try (BufferedReader buff =
		new BufferedReader(new InputStreamReader(processorFile.getInputStream()))) {
		String line;
		while ((line = buff.readLine()) != null) {
			if (COMMENT.matcher(line).find()) {
				continue; // skip comment line
			}
			Matcher matcher = FILE_INCLUDE.matcher(line);
			if (matcher.find()) {
				// "< path" — recursively load another index file
				String includeFilePath = matcher.group(1).trim();
				ResourceFile includedIndexFile =
					new ResourceFile(manualDirectory, includeFilePath);
				FileResolutionResult result =
					FileUtilities.existsAndIsCaseDependent(includedIndexFile);
				if (!result.isOk()) {
					throw new SleighException("manual index file " + includedIndexFile +
						" is not properly case dependent: " + result.getMessage());
				}
				loadIndex(includedIndexFile);
				continue;
			}
			// "@manual[description]" must be tried before the plain "@manual" form
			matcher = FILE_SWITCH_WITH_DESCRIPTION.matcher(line);
			if (matcher.find()) {
				// BUG FIX: reset before the lookup so a manual set by a
				// previous @-line is not silently reused when the
				// development-mode search is skipped or fails
				currentManual = null;
				if (SystemUtilities.isInDevelopmentMode()) {
					// Search across repositories in development mode
					currentManual = Application
						.findDataFileInAnyModule("manuals/" + matcher.group(1).trim());
				}
				if (currentManual == null) {
					currentManual =
						new ResourceFile(manualDirectory, matcher.group(1).trim());
				}
				FileResolutionResult result =
					FileUtilities.existsAndIsCaseDependent(currentManual);
				missingDescription = matcher.group(2).trim();
				if (defaultManual == null) {
					defaultManual = currentManual;
				}
				if (!result.isOk()) {
					// Since we do not always deliver manuals, generate warning only
					Msg.warn(this,
						"manual file " + currentManual +
							" not found or is not properly case dependent.\n >> " +
							missingDescription);
				}
				continue;
			}
			matcher = FILE_SWITCH.matcher(line);
			if (matcher.find()) {
				// "@manual" — switch manual; a missing file is fatal here
				currentManual =
					new ResourceFile(manualDirectory, matcher.group(1).trim());
				FileResolutionResult result =
					FileUtilities.existsAndIsCaseDependent(currentManual);
				if (!result.isOk()) {
					throw new SleighException("manual file " + currentManual +
						" is not properly case dependent: " + result.getMessage());
				}
				missingDescription = "(no information available)";
				if (defaultManual == null) {
					defaultManual = currentManual;
				}
				continue;
			}
			matcher = INSTRUCTION.matcher(line);
			if (matcher.find()) {
				if (currentManual == null) {
					throw new IOException("index file " + processorFile +
						" does not specify manual first");
				}
				String mnemonic = matcher.group(1).trim().toUpperCase();
				String page = matcher.group(2).trim();
				ManualEntry entry = new ManualEntry(mnemonic,
					currentManual.getAbsolutePath(), missingDescription, page);
				manual.put(mnemonic, entry);
			}
		}
		if (defaultManual != null) {
			// Default entry, keyed by null (CASE_INSENSITIVE sorts null first)
			manual.put(null, new ManualEntry(null, defaultManual.getAbsolutePath(),
				missingDescription, null));
		}
	}
}
/** @return the exception recorded while loading the manual index, or null */
@Override
public Exception getManualException() {
	// Trigger the lazy load so any failure is captured before reporting
	initManual();
	return manualException;
}

/** @return true when a manual index is configured and loaded without error */
@Override
public boolean hasManual() {
	initManual();
	return description.getManualIndexFile() != null && manualException == null;
}
/**
 * Generates a limited translator XML tag for the specified address factory and optional register set.
 * Overlay spaces are emitted as &lt;space_overlay&gt;; RAM, register, unique and
 * OTHER spaces map to their corresponding tags; all other space types are skipped.
 * @param factory address factory
 * @param uniqueOffset the initial offset within the unique address space to start assigning temporary registers
 * @param optionalSymTab optional symbol table to be passed (may be null to omit). Only non-context registers
 * and user-defined pcodeop's are included.
 * @return the entire XML tag as a String
 */
public String buildTranslatorTag(AddressFactory factory, long uniqueOffset,
		SymbolTable optionalSymTab) {
	AddressSpace[] spclist = factory.getAllAddressSpaces();
	StringBuilder resBuf = new StringBuilder();
	resBuf.append("<sleigh");
	SpecXmlUtils.encodeBooleanAttribute(resBuf, "bigendian", isBigEndian());
	SpecXmlUtils.encodeUnsignedIntegerAttribute(resBuf, "uniqbase", uniqueOffset);
	resBuf.append(">\n");
	resBuf.append("<spaces");
	SpecXmlUtils.encodeStringAttribute(resBuf, "defaultspace",
		factory.getDefaultAddressSpace().getName());
	resBuf.append(">\n");
	String tag;
	int delay;
	boolean physical;
	boolean global;
	for (AddressSpace element : spclist) {
		// Overlay spaces carry only name/index/base, no size information
		if ((element instanceof OverlayAddressSpace)) {
			OverlayAddressSpace ospace = (OverlayAddressSpace) element;
			resBuf.append("<space_overlay");
			SpecXmlUtils.xmlEscapeAttribute(resBuf, "name", ospace.getName());
			SpecXmlUtils.encodeSignedIntegerAttribute(resBuf, "index", ospace.getUnique());
			SpecXmlUtils.encodeStringAttribute(resBuf, "base",
				ospace.getOverlayedSpace().getName());
			resBuf.append("/>\n");
			continue;
		}
		// Map the space type to tag name, pipeline delay and physical/global
		// flags; unknown types are silently skipped
		switch (element.getType()) {
			case AddressSpace.TYPE_RAM:
				tag = "space";
				delay = 1;
				physical = true;
				global = true;
				break;
			case AddressSpace.TYPE_REGISTER:
				tag = "space";
				delay = 0;
				physical = true;
				global = false;
				break;
			case AddressSpace.TYPE_UNIQUE:
				tag = "space_unique";
				delay = 0;
				physical = true;
				global = false;
				break;
			case AddressSpace.TYPE_OTHER:
				tag = "space_other";
				delay = 0;
				physical = true;
				global = true;
				break;
			default:
				continue;
		}
		resBuf.append("<").append(tag);
		SpecXmlUtils.encodeStringAttribute(resBuf, "name", element.getName());
		SpecXmlUtils.encodeSignedIntegerAttribute(resBuf, "index", element.getUnique());
		int size = element.getSize(); // Size in bits
		if (element instanceof SegmentedAddressSpace) {
			// TODO: SegmentedAddressSpace shouldn't really return 21
			size = 32;
		}
		// Sizes are clamped to 64 bits before converting to bytes
		if (size > 64) {
			size = 64;
		}
		int bytesize = (size + 7) / 8; // Convert bits to bytes
		SpecXmlUtils.encodeSignedIntegerAttribute(resBuf, "size", bytesize);
		if (element.getAddressableUnitSize() > 1) {
			SpecXmlUtils.encodeSignedIntegerAttribute(resBuf, "wordsize",
				element.getAddressableUnitSize());
		}
		SpecXmlUtils.encodeBooleanAttribute(resBuf, "bigendian", isBigEndian());
		SpecXmlUtils.encodeSignedIntegerAttribute(resBuf, "delay", delay);
		SpecXmlUtils.encodeBooleanAttribute(resBuf, "physical", physical);
		SpecXmlUtils.encodeBooleanAttribute(resBuf, "global", global);
		resBuf.append("/>\n");
	}
	resBuf.append("</spaces>\n");
	// Emit a truncate_space tag for each truncation requested by the
	// language definition
	SleighLanguageDescription sleighDescription =
		(SleighLanguageDescription) getLanguageDescription();
	Set<String> truncatedSpaceNames = sleighDescription.getTruncatedSpaceNames();
	if (!truncatedSpaceNames.isEmpty()) {
		for (String spaceName : truncatedSpaceNames) {
			int sz = sleighDescription.getTruncatedSpaceSize(spaceName);
			resBuf.append("<truncate_space");
			SpecXmlUtils.encodeStringAttribute(resBuf, "space", spaceName);
			SpecXmlUtils.encodeSignedIntegerAttribute(resBuf, "size", sz);
			resBuf.append("/>\n");
		}
	}
	if (optionalSymTab != null) {
		resBuf.append(buildSymbolsXml(optionalSymTab));
	}
	resBuf.append("</sleigh>\n");
	return resBuf.toString();
}
/**
 * Builds the &lt;symbol_table&gt; XML fragment for a translator tag.
 * Exports user-defined pcode ops and fixed varnodes, excluding the
 * context register itself; symbol headers are emitted first, followed by
 * the full content of each symbol.
 */
private static String buildSymbolsXml(SymbolTable symtab) {
	// Select the symbols worth exporting
	ArrayList<Symbol> exported = new ArrayList<>();
	for (Symbol sym : symtab.getSymbolList()) {
		boolean keep;
		if (sym instanceof UseropSymbol) {
			keep = true;
		}
		else if (sym instanceof VarnodeSymbol) {
			keep = !"contextreg".equals(sym.getName());
		}
		else {
			keep = false;
		}
		if (keep) {
			exported.add(sym);
		}
	}
	int total = exported.size();
	StringBuilder buf = new StringBuilder();
	buf.append("<symbol_table scopesize=\"1\" symbolsize=\"");
	buf.append(total);
	buf.append("\">\n");
	buf.append("<scope id=\"0x0\" parent=\"0x0\"/>\n");
	// Headers come first, one per symbol
	for (int i = 0; i < total; ++i) {
		Symbol sym = exported.get(i);
		String headTag = (sym instanceof UseropSymbol) ? "userop_head" : "varnode_sym_head";
		buf.append("<").append(headTag).append(" name=\"").append(sym.getName())
				.append("\" id=\"0x").append(Integer.toHexString(i))
				.append("\" scope=\"0x0\"/>\n");
	}
	// Then the full content of each symbol
	for (int i = 0; i < total; ++i) {
		Symbol sym = exported.get(i);
		if (sym instanceof UseropSymbol) {
			UseropSymbol opSym = (UseropSymbol) sym;
			buf.append("<userop name=\"").append(sym.getName()).append("\" id=\"0x")
					.append(Integer.toHexString(i)).append("\" scope=\"0x0\" index=\"")
					.append(opSym.getIndex()).append("\"/>\n");
		}
		else {
			VarnodeData vn = ((VarnodeSymbol) sym).getFixedVarnode();
			buf.append("<varnode_sym name=\"").append(sym.getName()).append("\" id=\"0x")
					.append(Integer.toHexString(i)).append("\" scope=\"0x0\" space=\"")
					.append(vn.space.getName()).append("\" offset=\"0x")
					.append(Long.toHexString(vn.offset)).append("\" size=\"")
					.append(vn.size).append("\"/>\n");
		}
	}
	buf.append("</symbol_table>\n");
	return buf.toString();
}
/**
 * Instantiates the optional ParallelInstructionLanguageHelper named by
 * the PARALLEL_INSTRUCTION_HELPER_CLASS language property, if present.
 * A class of the wrong type is only logged as an error; a class that
 * cannot be loaded or instantiated is fatal (SleighException).
 */
private void initParallelHelper() {
	String className =
		getProperty(GhidraLanguagePropertyKeys.PARALLEL_INSTRUCTION_HELPER_CLASS);
	if (className == null) {
		return;
	}
	try {
		Class<?> helperClass = Class.forName(className);
		if (!ParallelInstructionLanguageHelper.class.isAssignableFrom(helperClass)) {
			Msg.error(this,
				"Invalid Class specified for " +
					GhidraLanguagePropertyKeys.PARALLEL_INSTRUCTION_HELPER_CLASS + " (" +
					helperClass.getName() + "): " + description.getSpecFile());
		}
		else {
			// Requires a public no-arg constructor
			parallelHelper =
				(ParallelInstructionLanguageHelper) helperClass.getDeclaredConstructor()
					.newInstance();
		}
	}
	catch (Exception e) {
		throw new SleighException("Failed to instantiate " +
			GhidraLanguagePropertyKeys.PARALLEL_INSTRUCTION_HELPER_CLASS + " (" + className +
			"): " + description.getSpecFile(), e);
	}
}
/** @return the parallel instruction helper, or null if none is configured */
@Override
public ParallelInstructionLanguageHelper getParallelInstructionHelper() {
	return parallelHelper;
}

/** @return the vector registers known to the register manager, sorted */
@Override
public List<Register> getSortedVectorRegisters() {
	return registerManager.getSortedVectorRegisters();
}
}
| |
/**
* This software is released as part of the Pumpernickel project.
*
* All com.pump resources in the Pumpernickel project are distributed under the
* MIT License:
* https://raw.githubusercontent.com/mickleness/pumpernickel/master/License.txt
*
* More information about the Pumpernickel project is available here:
* https://mickleness.github.io/pumpernickel/
*/
package com.pump.showcase.demo;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.util.Base64;
import javax.imageio.ImageIO;
import javax.swing.JScrollPane;
import javax.swing.JTextPane;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.text.html.StyleSheet;
import com.pump.desktop.error.ErrorManager;
import com.pump.image.jpeg.JPEGMarker;
import com.pump.image.jpeg.JPEGMetaData;
import com.pump.image.jpeg.JPEGMetaDataListener;
import com.pump.io.HTMLEncoding;
import com.pump.text.html.QHTMLEditorKit;
/**
* A simple demo for the {@link JPEGMetaData} class.
*/
public class JPEGMetaDataDemo extends ShowcaseResourceExampleDemo<URL> {
	private static final long serialVersionUID = 1L;

	/** Renders the parsed meta data as HTML. */
	JTextPane textPane = new JTextPane();

	/**
	 * Receives parsing callbacks from {@link JPEGMetaData} and accumulates an
	 * HTML report, grouping output under one heading per JPEG marker.
	 */
	JPEGMetaDataListener listener = new JPEGMetaDataListener() {
		String currentMarkerCode = null;
		StringBuilder htmlBody = new StringBuilder();

		@Override
		public boolean isThumbnailAccepted(String markerName, int width,
				int height) {
			// Accept every thumbnail, regardless of size
			return true;
		}

		@Override
		public void addProperty(String markerName, String propertyName,
				Object value) {
			setMarker(markerName);
			htmlBody.append("<strong>" + HTMLEncoding.encode(propertyName)
					+ "</strong>: " + HTMLEncoding.encode(String.valueOf(value))
					+ "<br/>");
		}

		@Override
		public void addThumbnail(String markerName, BufferedImage bi) {
			setMarker(markerName);
			try {
				// Re-encode the thumbnail as a base64 data URI so it can be
				// embedded directly in the HTML report
				byte[] jpgBytes;
				try (ByteArrayOutputStream byteOut = new ByteArrayOutputStream()) {
					ImageIO.write(bi, "jpg", byteOut);
					jpgBytes = byteOut.toByteArray();
				}
				// encodeToString avoids the platform-default charset used by
				// new String(byte[])
				String base64 = Base64.getEncoder().encodeToString(jpgBytes);
				htmlBody.append(
						"<strong>Thumbnail</strong>: <img src=\"data:image/jpg;base64,"
								+ base64 + "\"/><br>");
			} catch (Exception e) {
				String stacktrace = ErrorManager.getStackTrace(e);
				// fixed malformed markup: was "</pre>br"
				htmlBody.append(
						"<pre>" + HTMLEncoding.encode(stacktrace) + "</pre><br>");
			}
		}

		@Override
		public void addComment(String markerName, String comment) {
			setMarker(markerName);
			htmlBody.append("<strong>Comment</strong>: "
					+ HTMLEncoding.encode(String.valueOf(comment)) + "<br>");
		}

		/**
		 * Starts a new marker section (heading + indented div) the first time
		 * output arrives for a marker, closing the previous section if any.
		 */
		private void setMarker(String markerCode) {
			if (markerCode.equals(currentMarkerCode)) {
				return;
			}
			if (currentMarkerCode != null)
				htmlBody.append("</div>");
			JPEGMarker marker = JPEGMarker.getMarkerForByteCode(markerCode);
			String str = marker != null ? marker.name() : "Unknown";
			str += " (" + markerCode + ")";
			htmlBody.append(
					"<h3>" + HTMLEncoding.encode(str) + " Identified: </h3>");
			htmlBody.append("<div class='indented-section'>");
			currentMarkerCode = markerCode;
		}

		@Override
		public void startFile() {
			// Reset the report for each new file
			htmlBody = new StringBuilder();
		}

		@Override
		public void endFile() {
			// Close the last open section and publish the report
			if (currentMarkerCode != null)
				htmlBody.append("</div>");
			htmlBody.insert(0, "<html>");
			htmlBody.append("</html>");
			textPane.setText(htmlBody.toString());
		}
	};

	/** Streams JPEG data and feeds the listener above. */
	JPEGMetaData reader = new JPEGMetaData(listener);

	public JPEGMetaDataDemo() {
		super(URL.class, true, "jpg", "jpeg");
		configurationLabel.setText("Input:");
		exampleLabel.setText("Output:");

		// The output pane fills the example panel
		examplePanel.setLayout(new GridBagLayout());
		GridBagConstraints c = new GridBagConstraints();
		c.gridx = 0;
		c.gridy = 0;
		c.weightx = 1;
		c.weighty = 1;
		c.fill = GridBagConstraints.BOTH;
		JScrollPane scrollPane = new JScrollPane(textPane);
		scrollPane.setPreferredSize(new Dimension(500, 200));
		examplePanel.add(scrollPane, c);

		textPane.setEditable(false);
		HTMLEditorKit kit = new QHTMLEditorKit();
		textPane.setEditorKit(kit);
		StyleSheet styleSheet = kit.getStyleSheet();
		styleSheet.addRule(
				"body { padding: 5px; font-family: sans-serif; color: black; background: white; }");
		styleSheet.addRule("h3 { color: #624cab; font: bold 110%;}");
		styleSheet.addRule(
				".indented-section { margin-left: 10px; line-height:1.5; }");
	}

	@Override
	public String getTitle() {
		return "JPEGMetaData Demo";
	}

	@Override
	public String getSummary() {
		return "This demos the JPEGMetaData's ability to parse meta data.";
	}

	@Override
	public URL getHelpURL() {
		return JPEGMetaDataDemo.class.getResource("jpegMetaDataDemo.html");
	}

	@Override
	public String[] getKeywords() {
		return new String[] { "jpeg", "jpg", "thumbnail", "preview", "exif",
				"performance" };
	}

	@Override
	public Class<?>[] getClasses() {
		// parameterized array literal instead of a raw Class[]
		return new Class<?>[] { JPEGMetaData.class };
	}

	@Override
	protected void refreshFile(URL url, String str) {
		if (url == null) {
			if (str.isEmpty()) {
				textPane.setText(
						"<html>Select a file or paste a URL to read the JPEG meta data.</html>");
			} else {
				textPane.setText("<html>Unable to read \""
						+ HTMLEncoding.encode(str) + "\"</html>");
			}
		} else {
			// The listener repaints textPane via endFile() when the read
			// completes; failures render the stack trace instead
			try (InputStream in = url.openStream()) {
				reader.read(in);
			} catch (Exception e) {
				String stacktrace = ErrorManager.getStackTrace(e);
				textPane.setText("<html>Unable to read \""
						+ HTMLEncoding.encode(str) + "<p></p><pre>"
						+ HTMLEncoding.encode(stacktrace) + "</pre></html>");
			}
		}
	}
}
| |
package org.oddjob.beancmpr.results;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.oddjob.beancmpr.Comparison;
import org.oddjob.beancmpr.matchables.BeanCmprResultsHandler;
import org.oddjob.beancmpr.matchables.Matchable;
import org.oddjob.beancmpr.matchables.MatchableGroup;
import org.oddjob.beancmpr.matchables.MatchableMetaData;
import org.oddjob.beancmpr.matchables.MultiValueComparison;
import org.oddjob.beancmpr.matchables.ValueIterable;
/**
* @oddjob.description A handler that will generate results in format
* used by Dido. Dido (Data In Data Out) is currently experimental.
 * It can be found at <a href="http://sourceforge.net/projects/di-do">
 * http://sourceforge.net/projects/di-do</a>.
*
* @author rob
*
*/
public class DidoBeanResultHandler
implements BeanCmprResultsHandler, PlaysWithBeanbus {
/**
 * @oddjob.property
 * @oddjob.description A collection that result beans will be added to.
 * @oddjob.required No.
 */
// Destination for every generated result bean; must be set before any
// callback fires or a NullPointerException will result.
private Collection<? super Object> out;

/**
 * @oddjob.property
 * @oddjob.description If true then result beans will not be created
 * when a comparison results in a match. If false result beans
 * for all comparisons will be created.
 * @oddjob.required No. Defaults to false.
 */
private boolean ignoreMatches;
@Override
public void compared(MultiValueComparison<Matchable> comparison) {
	// A result of 0 means both sides matched; such comparisons are
	// suppressed when ignoreMatches is set.
	boolean isMatch = comparison.getResult() == 0;
	if (!(ignoreMatches && isMatch)) {
		out.add(createComparisonResult(comparison));
	}
}
@Override
public void xMissing(MatchableGroup ys) {
	// Emit one X_MISSING result bean per matchable on the Y side.
	for (Matchable missingX : ys.getGroup()) {
		out.add(createXMissingResult(missingX));
	}
}
@Override
public void yMissing(MatchableGroup xs) {
	// Emit one Y_MISSING result bean per matchable on the X side.
	for (Matchable missingY : xs.getGroup()) {
		out.add(createYMissingResult(missingY));
	}
}
/** @return the destination collection for result beans (may be null). */
public Collection<? super Object> getOut() {
	return out;
}

@Override
public void setOut(Collection<? super Object> out) {
	this.out = out;
}

/** @return true when matching comparisons are suppressed. */
public boolean isIgnoreMatches() {
	return ignoreMatches;
}

public void setIgnoreMatches(boolean ignoreMatches) {
	this.ignoreMatches = ignoreMatches;
}

@Override
public String toString() {
	return getClass().getSimpleName() +
		", ignoreMatches=" + ignoreMatches;
}
/**
 * Creates an X_MISSING result bean for a matchable present only on the Y
 * side; every Y value is wrapped in a comparison reporting X as absent.
 */
protected Object createXMissingResult(Matchable y) {
	MatchableMetaData metaData = y.getMetaData();
	List<XMissingComparison> comparisons =
			new ArrayList<XMissingComparison>();
	for (Object value : y.getValues()) {
		comparisons.add(new XMissingComparison(value));
	}
	Map<String, Object> keys =
			keyMap(metaData.getKeyProperties(), y.getKeys());
	Map<String, Comparison<?>> values =
			comparisonMap(metaData.getValueProperties(), comparisons);
	return new DidoResultBean(metaData,
			MatchResultType.Type.X_MISSING, keys, values);
}
/**
 * Creates a Y_MISSING result bean for a matchable present only on the X
 * side; every X value is wrapped in a comparison reporting Y as absent.
 */
protected Object createYMissingResult(Matchable x) {
	MatchableMetaData metaData = x.getMetaData();
	List<YMissingComparison> comparisons =
			new ArrayList<YMissingComparison>();
	for (Object value : x.getValues()) {
		comparisons.add(new YMissingComparison(value));
	}
	Map<String, Object> keys =
			keyMap(metaData.getKeyProperties(), x.getKeys());
	Map<String, Comparison<?>> values =
			comparisonMap(metaData.getValueProperties(), comparisons);
	return new DidoResultBean(metaData,
			MatchResultType.Type.Y_MISSING, keys, values);
}
/**
 * Create a result bean for a comparison where both X and Y existed.
 * A result of 0 is reported as EQUAL, anything else as NOT_EQUAL.
 * Meta data and keys are taken from the X side.
 *
 * @param matchableComparison The completed comparison.
 * @return The result bean.
 */
protected Object createComparisonResult(
        MultiValueComparison<Matchable> matchableComparison) {
    Matchable x = matchableComparison.getX();
    MatchableMetaData metaData = x.getMetaData();
    MatchResultType.Type type = matchableComparison.getResult() == 0
            ? MatchResultType.Type.EQUAL
            : MatchResultType.Type.NOT_EQUAL;
    return new DidoResultBean(
            metaData,
            type,
            keyMap(metaData.getKeyProperties(), x.getKeys()),
            comparisonMap(metaData.getValueProperties(),
                    matchableComparison.getValueComparisons()));
}
/**
 * Pair up key property names with key values, preserving iteration
 * order.
 *
 * @param keyProperties The key property names.
 * @param keyValues The corresponding key values.
 * @return An insertion-ordered map of property name to key value.
 */
private Map<String, Object> keyMap(Iterable<String> keyProperties,
        Iterable<?> keyValues) {
    Map<String, Object> result = new LinkedHashMap<String, Object>();
    ValueIterable<Object> pairs = new ValueIterable<Object>(
            keyProperties, keyValues);
    for (ValueIterable.Value<Object> pair : pairs) {
        result.put(pair.getPropertyName(), pair.getValue());
    }
    return result;
}
/**
 * Pair up value property names with their comparisons, preserving
 * iteration order.
 *
 * @param comparisonProperties The value property names.
 * @param comparisonValues The corresponding comparisons.
 * @return An insertion-ordered map of property name to comparison.
 */
private Map<String, Comparison<?>> comparisonMap(
        Iterable<String> comparisonProperties,
        Iterable<? extends Comparison<?>> comparisonValues) {
    Map<String, Comparison<?>> result =
            new LinkedHashMap<String, Comparison<?>>();
    ValueIterable<Comparison<?>> pairs =
            new ValueIterable<Comparison<?>>(
                    comparisonProperties, comparisonValues);
    for (ValueIterable.Value<Comparison<?>> pair : pairs) {
        result.put(pair.getPropertyName(), pair.getValue());
    }
    return result;
}
/**
 * A comparison standing in for a value on the Y side that has no X
 * value to compare against.
 */
public static class XMissingComparison implements Comparison<Object> {

    // The Y side value. There is no X side value.
    private final Object yValue;

    public XMissingComparison(Object yValue) {
        this.yValue = yValue;
    }

    /**
     * @return Always null as there is no X value.
     */
    @Override
    public Object getX() {
        return null;
    }

    /**
     * @return The Y side value.
     */
    @Override
    public Object getY() {
        return yValue;
    }

    /**
     * @return Always 1 - never 0, since a missing X can never be a
     * match. (Positive presumably by analogy with X < Y - confirm
     * against the Comparison contract.)
     */
    @Override
    public int getResult() {
        return 1;
    }

    /**
     * @return Always null - no summary text is provided.
     */
    @Override
    public String getSummaryText() {
        return null;
    }
}
/**
 * A comparison standing in for a value on the X side that has no Y
 * value to compare against.
 */
public static class YMissingComparison implements Comparison<Object> {

    // The X side value. There is no Y side value.
    private final Object xValue;

    public YMissingComparison(Object xValue) {
        this.xValue = xValue;
    }

    /**
     * @return The X side value.
     */
    @Override
    public Object getX() {
        return xValue;
    }

    /**
     * @return Always null as there is no Y value.
     */
    @Override
    public Object getY() {
        return null;
    }

    /**
     * @return Always -1 - never 0, since a missing Y can never be a
     * match. (Negative presumably by analogy with X > Y - confirm
     * against the Comparison contract.)
     */
    @Override
    public int getResult() {
        return -1;
    }

    /**
     * @return Always null - no summary text is provided.
     */
    @Override
    public String getSummaryText() {
        return null;
    }
}
}
| |
/**
*/
package org.tud.inf.st.mbt.features.provider;
import java.util.ArrayList;
import java.util.Collection;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.edit.provider.ChangeNotifier;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.eclipse.emf.edit.provider.IChangeNotifier;
import org.eclipse.emf.edit.provider.IDisposable;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.INotifyChangedListener;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.tud.inf.st.mbt.features.util.FeaturesAdapterFactory;
/**
* This is the factory that is used to provide the interfaces needed to support Viewers.
* The adapters generated by this factory convert EMF adapter notifications into calls to {@link #fireNotifyChanged fireNotifyChanged}.
* The adapters also support Eclipse property sheets.
* Note that most of the adapters are shared among multiple instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// NOTE(review): this class is EMF @generated code; it is left byte-identical
// (comments only added) so that regeneration stays a clean diff.
public class FeaturesItemProviderAdapterFactory extends FeaturesAdapterFactory implements ComposeableAdapterFactory, IChangeNotifier, IDisposable {
/**
 * This keeps track of the root adapter factory that delegates to this adapter factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ComposedAdapterFactory parentAdapterFactory;
/**
 * This is used to implement {@link org.eclipse.emf.edit.provider.IChangeNotifier}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IChangeNotifier changeNotifier = new ChangeNotifier();
/**
 * This keeps track of all the supported types checked by {@link #isFactoryForType isFactoryForType}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<Object> supportedTypes = new ArrayList<Object>();
/**
 * This constructs an instance.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public FeaturesItemProviderAdapterFactory() {
supportedTypes.add(IEditingDomainItemProvider.class);
supportedTypes.add(IStructuredItemContentProvider.class);
supportedTypes.add(ITreeItemContentProvider.class);
supportedTypes.add(IItemLabelProvider.class);
supportedTypes.add(IItemPropertySource.class);
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.Feature} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected FeatureItemProvider featureItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.Feature}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createFeatureAdapter() {
// Lazily created; the one instance is shared by all Feature objects.
if (featureItemProvider == null) {
featureItemProvider = new FeatureItemProvider(this);
}
return featureItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.ExplicitSet} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ExplicitSetItemProvider explicitSetItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.ExplicitSet}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createExplicitSetAdapter() {
if (explicitSetItemProvider == null) {
explicitSetItemProvider = new ExplicitSetItemProvider(this);
}
return explicitSetItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.Union} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected UnionItemProvider unionItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.Union}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createUnionAdapter() {
if (unionItemProvider == null) {
unionItemProvider = new UnionItemProvider(this);
}
return unionItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.Intersection} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IntersectionItemProvider intersectionItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.Intersection}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createIntersectionAdapter() {
if (intersectionItemProvider == null) {
intersectionItemProvider = new IntersectionItemProvider(this);
}
return intersectionItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.Without} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected WithoutItemProvider withoutItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.Without}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createWithoutAdapter() {
if (withoutItemProvider == null) {
withoutItemProvider = new WithoutItemProvider(this);
}
return withoutItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.SetReference} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected SetReferenceItemProvider setReferenceItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.SetReference}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createSetReferenceAdapter() {
if (setReferenceItemProvider == null) {
setReferenceItemProvider = new SetReferenceItemProvider(this);
}
return setReferenceItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.StandaloneFeatureSet} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected StandaloneFeatureSetItemProvider standaloneFeatureSetItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.StandaloneFeatureSet}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createStandaloneFeatureSetAdapter() {
if (standaloneFeatureSetItemProvider == null) {
standaloneFeatureSetItemProvider = new StandaloneFeatureSetItemProvider(this);
}
return standaloneFeatureSetItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.SubSet} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected SubSetItemProvider subSetItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.SubSet}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createSubSetAdapter() {
if (subSetItemProvider == null) {
subSetItemProvider = new SubSetItemProvider(this);
}
return subSetItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.FeatureReference} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected FeatureReferenceItemProvider featureReferenceItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.FeatureReference}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createFeatureReferenceAdapter() {
if (featureReferenceItemProvider == null) {
featureReferenceItemProvider = new FeatureReferenceItemProvider(this);
}
return featureReferenceItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.FeatureAttribute} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected FeatureAttributeItemProvider featureAttributeItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.FeatureAttribute}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createFeatureAttributeAdapter() {
if (featureAttributeItemProvider == null) {
featureAttributeItemProvider = new FeatureAttributeItemProvider(this);
}
return featureAttributeItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.Configuration} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ConfigurationItemProvider configurationItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.Configuration}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createConfigurationAdapter() {
if (configurationItemProvider == null) {
configurationItemProvider = new ConfigurationItemProvider(this);
}
return configurationItemProvider;
}
/**
 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.features.FeatureVersion} instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected FeatureVersionItemProvider featureVersionItemProvider;
/**
 * This creates an adapter for a {@link org.tud.inf.st.mbt.features.FeatureVersion}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter createFeatureVersionAdapter() {
if (featureVersionItemProvider == null) {
featureVersionItemProvider = new FeatureVersionItemProvider(this);
}
return featureVersionItemProvider;
}
/**
 * This returns the root adapter factory that contains this factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public ComposeableAdapterFactory getRootAdapterFactory() {
// Walks up the chain of composed factories until the root is reached.
return parentAdapterFactory == null ? this : parentAdapterFactory.getRootAdapterFactory();
}
/**
 * This sets the composed adapter factory that contains this factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setParentAdapterFactory(ComposedAdapterFactory parentAdapterFactory) {
this.parentAdapterFactory = parentAdapterFactory;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean isFactoryForType(Object type) {
return supportedTypes.contains(type) || super.isFactoryForType(type);
}
/**
 * This implementation substitutes the factory itself as the key for the adapter.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Adapter adapt(Notifier notifier, Object type) {
return super.adapt(notifier, this);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object adapt(Object object, Object type) {
if (isFactoryForType(type)) {
Object adapter = super.adapt(object, type);
// Only return the adapter if it actually is of the requested type.
if (!(type instanceof Class<?>) || (((Class<?>)type).isInstance(adapter))) {
return adapter;
}
}
return null;
}
/**
 * This adds a listener.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void addListener(INotifyChangedListener notifyChangedListener) {
changeNotifier.addListener(notifyChangedListener);
}
/**
 * This removes a listener.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void removeListener(INotifyChangedListener notifyChangedListener) {
changeNotifier.removeListener(notifyChangedListener);
}
/**
 * This delegates to {@link #changeNotifier} and to {@link #parentAdapterFactory}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void fireNotifyChanged(Notification notification) {
changeNotifier.fireNotifyChanged(notification);
if (parentAdapterFactory != null) {
parentAdapterFactory.fireNotifyChanged(notification);
}
}
/**
 * This disposes all of the item providers created by this factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void dispose() {
// Only providers that were actually lazily instantiated are disposed.
if (featureItemProvider != null) featureItemProvider.dispose();
if (explicitSetItemProvider != null) explicitSetItemProvider.dispose();
if (unionItemProvider != null) unionItemProvider.dispose();
if (intersectionItemProvider != null) intersectionItemProvider.dispose();
if (withoutItemProvider != null) withoutItemProvider.dispose();
if (setReferenceItemProvider != null) setReferenceItemProvider.dispose();
if (standaloneFeatureSetItemProvider != null) standaloneFeatureSetItemProvider.dispose();
if (subSetItemProvider != null) subSetItemProvider.dispose();
if (featureReferenceItemProvider != null) featureReferenceItemProvider.dispose();
if (featureAttributeItemProvider != null) featureAttributeItemProvider.dispose();
if (configurationItemProvider != null) configurationItemProvider.dispose();
if (featureVersionItemProvider != null) featureVersionItemProvider.dispose();
}
}
| |
package org.testng.internal;
import org.testng.ITestResult;
import org.testng.TestNGException;
import org.testng.collections.Lists;
import org.testng.collections.Maps;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Simple graph class to implement topological sort (used to sort methods based on what groups
 * they depend on).
 *
 * @author Cedric Beust, Aug 19, 2004
 */
public class Graph<T extends Object> {
  private static boolean m_verbose = false;

  /** All the nodes in the graph, keyed by the object they wrap. */
  private Map<T, Node<T>> m_nodes = Maps.newHashMap();

  /** Result of the last {@link #topologicalSort()}; null until sorted. */
  private List<T> m_strictlySortedNodes = null;

  // A map of nodes that are not the predecessors of any node
  // (not needed for the algorithm but convenient to calculate
  // the parallel/sequential lists in TestNG). Lazily created by
  // addPredecessor() and topologicalSort().
  private Map<T, Node<T>> m_independentNodes = null;

  /**
   * Adds a new node wrapping the given object. A node is considered
   * independent until a predecessor relationship involving it is added.
   */
  public void addNode(T tm) {
    ppp("ADDING NODE " + tm + " " + tm.hashCode());
    m_nodes.put(tm, new Node<T>(tm));
    // Initially, all the nodes are put in the independent list as well
  }

  /**
   * @return The keys of the direct predecessors of the given node.
   */
  public Set<T> getPredecessors(T node) {
    return findNode(node).getPredecessors().keySet();
  }

  /*private boolean hasBeenSorted() {
    return null != m_strictlySortedNodes;
  }*/

  /**
   * @return true if the object has no ordering relationship with any other
   * node. Before any predecessor has been registered the lazy map is still
   * null; every node is independent then (previously this case threw a
   * NullPointerException).
   */
  public boolean isIndependent(T object) {
    if (null == m_independentNodes) {
      return true;
    }
    return m_independentNodes.containsKey(object);
  }

  private Node<T> findNode(T object) {
    return m_nodes.get(object);
  }

  /**
   * Records that {@code predecessor} must come before {@code tm} and removes
   * both from the independent set.
   *
   * @throws TestNGException if {@code tm} was never added to the graph.
   */
  public void addPredecessor(T tm, T predecessor) {
    Node<T> node = findNode(tm);
    if (null == node) {
      throw new TestNGException("Non-existing node: " + tm);
    }
    else {
      node.addPredecessor(predecessor);
      addNeighbor(tm, predecessor);
      // Remove these two nodes from the independent list
      if (null == m_independentNodes) {
        // Lazily seed the independent map with every node currently known.
        m_independentNodes = Maps.newHashMap();
        for (T k : m_nodes.keySet()) {
          m_independentNodes.put(k, m_nodes.get(k));
        }
      }
      m_independentNodes.remove(predecessor);
      m_independentNodes.remove(tm);
      ppp(" REMOVED " + predecessor + " FROM INDEPENDENT OBJECTS");
    }
  }

  private void addNeighbor(T tm, T predecessor) {
    findNode(tm).addNeighbor(findNode(predecessor));
  }

  public Set<T> getNeighbors(T t) {
    Set<T> result = new HashSet<T>();
    for (Node<T> n : findNode(t).getNeighbors()) {
      result.add(n.getObject());
    }
    return result;
  }

  private Collection<Node<T>> getNodes() {
    return m_nodes.values();
  }

  public Collection<T> getNodeValues() {
    return m_nodes.keySet();
  }

  /**
   * @return All the nodes that don't have any order with each other.
   */
  public Set<T> getIndependentNodes() {
    // Null means no predecessor was ever registered, so every node is
    // independent (previously this case threw a NullPointerException).
    if (null == m_independentNodes) {
      return m_nodes.keySet();
    }
    return m_independentNodes.keySet();
  }

  /**
   * @return All the nodes that have an order with each other, sorted
   * in one of the valid sorts.
   */
  public List<T> getStrictlySortedNodes() {
    return m_strictlySortedNodes;
  }

  /**
   * Topologically sorts all dependent nodes; the result is available from
   * {@link #getStrictlySortedNodes()}.
   *
   * @throws TestNGException if the graph contains a cyclic dependency.
   */
  public void topologicalSort() {
    ppp("================ SORTING");
    m_strictlySortedNodes = Lists.newArrayList();
    if (null == m_independentNodes) {
      m_independentNodes = Maps.newHashMap();
    }

    //
    // Clone the list of nodes but only keep those that are
    // not independent.
    //
    List<Node<T>> nodes2 = Lists.newArrayList();
    for (Node<T> n : getNodes()) {
      if (! isIndependent(n.getObject())) {
        ppp("ADDING FOR SORT: " + n.getObject());
        nodes2.add(n.clone());
      }
      else {
        ppp("SKIPPING INDEPENDENT NODE " + n);
      }
    }

    //
    // Sort the nodes alphabetically to make sure that methods of the same class
    // get run close to each other as much as possible
    //
    Collections.sort(nodes2);

    //
    // Sort
    //
    while (! nodes2.isEmpty()) {
      //
      // Find all the nodes that don't have any predecessors, add
      // them to the result and mark them for removal
      //
      Node<T> node = findNodeWithNoPredecessors(nodes2);
      if (null == node) {
        // No predecessor-free node left but the list is not empty:
        // there must be a cycle.
        List<T> cycle = new Tarjan<T>(this, nodes2.get(0).getObject()).getCycle();
        StringBuilder sb = new StringBuilder();
        sb.append("The following methods have cyclic dependencies:\n");
        for (T m : cycle) {
          sb.append(m).append("\n");
        }
        throw new TestNGException(sb.toString());
      }
      else {
        m_strictlySortedNodes.add(node.getObject());
        removeFromNodes(nodes2, node);
      }
    }
    ppp("=============== DONE SORTING");
    if (m_verbose) {
      dumpSortedNodes();
    }
  }

  private void dumpSortedNodes() {
    System.out.println("====== SORTED NODES");
    for (T n : m_strictlySortedNodes) {
      System.out.println(" " + n);
    }
    System.out.println("====== END SORTED NODES");
  }

  /** Debugging aid; currently not called from anywhere in this class. */
  private void dumpGraph() {
    System.out.println("====== GRAPH");
    for (Node<T> n : m_nodes.values()) {
      System.out.println(" " + n);
    }
  }

  /**
   * Remove a node from a list of nodes and update the list of predecessors
   * for all the remaining nodes.
   */
  private void removeFromNodes(List<Node<T>> nodes, Node<T> node) {
    nodes.remove(node);
    for (Node<T> n : nodes) {
      n.removePredecessor(node.getObject());
    }
  }

  private static void ppp(String s) {
    if (m_verbose) {
      System.out.println("[Graph] " + s);
    }
  }

  /** @return The first node of the list with no predecessors, or null. */
  private Node<T> findNodeWithNoPredecessors(List<Node<T>> nodes) {
    for (Node<T> n : nodes) {
      if (! n.hasPredecessors()) {
        return n;
      }
    }
    return null;
  }

  /**
   * @param o
   * @return A list of all the predecessors for o
   */
  public List<T> findPredecessors(T o) {
    // Locate the node
    Node<T> node = findNode(o);
    if (null == node) {
      // This can happen if an interceptor returned new methods
      return Lists.newArrayList();
    }

    // If we found the node, use breadth first search to find all
    // all of the predecessors of o. "result" is the growing list
    // of all predecessors. "visited" is the set of items we've
    // already encountered. "queue" is the queue of items whose
    // predecessors we haven't yet explored.
    LinkedList<T> result = new LinkedList<T>();
    Set<T> visited = new HashSet<T>();
    LinkedList<T> queue = new LinkedList<T>();
    visited.add(o);
    queue.addLast(o);
    while (! queue.isEmpty()) {
      for (T obj : getPredecessors(queue.removeFirst())) {
        if (! visited.contains(obj)) {
          visited.add(obj);
          queue.addLast(obj);
          result.addFirst(obj);
        }
      }
    }
    return result;
  }

  @Override
  public String toString() {
    StringBuilder result = new StringBuilder("[Graph ");
    for (T node : m_nodes.keySet()) {
      result.append(findNode(node)).append(" ");
    }
    result.append("]");
    return result.toString();
  }

  /////
  // class Node
  //
  public static class Node<T> implements Comparable<Node<T>> {
    private T m_object = null;
    // Predecessors keyed by themselves so removal by object is O(1).
    private Map<T, T> m_predecessors = Maps.newHashMap();

    public Node(T tm) {
      m_object = tm;
    }

    private Set<Node<T>> m_neighbors = new HashSet<Node<T>>();

    public void addNeighbor(Node<T> neighbor) {
      m_neighbors.add(neighbor);
    }

    public Set<Node<T>> getNeighbors() {
      return m_neighbors;
    }

    /** Copy with the same object and predecessors; neighbors are NOT copied. */
    @Override
    public Node<T> clone() {
      Node<T> result = new Node<T>(m_object);
      for (T pred : m_predecessors.values()) {
        result.addPredecessor(pred);
      }
      return result;
    }

    public T getObject() {
      return m_object;
    }

    public Map<T, T> getPredecessors() {
      return m_predecessors;
    }

    /**
     *
     * @return true if this predecessor was found and removed
     */
    public boolean removePredecessor(T o) {
      boolean result = false;
      T pred = m_predecessors.get(o);
      if (null != pred) {
        result = null != m_predecessors.remove(o);
        if (result) {
          ppp(" REMOVED PRED " + o + " FROM NODE " + m_object);
        }
        else {
          ppp(" FAILED TO REMOVE PRED " + o + " FROM NODE " + m_object);
        }
      }
      return result;
    }

    private void dump() {
      ppp(toString());
    }

    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder("[Node:" + m_object);
      sb.append(" pred:");
      for (T o : m_predecessors.values()) {
        sb.append(" " + o.toString());
      }
      sb.append("]");
      String result = sb.toString();
      return result;
    }

    public void addPredecessor(T tm) {
      ppp(" ADDING PREDECESSOR FOR " + m_object + " ==> " + tm);
      m_predecessors.put(tm, tm);
    }

    public boolean hasPredecessors() {
      return m_predecessors.size() > 0;
    }

    public boolean hasPredecessor(T m) {
      return m_predecessors.containsKey(m);
    }

    /** Alphabetical order by the wrapped object's toString(). */
    public int compareTo(Node<T> o) {
      return getObject().toString().compareTo(o.getObject().toString());
    }
  }
  //
  // class Node
  /////

  /** Self-test; run with -ea to enable the assertions. */
  public static void main(String[] argv) {
    Graph<String> g = new Graph<String>();
    g.addNode("3");
    g.addNode("1");
    g.addNode("2.2");
    g.addNode("independent");
    g.addNode("2.1");
    g.addNode("2.3");
    // 1 -> 2.1, 2.2, 2.3 --> 3
    // BUG FIX: the second edge used to repeat ("3", "2.1") and never added
    // ("3", "2.2"), which contradicted the comment above and made the
    // "predecessors.size() == 4" assertion below fail.
    g.addPredecessor("3", "2.1");
    g.addPredecessor("3", "2.2");
    g.addPredecessor("3", "2.3");
    g.addPredecessor("2.1", "1");
    g.addPredecessor("2.2", "1");
    g.addPredecessor("2.3", "1");

    g.topologicalSort();
    List<String> l = g.getStrictlySortedNodes();
    for (String s : l) {
      System.out.println(" " + s);
    }
    int i = 0;
    assert "1".equals(l.get(i));
    i++;
    // BUG FIX: these used to test "2.1" twice and never "2.3".
    assert "2.1".equals(l.get(i)) || "2.2".equals(l.get(i)) || "2.3".equals(l.get(i));
    i++;
    assert "2.1".equals(l.get(i)) || "2.2".equals(l.get(i)) || "2.3".equals(l.get(i));
    i++;
    assert "2.1".equals(l.get(i)) || "2.2".equals(l.get(i)) || "2.3".equals(l.get(i));
    i++;
    assert "3".equals(l.get(i));
    assert 1 == g.getIndependentNodes().size();

    //
    // Test findPredecessors
    //
    ppp("GRAPH:" + g);
    {
      List<String> predecessors = g.findPredecessors("2.1");
      assert predecessors.size() == 1;
      assert predecessors.get(0).equals("1");
    }
    {
      List<String> predecessors = g.findPredecessors("3");
      assert predecessors.size() == 4;
      // "1" is discovered last and addFirst'ed, so it comes out first.
      assert predecessors.get(0).equals("1");
      // BUG FIX: the original asserts mixed up indices and tested a
      // nonexistent node "2".
      for (int j = 1; j < 4; j++) {
        String p = predecessors.get(j);
        assert p.equals("2.1") || p.equals("2.2") || p.equals("2.3");
      }
    }
    ppp("TESTS PASSED");
  }
}
| |
package com.sumzerotrading.eod.trading.strategy;
import com.sumzerotrading.broker.order.TradeOrder;
import com.sumzerotrading.broker.order.TradeOrder.Type;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.LocalTime;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
 * Loads and exposes the configuration for the EOD trading strategy: TWS
 * connection settings, trade times, order types and the long/short ticker
 * pairs.
 *
 * @author RobTerpilowski
 */
public class EODSystemProperties {

    protected LocalTime startTime;
    protected LocalTime marketCloseTime;
    protected LocalTime exitTime;
    protected String twsHost;
    protected int twsPort;
    protected int twsClientId;
    protected int tradeSizeDollars;
    protected Map<String,String> longShortTickerMap = new HashMap<>();
    protected String strategyDirectory;
    protected TradeOrder.Type entryOrderType;
    protected int exitSeconds = 0;
    protected TradeOrder.Type exitOrderType;

    //For Unit tests
    protected EODSystemProperties() {
    }

    /**
     * Loads the configuration from a properties file.
     *
     * @param fileName Path of the properties file.
     * @throws IOException if the file cannot be read.
     */
    public EODSystemProperties(String fileName) throws IOException {
        this( new FileInputStream(fileName) );
    }

    /**
     * Loads the configuration from the given stream. The stream is always
     * closed, even when loading fails.
     *
     * @param inputStream Stream containing the properties; closed by this
     *                    constructor.
     * @throws IOException if the stream cannot be read.
     */
    public EODSystemProperties(InputStream inputStream) throws IOException {
        Properties props = new Properties();
        // BUG FIX: the stream used to be closed only on the success path,
        // leaking it if props.load() threw. try-with-resources closes it
        // unconditionally.
        try (InputStream in = inputStream) {
            props.load(in);
        }
        parseProps(props);
    }

    public LocalTime getStartTime() {
        return startTime;
    }

    public String getTwsHost() {
        return twsHost;
    }

    public int getTwsPort() {
        return twsPort;
    }

    public int getTwsClientId() {
        return twsClientId;
    }

    public int getTradeSizeDollars() {
        return tradeSizeDollars;
    }

    public Map<String, String> getLongShortTickerMap() {
        return longShortTickerMap;
    }

    public LocalTime getMarketCloseTime() {
        return marketCloseTime;
    }

    public String getStrategyDirectory() {
        return strategyDirectory;
    }

    public TradeOrder.Type getEntryOrderType() {
        return entryOrderType;
    }

    public Type getExitOrderType() {
        return exitOrderType;
    }

    public int getExitSeconds() {
        return exitSeconds;
    }

    public LocalTime getExitTime() {
        return exitTime;
    }

    /**
     * Populates the fields from the given properties. Times are parsed with
     * {@link LocalTime#parse}; order types default to "MOC" (entry) and
     * "MOO" (exit); exit.seconds defaults to 0. All other keys are required
     * (a missing key surfaces as a NumberFormatException or
     * NullPointerException from the parsers).
     */
    protected void parseProps( Properties props ) {
        twsHost = props.getProperty("tws.host");
        twsPort = Integer.parseInt(props.getProperty("tws.port"));
        twsClientId = Integer.parseInt(props.getProperty("tws.client.id"));
        tradeSizeDollars = Integer.parseInt(props.getProperty("trade.size.dollars"));
        startTime = LocalTime.parse(props.getProperty("start.time"));
        exitTime = LocalTime.parse(props.getProperty("exit.time"));
        marketCloseTime = LocalTime.parse(props.getProperty("market.close.time"));
        strategyDirectory = props.getProperty("strategy.directory");
        entryOrderType = getOrderType(props.getProperty("entry.order.type", "MOC"));
        longShortTickerMap = getLongShortTickers(props);
        exitOrderType = getOrderType(props.getProperty("exit.order.type", "MOO"));
        exitSeconds = Integer.parseInt(props.getProperty("exit.seconds", "0"));
    }

    /**
     * Maps a property value to an order type: MKT, MOC or MOO
     * (case-insensitive).
     *
     * @throws IllegalStateException for any other value.
     */
    protected TradeOrder.Type getOrderType( String orderType ) {
        if( "MKT".equalsIgnoreCase(orderType) ) {
            return Type.MARKET;
        } else if( "MOC".equalsIgnoreCase(orderType) ) {
            return Type.MARKET_ON_CLOSE;
        } else if( "MOO".equalsIgnoreCase(orderType) ) {
            return Type.MARKET_ON_OPEN;
        } else {
            throw new IllegalStateException( "Unknown order.type: " + orderType );
        }
    }

    /**
     * Collects every property whose key starts with "pair". Values are
     * expected in "LONG:SHORT" form; the long ticker becomes the map key.
     */
    protected Map<String,String> getLongShortTickers( Properties props ) {
        Map<String,String> map = new HashMap<>();
        for( Object key : props.keySet()) {
            String keyString = (String) key;
            if( keyString.startsWith("pair") ) {
                String[] tickers = props.getProperty(keyString).split(":");
                map.put(tickers[0], tickers[1]);
            }
        }
        return map;
    }

    @Override
    public int hashCode() {
        int hash = 3;
        hash = 47 * hash + Objects.hashCode(this.startTime);
        hash = 47 * hash + Objects.hashCode(this.marketCloseTime);
        hash = 47 * hash + Objects.hashCode(this.exitTime);
        hash = 47 * hash + Objects.hashCode(this.twsHost);
        hash = 47 * hash + this.twsPort;
        hash = 47 * hash + this.twsClientId;
        hash = 47 * hash + this.tradeSizeDollars;
        hash = 47 * hash + Objects.hashCode(this.longShortTickerMap);
        hash = 47 * hash + Objects.hashCode(this.strategyDirectory);
        hash = 47 * hash + Objects.hashCode(this.entryOrderType);
        hash = 47 * hash + this.exitSeconds;
        hash = 47 * hash + Objects.hashCode(this.exitOrderType);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final EODSystemProperties other = (EODSystemProperties) obj;
        if (this.twsPort != other.twsPort) {
            return false;
        }
        if (this.twsClientId != other.twsClientId) {
            return false;
        }
        if (this.tradeSizeDollars != other.tradeSizeDollars) {
            return false;
        }
        if (this.exitSeconds != other.exitSeconds) {
            return false;
        }
        if (!Objects.equals(this.twsHost, other.twsHost)) {
            return false;
        }
        if (!Objects.equals(this.strategyDirectory, other.strategyDirectory)) {
            return false;
        }
        if (!Objects.equals(this.startTime, other.startTime)) {
            return false;
        }
        if (!Objects.equals(this.marketCloseTime, other.marketCloseTime)) {
            return false;
        }
        if (!Objects.equals(this.exitTime, other.exitTime)) {
            return false;
        }
        if (!Objects.equals(this.longShortTickerMap, other.longShortTickerMap)) {
            return false;
        }
        if (this.entryOrderType != other.entryOrderType) {
            return false;
        }
        if (this.exitOrderType != other.exitOrderType) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "EODSystemProperties{" + "startTime=" + startTime + ", marketCloseTime=" + marketCloseTime + ", exitTime=" + exitTime + ", twsHost=" + twsHost + ", twsPort=" + twsPort + ", twsClientId=" + twsClientId + ", tradeSizeDollars=" + tradeSizeDollars + ", longShortTickerMap=" + longShortTickerMap + ", strategyDirectory=" + strategyDirectory + ", entryOrderType=" + entryOrderType + ", exitSeconds=" + exitSeconds + ", exitOrderType=" + exitOrderType + '}';
    }
}
| |
package ca.krasnay.sqlbuilder;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.springframework.jdbc.core.PreparedStatementCreator;
/**
* A Spring PreparedStatementCreator that you can use like a SelectBuilder.
* Example usage is as follows:
*
* <pre>
* PreparedStatementCreator psc = new SelectCreator()
* .column("name")
* .column("salary")
* .from("emp")
* .whereEquals("id", employeeId)
* .and("salary > :limit")
* .setParameter("limit", 100000);
*
* new JdbcTemplate(dataSource).query(psc, new RowMapper() { ... });
* </pre>
*
* @author John Krasnay <john@krasnay.ca>
*/
public class SelectCreator extends AbstractSqlCreator implements Cloneable {
private static final long serialVersionUID = 1;
private SelectBuilder builder = new SelectBuilder();
/**
 * Creates an empty SelectCreator.
 */
public SelectCreator() {
}
/**
 * Copy constructor. Used by {@link #clone()}.
 *
 * @param other
 *            SelectCreator being cloned.
 */
protected SelectCreator(SelectCreator other) {
    super(other);
    // Deep-copy the builder so the copy can diverge independently.
    this.builder = other.builder.clone();
}
/**
 * Adds an expression to the WHERE clause. Synonym for
 * {@link #where(String)}, reading better after a prior condition.
 *
 * @param expr The expression to add.
 * @return This creator, for method chaining.
 */
public SelectCreator and(String expr) {
    return where(expr);
}
/**
 * Adds a predicate to the WHERE clause. Synonym for
 * {@link #where(Predicate)}.
 *
 * @param predicate The predicate to add.
 * @return This creator, for method chaining.
 */
public SelectCreator and(Predicate predicate) {
    return where(predicate);
}
/**
 * Returns a copy of this creator, created via the copy constructor.
 */
@Override
public SelectCreator clone() {
    return new SelectCreator(this);
}
/**
 * Adds a column to the SELECT clause.
 *
 * @param name Name of the column.
 * @return This creator, for method chaining.
 */
public SelectCreator column(String name) {
    builder.column(name);
    return this;
}
/**
 * Adds a column to the SELECT clause, optionally also grouping by it.
 *
 * @param name Name of the column.
 * @param groupBy True to also add the column to the GROUP BY clause.
 * @return This creator, for method chaining.
 */
public SelectCreator column(String name, boolean groupBy) {
    builder.column(name, groupBy);
    return this;
}
/**
 * Returns a PreparedStatementCreator that returns a count of the rows that
 * this creator would return.
 *
 * @param dialect
 *            Database dialect.
 * @return A PreparedStatementCreator producing the count query.
 */
public PreparedStatementCreator count(final Dialect dialect) {
    return new PreparedStatementCreator() {
        public PreparedStatement createPreparedStatement(Connection con)
        throws SQLException {
            // Wrap the current SELECT in the dialect's count query,
            // reusing this creator's bound parameters.
            return getPreparedStatementCreator()
            .setSql(dialect.createCountSelect(builder.toString()))
            .createPreparedStatement(con);
        }
    };
}
public SelectCreator distinct() {
builder.distinct();
return this;
}
public SelectCreator forUpdate() {
builder.forUpdate();
return this;
}
public SelectCreator from(String table) {
builder.from(table);
return this;
}
@Override
protected AbstractSqlBuilder getBuilder() {
return builder;
}
public List<UnionSelectCreator> getUnions() {
List<UnionSelectCreator> unions = new ArrayList<UnionSelectCreator>();
for (SelectBuilder unionSB : builder.getUnions()) {
unions.add(new UnionSelectCreator(this, unionSB));
}
return unions;
}
public SelectCreator groupBy(String expr) {
builder.groupBy(expr);
return this;
}
public SelectCreator having(String expr) {
builder.having(expr);
return this;
}
public SelectCreator join(String join) {
builder.join(join);
return this;
}
public SelectCreator leftJoin(String join) {
builder.leftJoin(join);
return this;
}
public SelectCreator noWait() {
builder.noWait();
return this;
}
public SelectCreator orderBy(String name) {
builder.orderBy(name);
return this;
}
public SelectCreator orderBy(String name, boolean ascending) {
builder.orderBy(name, ascending);
return this;
}
/**
* Returns a PreparedStatementCreator that returns a page of the underlying
* result set.
*
* @param dialect
* Database dialect to use.
* @param limit
* Maximum number of rows to return.
* @param offset
* Index of the first row to return.
*/
public PreparedStatementCreator page(final Dialect dialect, final int limit, final int offset) {
return new PreparedStatementCreator() {
public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
return getPreparedStatementCreator()
.setSql(dialect.createPageSelect(builder.toString(), limit, offset))
.createPreparedStatement(con);
}
};
}
@Override
public SelectCreator setParameter(String name, Object value) {
super.setParameter(name, value);
return this;
}
public SubSelectCreator subSelectColumn(String alias) {
SubSelectBuilder subSelectBuilder = new SubSelectBuilder(alias);
builder.column(subSelectBuilder);
return new SubSelectCreator(this, subSelectBuilder);
}
@Override
public String toString() {
ParameterizedPreparedStatementCreator ppsc = getPreparedStatementCreator();
StringBuilder sb = new StringBuilder(builder.toString());
List<String> params = new ArrayList<String>(ppsc.getParameterMap().keySet());
Collections.sort(params);
for (String s : params) {
sb.append(", ").append(s).append("=").append(ppsc.getParameterMap().get(s));
}
return sb.toString();
}
public UnionSelectCreator union() {
SelectBuilder unionSelectBuilder = new SelectBuilder();
builder.union(unionSelectBuilder);
return new UnionSelectCreator(this, unionSelectBuilder);
}
public SelectCreator where(String expr) {
builder.where(expr);
return this;
}
public SelectCreator where(Predicate predicate) {
predicate.init(this);
builder.where(predicate.toSql());
return this;
}
public SelectCreator whereEquals(String expr, Object value) {
String param = allocateParameter();
builder.where(expr + " = :" + param);
setParameter(param, value);
return this;
}
public SelectCreator whereIn(String expr, List<?> values) {
StringBuilder sb = new StringBuilder();
sb.append(expr).append(" in (");
boolean first = true;
for (Object value : values) {
String param = allocateParameter();
setParameter(param, value);
if (!first) {
sb.append(", ");
}
sb.append(":").append(param);
first = false;
}
sb.append(")");
builder.where(sb.toString());
return this;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.containeranalysis.v1beta1.model;
/**
* Identifies all appearances of this vulnerability in the package for a specific distro/location.
* For example: glibc in cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Container Analysis API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Detail extends com.google.api.client.json.GenericJson {
  /**
   * Required. The CPE URI in [cpe format](https://cpe.mitre.org/specification/) in which the
   * vulnerability manifests. Examples include distro or storage location for vulnerable jar.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String cpeUri;
  /**
   * A vendor-specific description of this note.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;
  /**
   * The fix for this specific package version.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private VulnerabilityLocation fixedLocation;
  /**
   * Whether this detail is obsolete. Occurrences are expected not to point to obsolete details.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean isObsolete;
  /**
   * The max version of the package in which the vulnerability exists.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Version maxAffectedVersion;
  /**
   * The min version of the package in which the vulnerability exists.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Version minAffectedVersion;
  /**
   * Required. The name of the package where the vulnerability was found.
   * The value may be {@code null}.
   */
  // "package" is a Java keyword, so the JSON field name is declared via the
  // @Key annotation and the Java field carries the generator's "__" suffix.
  @com.google.api.client.util.Key("package")
  private java.lang.String package__;
  /**
   * The type of package; whether native or non native(ruby gems, node.js packages etc).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String packageType;
  /**
   * The severity (eg: distro assigned severity) for this vulnerability.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String severityName;
  /**
   * The time this information was last changed at the source. This is an upstream timestamp from
   * the underlying information source - e.g. Ubuntu security tracker.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceUpdateTime;
  /**
   * Required. The CPE URI in [cpe format](https://cpe.mitre.org/specification/) in which the
   * vulnerability manifests. Examples include distro or storage location for vulnerable jar.
   * @return value or {@code null} for none
   */
  public java.lang.String getCpeUri() {
    return cpeUri;
  }
  /**
   * Required. The CPE URI in [cpe format](https://cpe.mitre.org/specification/) in which the
   * vulnerability manifests. Examples include distro or storage location for vulnerable jar.
   * @param cpeUri cpeUri or {@code null} for none
   */
  public Detail setCpeUri(java.lang.String cpeUri) {
    this.cpeUri = cpeUri;
    return this;
  }
  /**
   * A vendor-specific description of this note.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }
  /**
   * A vendor-specific description of this note.
   * @param description description or {@code null} for none
   */
  public Detail setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }
  /**
   * The fix for this specific package version.
   * @return value or {@code null} for none
   */
  public VulnerabilityLocation getFixedLocation() {
    return fixedLocation;
  }
  /**
   * The fix for this specific package version.
   * @param fixedLocation fixedLocation or {@code null} for none
   */
  public Detail setFixedLocation(VulnerabilityLocation fixedLocation) {
    this.fixedLocation = fixedLocation;
    return this;
  }
  /**
   * Whether this detail is obsolete. Occurrences are expected not to point to obsolete details.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getIsObsolete() {
    return isObsolete;
  }
  /**
   * Whether this detail is obsolete. Occurrences are expected not to point to obsolete details.
   * @param isObsolete isObsolete or {@code null} for none
   */
  public Detail setIsObsolete(java.lang.Boolean isObsolete) {
    this.isObsolete = isObsolete;
    return this;
  }
  /**
   * The max version of the package in which the vulnerability exists.
   * @return value or {@code null} for none
   */
  public Version getMaxAffectedVersion() {
    return maxAffectedVersion;
  }
  /**
   * The max version of the package in which the vulnerability exists.
   * @param maxAffectedVersion maxAffectedVersion or {@code null} for none
   */
  public Detail setMaxAffectedVersion(Version maxAffectedVersion) {
    this.maxAffectedVersion = maxAffectedVersion;
    return this;
  }
  /**
   * The min version of the package in which the vulnerability exists.
   * @return value or {@code null} for none
   */
  public Version getMinAffectedVersion() {
    return minAffectedVersion;
  }
  /**
   * The min version of the package in which the vulnerability exists.
   * @param minAffectedVersion minAffectedVersion or {@code null} for none
   */
  public Detail setMinAffectedVersion(Version minAffectedVersion) {
    this.minAffectedVersion = minAffectedVersion;
    return this;
  }
  /**
   * Required. The name of the package where the vulnerability was found.
   * @return value or {@code null} for none
   */
  public java.lang.String getPackage() {
    return package__;
  }
  /**
   * Required. The name of the package where the vulnerability was found.
   * @param package__ package__ or {@code null} for none
   */
  public Detail setPackage(java.lang.String package__) {
    this.package__ = package__;
    return this;
  }
  /**
   * The type of package; whether native or non native(ruby gems, node.js packages etc).
   * @return value or {@code null} for none
   */
  public java.lang.String getPackageType() {
    return packageType;
  }
  /**
   * The type of package; whether native or non native(ruby gems, node.js packages etc).
   * @param packageType packageType or {@code null} for none
   */
  public Detail setPackageType(java.lang.String packageType) {
    this.packageType = packageType;
    return this;
  }
  /**
   * The severity (eg: distro assigned severity) for this vulnerability.
   * @return value or {@code null} for none
   */
  public java.lang.String getSeverityName() {
    return severityName;
  }
  /**
   * The severity (eg: distro assigned severity) for this vulnerability.
   * @param severityName severityName or {@code null} for none
   */
  public Detail setSeverityName(java.lang.String severityName) {
    this.severityName = severityName;
    return this;
  }
  /**
   * The time this information was last changed at the source. This is an upstream timestamp from
   * the underlying information source - e.g. Ubuntu security tracker.
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceUpdateTime() {
    return sourceUpdateTime;
  }
  /**
   * The time this information was last changed at the source. This is an upstream timestamp from
   * the underlying information source - e.g. Ubuntu security tracker.
   * @param sourceUpdateTime sourceUpdateTime or {@code null} for none
   */
  public Detail setSourceUpdateTime(java.lang.String sourceUpdateTime) {
    this.sourceUpdateTime = sourceUpdateTime;
    return this;
  }
  @Override
  public Detail set(String fieldName, Object value) {
    return (Detail) super.set(fieldName, value);
  }
  @Override
  public Detail clone() {
    return (Detail) super.clone();
  }
}
| |
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.tamaskojedzinszky.android.materialsearch.sample.widget;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.widget.LinearLayout;
import hu.tamaskojedzinszky.android.materialsearch.sample.R;
/**
 * Child view of SlidingTabLayout that holds the tab title views and draws the
 * selected-tab indicator, the bottom border and the vertical dividers between
 * tabs. Colors come from a custom TabColorizer when one is set, otherwise
 * from theme-derived defaults.
 */
class SlidingTabStrip extends LinearLayout {
    // Defaults used when no custom colorizer is installed. Thickness values
    // are in dips and converted to pixels in the constructor.
    private static final int DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS = 1;
    private static final byte DEFAULT_BOTTOM_BORDER_COLOR_ALPHA = 0x26;
    private static final int SELECTED_INDICATOR_THICKNESS_DIPS = 5;
    private static final int DEFAULT_SELECTED_INDICATOR_COLOR = 0xFF33B5E5;
    private static final int DEFAULT_DIVIDER_THICKNESS_DIPS = 1;
    private static final byte DEFAULT_DIVIDER_COLOR_ALPHA = 0x20;
    // Divider height expressed as a fraction of the strip's height (0..1).
    private static final float DEFAULT_DIVIDER_HEIGHT = 0.5f;
    private final int mBottomBorderThickness;
    private final Paint mBottomBorderPaint;
    private final int mSelectedIndicatorThickness;
    private final Paint mSelectedIndicatorPaint;
    private final int mDefaultBottomBorderColor;
    private final Paint mDividerPaint;
    private final float mDividerHeight;
    // Position/offset reported by the ViewPager; drives indicator placement.
    private int mSelectedPosition;
    private float mSelectionOffset;
    private SlidingTabLayout.TabColorizer mCustomTabColorizer;
    private final SimpleTabColorizer mDefaultTabColorizer;
    SlidingTabStrip(Context context) {
        this(context, null);
    }
    SlidingTabStrip(Context context, AttributeSet attrs) {
        super(context, attrs);
        // ViewGroups skip onDraw by default; we draw the indicator ourselves.
        setWillNotDraw(false);
        final float density = getResources().getDisplayMetrics().density;
        // Derive default border/divider colors from the theme's primary color.
        TypedValue outValue = new TypedValue();
        context.getTheme().resolveAttribute(R.attr.colorPrimary, outValue, true);
        final int themeForegroundColor = outValue.data;
        mDefaultBottomBorderColor = setColorAlpha(themeForegroundColor,
                DEFAULT_BOTTOM_BORDER_COLOR_ALPHA);
        mDefaultTabColorizer = new SimpleTabColorizer();
        mDefaultTabColorizer.setIndicatorColors(DEFAULT_SELECTED_INDICATOR_COLOR);
        mDefaultTabColorizer.setDividerColors(setColorAlpha(themeForegroundColor,
                DEFAULT_DIVIDER_COLOR_ALPHA));
        mBottomBorderThickness = (int) (DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS * density);
        mBottomBorderPaint = new Paint();
        mBottomBorderPaint.setColor(mDefaultBottomBorderColor);
        mSelectedIndicatorThickness = (int) (SELECTED_INDICATOR_THICKNESS_DIPS * density);
        mSelectedIndicatorPaint = new Paint();
        mDividerHeight = DEFAULT_DIVIDER_HEIGHT;
        mDividerPaint = new Paint();
        mDividerPaint.setStrokeWidth((int) (DEFAULT_DIVIDER_THICKNESS_DIPS * density));
    }
    void setCustomTabColorizer(SlidingTabLayout.TabColorizer customTabColorizer) {
        mCustomTabColorizer = customTabColorizer;
        invalidate();
    }
    void setSelectedIndicatorColors(int... colors) {
        // Make sure that the custom colorizer is removed
        mCustomTabColorizer = null;
        mDefaultTabColorizer.setIndicatorColors(colors);
        invalidate();
    }
    void setDividerColors(int... colors) {
        // Make sure that the custom colorizer is removed
        mCustomTabColorizer = null;
        mDefaultTabColorizer.setDividerColors(colors);
        invalidate();
    }
    // Called by the owning layout as the ViewPager scrolls; positionOffset is
    // the fractional scroll progress (0..1) toward the next page.
    void onViewPagerPageChanged(int position, float positionOffset) {
        mSelectedPosition = position;
        mSelectionOffset = positionOffset;
        invalidate();
    }
    @Override
    protected void onDraw(Canvas canvas) {
        final int height = getHeight();
        final int childCount = getChildCount();
        // Clamp the divider fraction to [0, 1] before converting to pixels.
        final int dividerHeightPx = (int) (Math.min(Math.max(0f, mDividerHeight), 1f) * height);
        final SlidingTabLayout.TabColorizer tabColorizer = mCustomTabColorizer != null
                ? mCustomTabColorizer
                : mDefaultTabColorizer;
        // Thick colored underline below the current selection
        if (childCount > 0) {
            View selectedTitle = getChildAt(mSelectedPosition);
            int left = selectedTitle.getLeft();
            int right = selectedTitle.getRight();
            int color = tabColorizer.getIndicatorColor(mSelectedPosition);
            if (mSelectionOffset > 0f && mSelectedPosition < (getChildCount() - 1)) {
                int nextColor = tabColorizer.getIndicatorColor(mSelectedPosition + 1);
                if (color != nextColor) {
                    // Cross-fade between the two tabs' indicator colors.
                    color = blendColors(nextColor, color, mSelectionOffset);
                }
                // Draw the selection partway between the tabs
                View nextTitle = getChildAt(mSelectedPosition + 1);
                left = (int) (mSelectionOffset * nextTitle.getLeft() +
                        (1.0f - mSelectionOffset) * left);
                right = (int) (mSelectionOffset * nextTitle.getRight() +
                        (1.0f - mSelectionOffset) * right);
            }
            mSelectedIndicatorPaint.setColor(color);
            canvas.drawRect(left, height - mSelectedIndicatorThickness, right,
                    height, mSelectedIndicatorPaint);
        }
        // Thin underline along the entire bottom edge
        canvas.drawRect(0, height - mBottomBorderThickness, getWidth(), height, mBottomBorderPaint);
        // Vertical separators between the titles
        int separatorTop = (height - dividerHeightPx) / 2;
        for (int i = 0; i < childCount - 1; i++) {
            View child = getChildAt(i);
            mDividerPaint.setColor(tabColorizer.getDividerColor(i));
            canvas.drawLine(child.getRight(), separatorTop, child.getRight(),
                    separatorTop + dividerHeightPx, mDividerPaint);
        }
    }
    /**
     * Set the alpha value of the {@code color} to be the given {@code alpha} value.
     */
    private static int setColorAlpha(int color, byte alpha) {
        return Color.argb(alpha, Color.red(color), Color.green(color), Color.blue(color));
    }
    /**
     * Blend {@code color1} and {@code color2} using the given ratio.
     *
     * @param ratio of which to blend. 1.0 will return {@code color1}, 0.5 will give an even blend,
     *              0.0 will return {@code color2}.
     */
    private static int blendColors(int color1, int color2, float ratio) {
        final float inverseRation = 1f - ratio;
        float r = (Color.red(color1) * ratio) + (Color.red(color2) * inverseRation);
        float g = (Color.green(color1) * ratio) + (Color.green(color2) * inverseRation);
        float b = (Color.blue(color1) * ratio) + (Color.blue(color2) * inverseRation);
        return Color.rgb((int) r, (int) g, (int) b);
    }
    /**
     * Default colorizer that cycles through fixed color arrays by position.
     */
    private static class SimpleTabColorizer implements SlidingTabLayout.TabColorizer {
        private int[] mIndicatorColors;
        private int[] mDividerColors;
        @Override
        public final int getIndicatorColor(int position) {
            return mIndicatorColors[position % mIndicatorColors.length];
        }
        @Override
        public final int getDividerColor(int position) {
            return mDividerColors[position % mDividerColors.length];
        }
        void setIndicatorColors(int... colors) {
            mIndicatorColors = colors;
        }
        void setDividerColors(int... colors) {
            mDividerColors = colors;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.ajp;
import java.util.Hashtable;
/**
* Constants.
*
* @author Remy Maucherat
*/
public final class Constants {

    public static final int DEFAULT_CONNECTION_TIMEOUT = -1;

    // Prefix codes for message types from server to container
    public static final byte JK_AJP13_FORWARD_REQUEST = 2;
    public static final byte JK_AJP13_SHUTDOWN = 7; // XXX Unused
    public static final byte JK_AJP13_PING_REQUEST = 8; // XXX Unused
    public static final byte JK_AJP13_CPING_REQUEST = 10;

    // Prefix codes for message types from container to server
    public static final byte JK_AJP13_SEND_BODY_CHUNK = 3;
    public static final byte JK_AJP13_SEND_HEADERS = 4;
    public static final byte JK_AJP13_END_RESPONSE = 5;
    public static final byte JK_AJP13_GET_BODY_CHUNK = 6;
    public static final byte JK_AJP13_CPONG_REPLY = 9;

    // Integer codes for common response header strings
    public static final int SC_RESP_CONTENT_TYPE = 0xA001;
    public static final int SC_RESP_CONTENT_LANGUAGE = 0xA002;
    public static final int SC_RESP_CONTENT_LENGTH = 0xA003;
    public static final int SC_RESP_DATE = 0xA004;
    public static final int SC_RESP_LAST_MODIFIED = 0xA005;
    public static final int SC_RESP_LOCATION = 0xA006;
    public static final int SC_RESP_SET_COOKIE = 0xA007;
    public static final int SC_RESP_SET_COOKIE2 = 0xA008;
    public static final int SC_RESP_SERVLET_ENGINE = 0xA009;
    public static final int SC_RESP_STATUS = 0xA00A;
    public static final int SC_RESP_WWW_AUTHENTICATE = 0xA00B;
    // Number of coded response headers (size of responseTransArray).
    public static final int SC_RESP_AJP13_MAX = 11;

    // Integer codes for common (optional) request attribute names
    public static final byte SC_A_CONTEXT = 1; // XXX Unused
    public static final byte SC_A_SERVLET_PATH = 2; // XXX Unused
    public static final byte SC_A_REMOTE_USER = 3;
    public static final byte SC_A_AUTH_TYPE = 4;
    public static final byte SC_A_QUERY_STRING = 5;
    public static final byte SC_A_JVM_ROUTE = 6;
    public static final byte SC_A_SSL_CERT = 7;
    public static final byte SC_A_SSL_CIPHER = 8;
    public static final byte SC_A_SSL_SESSION = 9;
    public static final byte SC_A_SSL_KEY_SIZE = 11;
    public static final byte SC_A_SECRET = 12;
    public static final byte SC_A_STORED_METHOD = 13;
    // Used for attributes which are not in the list above
    public static final byte SC_A_REQ_ATTRIBUTE = 10;

    /**
     * AJP private request attributes
     */
    public static final String SC_A_REQ_LOCAL_ADDR = "AJP_LOCAL_ADDR";
    public static final String SC_A_REQ_REMOTE_PORT = "AJP_REMOTE_PORT";
    public static final String SC_A_SSL_PROTOCOL = "AJP_SSL_PROTOCOL";

    // Terminates list of attributes
    public static final byte SC_A_ARE_DONE = (byte)0xFF;

    // Ajp13 specific - needs refactoring for the new model
    /**
     * Default maximum total byte size for a AJP packet
     */
    public static final int MAX_PACKET_SIZE = 8192;
    /**
     * Size of basic packet header
     */
    public static final int H_SIZE = 4;
    /**
     * Size of the header metadata
     */
    public static final int READ_HEAD_LEN = 6;
    public static final int SEND_HEAD_LEN = 8;
    /**
     * Default maximum size of data that can be sent in one packet
     */
    public static final int MAX_READ_SIZE = MAX_PACKET_SIZE - READ_HEAD_LEN;
    public static final int MAX_SEND_SIZE = MAX_PACKET_SIZE - SEND_HEAD_LEN;

    // Translates integer codes to names of HTTP methods
    private static final String [] methodTransArray = {
        "OPTIONS",
        "GET",
        "HEAD",
        "POST",
        "PUT",
        "DELETE",
        "TRACE",
        "PROPFIND",
        "PROPPATCH",
        "MKCOL",
        "COPY",
        "MOVE",
        "LOCK",
        "UNLOCK",
        "ACL",
        "REPORT",
        "VERSION-CONTROL",
        "CHECKIN",
        "CHECKOUT",
        "UNCHECKOUT",
        "SEARCH",
        "MKWORKSPACE",
        "UPDATE",
        "LABEL",
        "MERGE",
        "BASELINE-CONTROL",
        "MKACTIVITY"
    };

    /**
     * Converts an AJP coded HTTP method to the method name.
     * No bounds check is performed: the caller is expected to pass a valid,
     * already zero-based index (an out-of-range code throws
     * ArrayIndexOutOfBoundsException).
     * @param code the coded value
     * @return the string value of the method
     */
    public static final String getMethodForCode(final int code) {
        return methodTransArray[code];
    }

    // NOTE: the (byte) cast widens back to int as -1, not 0xFF.
    public static final int SC_M_JK_STORED = (byte) 0xFF;

    // id's for common request headers
    public static final int SC_REQ_ACCEPT = 1;
    public static final int SC_REQ_ACCEPT_CHARSET = 2;
    public static final int SC_REQ_ACCEPT_ENCODING = 3;
    public static final int SC_REQ_ACCEPT_LANGUAGE = 4;
    public static final int SC_REQ_AUTHORIZATION = 5;
    public static final int SC_REQ_CONNECTION = 6;
    public static final int SC_REQ_CONTENT_TYPE = 7;
    public static final int SC_REQ_CONTENT_LENGTH = 8;
    public static final int SC_REQ_COOKIE = 9;
    public static final int SC_REQ_COOKIE2 = 10;
    public static final int SC_REQ_HOST = 11;
    public static final int SC_REQ_PRAGMA = 12;
    public static final int SC_REQ_REFERER = 13;
    public static final int SC_REQ_USER_AGENT = 14;

    // Translates integer codes to request header names
    private static final String [] headerTransArray = {
        "accept",
        "accept-charset",
        "accept-encoding",
        "accept-language",
        "authorization",
        "connection",
        "content-type",
        "content-length",
        "cookie",
        "cookie2",
        "host",
        "pragma",
        "referer",
        "user-agent"
    };

    /**
     * Converts an AJP coded HTTP request header to the header name.
     * No bounds check is performed (see {@link #getMethodForCode(int)}).
     * @param code the coded value
     * @return the string value of the header name
     */
    public static final String getHeaderForCode(final int code) {
        return headerTransArray[code];
    }

    // Translates integer codes to response header names
    private static final String [] responseTransArray = {
        "Content-Type",
        "Content-Language",
        "Content-Length",
        "Date",
        "Last-Modified",
        "Location",
        "Set-Cookie",
        "Set-Cookie2",
        "Servlet-Engine",
        "Status",
        "WWW-Authenticate"
    };

    /**
     * Converts an AJP coded response header name to the HTTP response header name.
     * @param code the coded value
     * @return the string value of the header
     */
    public static final String getResponseHeaderForCode(final int code) {
        return responseTransArray[code];
    }

    // Reverse lookup of responseTransArray: header name -> AJP code. Filled in
    // once during class initialization and never modified afterwards, so a
    // plain HashMap is safe to read from multiple threads (static-initializer
    // writes happen-before all subsequent reads of the class). The previous
    // Hashtable/try-catch was unnecessary: the loop below indexes only
    // 0..SC_RESP_AJP13_MAX-1, which cannot throw.
    private static final java.util.Map<String,Integer> responseTransHash =
        new java.util.HashMap<String,Integer>(20);
    static {
        for (int i = 0; i < SC_RESP_AJP13_MAX; i++) {
            responseTransHash.put(getResponseHeaderForCode(i),
                                  Integer.valueOf(0xA001 + i));
        }
    }

    /**
     * Converts an HTTP response header name to its AJP code.
     * @param header the header name (case-sensitive, e.g. "Content-Type")
     * @return the AJP code (0xA001..0xA00B), or 0 if the header is not coded
     */
    public static final int getResponseAjpIndex(String header)
    {
        Integer i = responseTransHash.get(header);
        return (i == null) ? 0 : i.intValue();
    }
}
| |
/**********************************************************************************
*
* $Id: SakaiProperties.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $
*
***********************************************************************************
*
* Copyright (c) 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
import org.springframework.core.io.Resource;
import org.springframework.util.CollectionUtils;
import org.springframework.util.DefaultPropertiesPersister;
import org.springframework.util.PropertiesPersister;
/**
* A configurer for "sakai.properties" files. These differ from the usual Spring default properties
* files by mixing together lines which define property-value pairs and lines which define
* bean property overrides. The two can be distinguished because Sakai conventionally uses
* the bean name separator "@" instead of the default "."
*
* This class creates separate PropertyPlaceholderConfigurer and PropertyOverrideConfigurer
* objects to handle bean configuration, and loads them with the input properties.
*
* SakaiProperties configuration supports most of the properties documented for
* PropertiesFactoryBean, PropertyPlaceholderConfigurer, and PropertyOverrideConfigurer.
*/
public class SakaiProperties implements BeanFactoryPostProcessorCreator, InitializingBean {
    private static Log log = LogFactory.getLog(SakaiProperties.class);
    // Loads and merges the sakai.properties files, tracking per-file contents.
    private SakaiPropertiesFactoryBean propertiesFactoryBean = new SakaiPropertiesFactoryBean();
    //private PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
    // Applies Sakai-style "property@beanName" overrides to bean definitions.
    private ReversiblePropertyOverrideConfigurer propertyOverrideConfigurer = new ReversiblePropertyOverrideConfigurer();
    // Resolves ${...} placeholders in bean definitions.
    private PropertyPlaceholderConfigurer propertyPlaceholderConfigurer = new PropertyPlaceholderConfigurer();
public SakaiProperties() {
// Set defaults.
propertiesFactoryBean.setIgnoreResourceNotFound(true);
propertyPlaceholderConfigurer.setIgnoreUnresolvablePlaceholders(true);
propertyPlaceholderConfigurer.setOrder(0);
propertyOverrideConfigurer.setBeanNameAtEnd(true);
propertyOverrideConfigurer.setBeanNameSeparator("@");
propertyOverrideConfigurer.setIgnoreInvalidKeys(true);
}
    /**
     * Loads the properties and wires them into both bean configurers.
     * Called by Spring after all bean properties have been set.
     *
     * @throws Exception if the underlying properties cannot be loaded
     */
    public void afterPropertiesSet() throws Exception {
        // Connect properties to configurers.
        propertiesFactoryBean.afterPropertiesSet();
        propertyPlaceholderConfigurer.setProperties((Properties)propertiesFactoryBean.getObject());
        propertyOverrideConfigurer.setProperties((Properties)propertiesFactoryBean.getObject());
    }
/* (non-Javadoc)
* @see org.sakaiproject.util.BeanFactoryPostProcessorCreator#getBeanFactoryPostProcessors()
*/
public Collection<BeanFactoryPostProcessor> getBeanFactoryPostProcessors() {
return (Arrays.asList(new BeanFactoryPostProcessor[] {propertyOverrideConfigurer, propertyPlaceholderConfigurer}));
}
/**
* Gets the individual properties from each properties file which is read in
*
* @return a map of filename -> Properties
*/
public Map<String, Properties> getSeparateProperties() {
LinkedHashMap<String, Properties> m = new LinkedHashMap<String, Properties>();
/* This doesn't work because spring always returns only the first of the properties files -AZ
* very disappointing because it means we can't tell which file a property came from
try {
// have to use reflection to get the fields here because Spring does not expose them directly
Field localPropertiesField = PropertiesLoaderSupport.class.getDeclaredField("localProperties");
Field locationsField = PropertiesLoaderSupport.class.getDeclaredField("locations");
localPropertiesField.setAccessible(true);
locationsField.setAccessible(true);
Properties[] localProperties = (Properties[]) localPropertiesField.get(propertiesFactoryBean);
Resource[] locations = (Resource[]) locationsField.get(propertiesFactoryBean);
log.info("found "+locations.length+" locations and "+localProperties.length+" props files");
for (int i = 0; i < localProperties.length; i++) {
Properties p = localProperties[i];
Properties props = dereferenceProperties(p);
Resource r = locations[i];
log.info("found "+p.size()+" props ("+props.size()+") in "+r.getFilename());
if (m.put(r.getFilename(), props) != null) {
log.warn("SeparateProperties: Found use of 2 sakai properties files with the same name (probable data loss): "+r.getFilename());
}
}
} catch (Exception e) {
log.warn("SeparateProperties: Failure trying to get the separate properties: "+e);
m.clear();
m.put("ALL", getProperties());
}
*/
/*
m.put("ALL", getProperties());
*/
for (Entry<String, Properties> entry : propertiesFactoryBean.getLoadedProperties().entrySet()) {
m.put(entry.getKey(), dereferenceProperties(entry.getValue()));
}
return m;
}
/**
* INTERNAL
* @return the set of properties after processing
*/
public Properties getProperties() {
Properties rawProperties = getRawProperties();
Properties parsedProperties = dereferenceProperties(rawProperties);
return parsedProperties;
}
/**
 * INTERNAL
 * @return the complete set of properties exactly as read from the files;
 *         an empty Properties object if reading fails
 */
public Properties getRawProperties() {
    try {
        return (Properties) propertiesFactoryBean.getObject();
    } catch (IOException ioe) {
        if (log.isWarnEnabled()) {
            log.warn("Error collecting Sakai properties", ioe);
        }
        return new Properties();
    }
}
/**
 * Dereferences property placeholders in the given {@link Properties} exactly as
 * the {@link BeanFactoryPostProcessor}s in this object would during bean-factory
 * processing. Spring does not expose that resolution step independently, hence
 * the reflective call into PropertyPlaceholderConfigurer.parseStringValue.
 *
 * @param srcProperties a collection of name-value pairs
 * @return a new collection with placeholders resolved; <code>null</code> if
 *         <code>srcProperties</code> is <code>null</code>; the same object if it is empty
 * @throws RuntimeException if any aspect of processing fails
 */
private Properties dereferenceProperties(Properties srcProperties) throws RuntimeException {
    if (srcProperties == null) {
        return null;
    }
    if (srcProperties.isEmpty()) {
        return srcProperties;
    }
    try {
        PropertyPlaceholderConfigurer resolver = new PropertyPlaceholderConfigurer();
        resolver.setIgnoreUnresolvablePlaceholders(true);
        // parseStringValue is protected in Spring, so we have to reach in reflectively
        Method parse = resolver.getClass().getDeclaredMethod("parseStringValue", String.class, Properties.class, Set.class);
        parse.setAccessible(true);
        Properties resolved = new Properties();
        for (Map.Entry<Object, Object> entry : srcProperties.entrySet()) {
            String value = (String) parse.invoke(resolver, (String) entry.getValue(), srcProperties, new HashSet<Object>());
            resolved.setProperty((String) entry.getKey(), value);
        }
        return resolved;
    } catch (RuntimeException re) {
        throw re;
    } catch (Exception ex) {
        throw new RuntimeException("Failed to dereference properties", ex);
    }
}
// Delegate properties loading: these setters simply forward configuration to the
// internal SakaiPropertiesFactoryBean so this bean can be configured like a
// standard Spring PropertiesFactoryBean.
/** Sets a single local Properties object to merge in. */
public void setProperties(Properties properties) {
propertiesFactoryBean.setProperties(properties);
}
/** Sets several local Properties objects to merge in. */
public void setPropertiesArray(Properties[] propertiesArray) {
propertiesFactoryBean.setPropertiesArray(propertiesArray);
}
/** Sets a single resource location to load properties from. */
public void setLocation(Resource location) {
propertiesFactoryBean.setLocation(location);
}
/** Sets the resource locations to load properties from, in order. */
public void setLocations(Resource[] locations) {
propertiesFactoryBean.setLocations(locations);
}
/** Sets the charset used when reading non-XML properties files. */
public void setFileEncoding(String encoding) {
propertiesFactoryBean.setFileEncoding(encoding);
}
/** If true, missing properties files are logged and skipped instead of failing. */
public void setIgnoreResourceNotFound(boolean ignoreResourceNotFound) {
propertiesFactoryBean.setIgnoreResourceNotFound(ignoreResourceNotFound);
}
/** If true, local properties override file-loaded ones (instead of vice versa). */
public void setLocalOverride(boolean localOverride) {
propertiesFactoryBean.setLocalOverride(localOverride);
}
// Delegate PropertyPlaceholderConfigurer: forward placeholder-resolution settings
// to the internal Spring PropertyPlaceholderConfigurer.
/** If true, unresolvable ${...} placeholders are left as-is rather than failing. */
public void setIgnoreUnresolvablePlaceholders(boolean ignoreUnresolvablePlaceholders) {
propertyPlaceholderConfigurer.setIgnoreUnresolvablePlaceholders(ignoreUnresolvablePlaceholders);
}
/** Sets the ordering of this post-processor relative to others. */
public void setOrder(int order) {
propertyPlaceholderConfigurer.setOrder(order);
}
/** Sets the placeholder prefix (Spring default is "${"). */
public void setPlaceholderPrefix(String placeholderPrefix) {
propertyPlaceholderConfigurer.setPlaceholderPrefix(placeholderPrefix);
}
/** Sets the placeholder suffix (Spring default is "}"). */
public void setPlaceholderSuffix(String placeholderSuffix) {
propertyPlaceholderConfigurer.setPlaceholderSuffix(placeholderSuffix);
}
/** If true, OS environment variables are consulted when resolving placeholders. */
public void setSearchSystemEnvironment(boolean searchSystemEnvironment) {
propertyPlaceholderConfigurer.setSearchSystemEnvironment(searchSystemEnvironment);
}
/** Sets how JVM system properties interact with resolution (by int constant). */
public void setSystemPropertiesMode(int systemPropertiesMode) {
propertyPlaceholderConfigurer.setSystemPropertiesMode(systemPropertiesMode);
}
/** Sets the system-properties mode by constant name (e.g. "SYSTEM_PROPERTIES_MODE_OVERRIDE"). */
public void setSystemPropertiesModeName(String constantName) throws IllegalArgumentException {
propertyPlaceholderConfigurer.setSystemPropertiesModeName(constantName);
}
// Delegate PropertyOverrideConfigurer: forward bean-property override settings
// to the internal Spring PropertyOverrideConfigurer.
/** If true, the bean name appears at the end of the override key rather than the start. */
public void setBeanNameAtEnd(boolean beanNameAtEnd) {
propertyOverrideConfigurer.setBeanNameAtEnd(beanNameAtEnd);
}
/** Sets the separator between bean name and property path in override keys. */
public void setBeanNameSeparator(String beanNameSeparator) {
propertyOverrideConfigurer.setBeanNameSeparator(beanNameSeparator);
}
/** If true, override keys that do not match any bean property are ignored. */
public void setIgnoreInvalidKeys(boolean ignoreInvalidKeys) {
propertyOverrideConfigurer.setIgnoreInvalidKeys(ignoreInvalidKeys);
}
/**
 * Reimplementation of Spring's PropertiesFactoryBean/PropertiesLoaderSupport that
 * additionally records which source (file or locally-set Properties object) each
 * property set came from, exposed via {@link #getLoadedProperties()}. A simple
 * subclass override was impossible because the Spring classes keep the relevant
 * state in private/final members.
 *
 * @author Spring Framework
 * @author Aaron Zeckoski (azeckoski @ vt.edu)
 */
public class SakaiPropertiesFactoryBean implements FactoryBean, InitializingBean {
public static final String XML_FILE_EXTENSION = ".xml";
final Log log = LogFactory.getLog(SakaiPropertiesFactoryBean.class);
// filename (or "local<i>") -> the Properties loaded from that source, in load order
private Map<String, Properties> loadedProperties = new LinkedHashMap<String, Properties>();
/**
 * @return a map of source name -> properties for everything loaded here
 */
public Map<String, Properties> getLoadedProperties() {
return loadedProperties;
}
// locally-set Properties objects (from setProperties/setPropertiesArray)
private Properties[] localProperties;
// resource locations to load from, in order (later files override earlier ones)
private Resource[] locations;
private boolean localOverride = false;
private boolean ignoreResourceNotFound = false;
// charset for non-XML files; null means the persister's default
private String fileEncoding;
private PropertiesPersister propertiesPersister = new DefaultPropertiesPersister();
private boolean singleton = true;
// cached merged Properties when running as a singleton
private Object singletonInstance;
public final void setSingleton(boolean singleton) {
// ignore this
}
public final boolean isSingleton() {
return this.singleton;
}
// build and cache the merged instance once configuration is complete
public final void afterPropertiesSet() throws IOException {
if (this.singleton) {
this.singletonInstance = createInstance();
}
}
public final Object getObject() throws IOException {
if (this.singleton) {
return this.singletonInstance;
} else {
return createInstance();
}
}
@SuppressWarnings("rawtypes")
public Class getObjectType() {
return Properties.class;
}
protected Object createInstance() throws IOException {
return mergeProperties();
}
public void setProperties(Properties properties) {
this.localProperties = new Properties[] {properties};
}
public void setPropertiesArray(Properties[] propertiesArray) { // unused
this.localProperties = propertiesArray;
}
public void setLocation(Resource location) { // unused
this.locations = new Resource[] {location};
}
public void setLocations(Resource[] locations) {
this.locations = locations;
}
public void setLocalOverride(boolean localOverride) {
this.localOverride = localOverride;
}
public void setIgnoreResourceNotFound(boolean ignoreResourceNotFound) {
this.ignoreResourceNotFound = ignoreResourceNotFound;
}
public void setFileEncoding(String encoding) {
this.fileEncoding = encoding;
}
public void setPropertiesPersister(PropertiesPersister propertiesPersister) {
this.propertiesPersister =
(propertiesPersister != null ? propertiesPersister : new DefaultPropertiesPersister());
}
/**
 * Return a merged Properties instance containing both the loaded properties
 * and properties set on this FactoryBean. Merge order depends on localOverride:
 * whichever is merged last wins on duplicate keys.
 */
protected Properties mergeProperties() throws IOException {
Properties result = new Properties();
if (this.localOverride) {
// Load properties from file upfront, to let local properties override.
loadProperties(result);
}
if (this.localProperties != null) {
for (int i = 0; i < this.localProperties.length; i++) {
// record local property sets under synthetic "local<i>" source names
loadedProperties.put("local"+i, this.localProperties[i]);
CollectionUtils.mergePropertiesIntoMap(this.localProperties[i], result);
}
}
if (!this.localOverride) {
// Load properties from file afterwards, to let those properties override.
loadProperties(result);
}
if (log.isInfoEnabled()) log.info("Loaded a total of "+result.size()+" properties");
return result;
}
/**
 * Load properties into the given instance, one location at a time, recording
 * each file's properties in loadedProperties keyed by filename.
 *
 * @param props the Properties instance to load into
 * @throws java.io.IOException in case of I/O errors
 * @see #setLocations
 */
protected void loadProperties(Properties props) throws IOException {
if (this.locations != null) {
for (int i = 0; i < this.locations.length; i++) {
Resource location = this.locations[i];
if (log.isDebugEnabled()) {
log.debug("Loading properties file from " + location);
}
InputStream is = null;
try {
Properties p = new Properties();
is = location.getInputStream();
if (location.getFilename().endsWith(XML_FILE_EXTENSION)) {
this.propertiesPersister.loadFromXml(p, is);
} else {
if (this.fileEncoding != null) {
this.propertiesPersister.load(p, new InputStreamReader(is, this.fileEncoding));
} else {
this.propertiesPersister.load(p, is);
}
}
if (log.isInfoEnabled()) {
log.info("Loaded "+p.size()+" properties from file " + location);
}
// NOTE: two files with the same filename would silently replace each other here
loadedProperties.put(location.getFilename(), p);
props.putAll(p); // merge the properties
} catch (IOException ex) {
if (this.ignoreResourceNotFound) {
if (log.isWarnEnabled()) {
log.warn("Could not load properties from " + location + ": " + ex.getMessage());
}
} else {
throw ex;
}
} finally {
if (is != null) {
is.close();
}
}
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.utils.memory;
import java.lang.reflect.Field;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import com.sun.jna.Native;
import sun.misc.Unsafe;
import sun.nio.ch.DirectBuffer;
/**
 * Off-heap memory helpers built on {@link sun.misc.Unsafe} and JNA's native
 * malloc/free: raw get/put primitives, byte-by-byte fallbacks for architectures
 * that do not support unaligned access, and utilities for constructing and
 * duplicating direct ByteBuffers without going through the JDK allocation path.
 */
public abstract class MemoryUtil
{
// cap per copyMemory call (mirrors java.nio.Bits) so safepoints are not delayed too long
private static final long UNSAFE_COPY_THRESHOLD = 1024 * 1024L; // copied from java.nio.Bits
private static final Unsafe unsafe;
private static final Class<?> DIRECT_BYTE_BUFFER_CLASS, RO_DIRECT_BYTE_BUFFER_CLASS;
// reflectively-resolved field offsets inside Buffer/DirectByteBuffer
private static final long DIRECT_BYTE_BUFFER_ADDRESS_OFFSET;
private static final long DIRECT_BYTE_BUFFER_CAPACITY_OFFSET;
private static final long DIRECT_BYTE_BUFFER_LIMIT_OFFSET;
private static final long DIRECT_BYTE_BUFFER_POSITION_OFFSET;
private static final long DIRECT_BYTE_BUFFER_ATTACHMENT_OFFSET;
private static final Class<?> BYTE_BUFFER_CLASS;
private static final long BYTE_BUFFER_OFFSET_OFFSET;
private static final long BYTE_BUFFER_HB_OFFSET;
private static final long BYTE_ARRAY_BASE_OFFSET;
private static final boolean BIG_ENDIAN = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);
// true when the CPU tolerates unaligned word access, so we can use direct get/put
private static final boolean UNALIGNED;
public static final boolean INVERTED_ORDER;
static
{
String arch = System.getProperty("os.arch");
// Note that s390x architecture are not officially supported and adding it here is only done out of convenience
// for those that want to run C* on this architecture at their own risk (see #11214)
UNALIGNED = arch.equals("i386") || arch.equals("x86")
|| arch.equals("amd64") || arch.equals("x86_64") || arch.equals("s390x");
INVERTED_ORDER = UNALIGNED && !BIG_ENDIAN;
try
{
// grab the Unsafe singleton reflectively (not accessible via Unsafe.getUnsafe() here)
Field field = sun.misc.Unsafe.class.getDeclaredField("theUnsafe");
field.setAccessible(true);
unsafe = (sun.misc.Unsafe) field.get(null);
Class<?> clazz = ByteBuffer.allocateDirect(0).getClass();
DIRECT_BYTE_BUFFER_ADDRESS_OFFSET = unsafe.objectFieldOffset(Buffer.class.getDeclaredField("address"));
DIRECT_BYTE_BUFFER_CAPACITY_OFFSET = unsafe.objectFieldOffset(Buffer.class.getDeclaredField("capacity"));
DIRECT_BYTE_BUFFER_LIMIT_OFFSET = unsafe.objectFieldOffset(Buffer.class.getDeclaredField("limit"));
DIRECT_BYTE_BUFFER_POSITION_OFFSET = unsafe.objectFieldOffset(Buffer.class.getDeclaredField("position"));
DIRECT_BYTE_BUFFER_ATTACHMENT_OFFSET = unsafe.objectFieldOffset(clazz.getDeclaredField("att"));
DIRECT_BYTE_BUFFER_CLASS = clazz;
RO_DIRECT_BYTE_BUFFER_CLASS = ByteBuffer.allocateDirect(0).asReadOnlyBuffer().getClass();
clazz = ByteBuffer.allocate(0).getClass();
BYTE_BUFFER_OFFSET_OFFSET = unsafe.objectFieldOffset(ByteBuffer.class.getDeclaredField("offset"));
BYTE_BUFFER_HB_OFFSET = unsafe.objectFieldOffset(ByteBuffer.class.getDeclaredField("hb"));
BYTE_BUFFER_CLASS = clazz;
BYTE_ARRAY_BASE_OFFSET = unsafe.arrayBaseOffset(byte[].class);
}
catch (Exception e)
{
// without Unsafe access nothing in this class can work
throw new AssertionError(e);
}
}
/** @return the OS memory page size as reported by Unsafe */
public static int pageSize()
{
return unsafe.pageSize();
}
/** Reads the native address field of a (writable) direct buffer. */
public static long getAddress(ByteBuffer buffer)
{
assert buffer.getClass() == DIRECT_BYTE_BUFFER_CLASS;
return unsafe.getLong(buffer, DIRECT_BYTE_BUFFER_ADDRESS_OFFSET);
}
/** Allocates size bytes of native memory via JNA malloc; pair with {@link #free(long)}. */
public static long allocate(long size)
{
return Native.malloc(size);
}
/** Frees native memory previously returned by {@link #allocate(long)}. */
public static void free(long peer)
{
Native.free(peer);
}
public static void setByte(long address, byte b)
{
unsafe.putByte(address, b);
}
/** Fills count bytes starting at address with the value b. */
public static void setByte(long address, int count, byte b)
{
unsafe.setMemory(address, count, b);
}
public static void setShort(long address, short s)
{
unsafe.putShort(address, s);
}
// the int/long setters fall back to per-byte writes on platforms without unaligned access
public static void setInt(long address, int l)
{
if (UNALIGNED)
unsafe.putInt(address, l);
else
putIntByByte(address, l);
}
public static void setLong(long address, long l)
{
if (UNALIGNED)
unsafe.putLong(address, l);
else
putLongByByte(address, l);
}
public static byte getByte(long address)
{
return unsafe.getByte(address);
}
/** Reads an unsigned 16-bit value (returned zero-extended in an int). */
public static int getShort(long address)
{
return UNALIGNED ? unsafe.getShort(address) & 0xffff : getShortByByte(address);
}
public static int getInt(long address)
{
return UNALIGNED ? unsafe.getInt(address) : getIntByByte(address);
}
public static long getLong(long address)
{
return UNALIGNED ? unsafe.getLong(address) : getLongByByte(address);
}
/** Wraps a raw native region in a direct ByteBuffer (native byte order). */
public static ByteBuffer getByteBuffer(long address, int length)
{
return getByteBuffer(address, length, ByteOrder.nativeOrder());
}
public static ByteBuffer getByteBuffer(long address, int length, ByteOrder order)
{
ByteBuffer instance = getHollowDirectByteBuffer(order);
setByteBuffer(instance, address, length);
return instance;
}
public static ByteBuffer getHollowDirectByteBuffer()
{
return getHollowDirectByteBuffer(ByteOrder.nativeOrder());
}
/**
 * Creates an uninitialised DirectByteBuffer (no constructor run, no memory owned);
 * caller must populate it via {@link #setByteBuffer} or {@link #duplicateDirectByteBuffer}.
 */
public static ByteBuffer getHollowDirectByteBuffer(ByteOrder order)
{
ByteBuffer instance;
try
{
instance = (ByteBuffer) unsafe.allocateInstance(DIRECT_BYTE_BUFFER_CLASS);
}
catch (InstantiationException e)
{
throw new AssertionError(e);
}
instance.order(order);
return instance;
}
/** Creates an uninitialised heap ByteBuffer (no backing array until populated). */
public static ByteBuffer getHollowByteBuffer()
{
ByteBuffer instance;
try
{
instance = (ByteBuffer) unsafe.allocateInstance(BYTE_BUFFER_CLASS);
}
catch (InstantiationException e)
{
throw new AssertionError(e);
}
instance.order(ByteOrder.nativeOrder());
return instance;
}
/** Points a hollow direct buffer at [address, address+length); position is left as-is (0). */
public static void setByteBuffer(ByteBuffer instance, long address, int length)
{
unsafe.putLong(instance, DIRECT_BYTE_BUFFER_ADDRESS_OFFSET, address);
unsafe.putInt(instance, DIRECT_BYTE_BUFFER_CAPACITY_OFFSET, length);
unsafe.putInt(instance, DIRECT_BYTE_BUFFER_LIMIT_OFFSET, length);
}
/** Reads the "att" (attachment) field of a direct buffer. */
public static Object getAttachment(ByteBuffer instance)
{
assert instance.getClass() == DIRECT_BYTE_BUFFER_CLASS;
return unsafe.getObject(instance, DIRECT_BYTE_BUFFER_ATTACHMENT_OFFSET);
}
/** Writes the "att" (attachment) field of a direct buffer. */
public static void setAttachment(ByteBuffer instance, Object next)
{
assert instance.getClass() == DIRECT_BYTE_BUFFER_CLASS;
unsafe.putObject(instance, DIRECT_BYTE_BUFFER_ATTACHMENT_OFFSET, next);
}
/** Copies address/position/limit/capacity from source into hollowBuffer and returns it. */
public static ByteBuffer duplicateDirectByteBuffer(ByteBuffer source, ByteBuffer hollowBuffer)
{
assert source.getClass() == DIRECT_BYTE_BUFFER_CLASS || source.getClass() == RO_DIRECT_BYTE_BUFFER_CLASS;
unsafe.putLong(hollowBuffer, DIRECT_BYTE_BUFFER_ADDRESS_OFFSET, unsafe.getLong(source, DIRECT_BYTE_BUFFER_ADDRESS_OFFSET));
unsafe.putInt(hollowBuffer, DIRECT_BYTE_BUFFER_POSITION_OFFSET, unsafe.getInt(source, DIRECT_BYTE_BUFFER_POSITION_OFFSET));
unsafe.putInt(hollowBuffer, DIRECT_BYTE_BUFFER_LIMIT_OFFSET, unsafe.getInt(source, DIRECT_BYTE_BUFFER_LIMIT_OFFSET));
unsafe.putInt(hollowBuffer, DIRECT_BYTE_BUFFER_CAPACITY_OFFSET, unsafe.getInt(source, DIRECT_BYTE_BUFFER_CAPACITY_OFFSET));
return hollowBuffer;
}
// byte-at-a-time reads/writes below assemble native-order words on platforms
// where unaligned word access is unavailable
public static long getLongByByte(long address)
{
if (BIG_ENDIAN)
{
return (((long) unsafe.getByte(address ) ) << 56) |
(((long) unsafe.getByte(address + 1) & 0xff) << 48) |
(((long) unsafe.getByte(address + 2) & 0xff) << 40) |
(((long) unsafe.getByte(address + 3) & 0xff) << 32) |
(((long) unsafe.getByte(address + 4) & 0xff) << 24) |
(((long) unsafe.getByte(address + 5) & 0xff) << 16) |
(((long) unsafe.getByte(address + 6) & 0xff) << 8) |
(((long) unsafe.getByte(address + 7) & 0xff) );
}
else
{
return (((long) unsafe.getByte(address + 7) ) << 56) |
(((long) unsafe.getByte(address + 6) & 0xff) << 48) |
(((long) unsafe.getByte(address + 5) & 0xff) << 40) |
(((long) unsafe.getByte(address + 4) & 0xff) << 32) |
(((long) unsafe.getByte(address + 3) & 0xff) << 24) |
(((long) unsafe.getByte(address + 2) & 0xff) << 16) |
(((long) unsafe.getByte(address + 1) & 0xff) << 8) |
(((long) unsafe.getByte(address ) & 0xff) );
}
}
public static int getIntByByte(long address)
{
if (BIG_ENDIAN)
{
return (((int) unsafe.getByte(address ) ) << 24) |
(((int) unsafe.getByte(address + 1) & 0xff) << 16) |
(((int) unsafe.getByte(address + 2) & 0xff) << 8 ) |
(((int) unsafe.getByte(address + 3) & 0xff) );
}
else
{
return (((int) unsafe.getByte(address + 3) ) << 24) |
(((int) unsafe.getByte(address + 2) & 0xff) << 16) |
(((int) unsafe.getByte(address + 1) & 0xff) << 8) |
(((int) unsafe.getByte(address ) & 0xff) );
}
}
public static int getShortByByte(long address)
{
if (BIG_ENDIAN)
{
return (((int) unsafe.getByte(address ) ) << 8) |
(((int) unsafe.getByte(address + 1) & 0xff) );
}
else
{
return (((int) unsafe.getByte(address + 1) ) << 8) |
(((int) unsafe.getByte(address ) & 0xff) );
}
}
public static void putLongByByte(long address, long value)
{
if (BIG_ENDIAN)
{
unsafe.putByte(address, (byte) (value >> 56));
unsafe.putByte(address + 1, (byte) (value >> 48));
unsafe.putByte(address + 2, (byte) (value >> 40));
unsafe.putByte(address + 3, (byte) (value >> 32));
unsafe.putByte(address + 4, (byte) (value >> 24));
unsafe.putByte(address + 5, (byte) (value >> 16));
unsafe.putByte(address + 6, (byte) (value >> 8));
unsafe.putByte(address + 7, (byte) (value));
}
else
{
unsafe.putByte(address + 7, (byte) (value >> 56));
unsafe.putByte(address + 6, (byte) (value >> 48));
unsafe.putByte(address + 5, (byte) (value >> 40));
unsafe.putByte(address + 4, (byte) (value >> 32));
unsafe.putByte(address + 3, (byte) (value >> 24));
unsafe.putByte(address + 2, (byte) (value >> 16));
unsafe.putByte(address + 1, (byte) (value >> 8));
unsafe.putByte(address, (byte) (value));
}
}
public static void putIntByByte(long address, int value)
{
if (BIG_ENDIAN)
{
unsafe.putByte(address, (byte) (value >> 24));
unsafe.putByte(address + 1, (byte) (value >> 16));
unsafe.putByte(address + 2, (byte) (value >> 8));
unsafe.putByte(address + 3, (byte) (value));
}
else
{
unsafe.putByte(address + 3, (byte) (value >> 24));
unsafe.putByte(address + 2, (byte) (value >> 16));
unsafe.putByte(address + 1, (byte) (value >> 8));
unsafe.putByte(address, (byte) (value));
}
}
/**
 * Copies buffer's remaining bytes (position..limit) to native memory at address.
 * The buffer's position is not advanced.
 */
public static void setBytes(long address, ByteBuffer buffer)
{
int start = buffer.position();
int count = buffer.limit() - start;
if (count == 0)
return;
if (buffer.isDirect())
setBytes(((DirectBuffer)buffer).address() + start, address, count);
else
setBytes(address, buffer.array(), buffer.arrayOffset() + start, count);
}
/**
 * Transfers count bytes from buffer to Memory
 *
 * @param address start offset in the memory
 * @param buffer the data buffer
 * @param bufferOffset start offset of the buffer
 * @param count number of bytes to transfer
 */
public static void setBytes(long address, byte[] buffer, int bufferOffset, int count)
{
assert buffer != null;
assert !(bufferOffset < 0 || count < 0 || bufferOffset + count > buffer.length);
setBytes(buffer, bufferOffset, address, count);
}
/** Native-to-native copy of count bytes from src to trg, chunked per UNSAFE_COPY_THRESHOLD. */
public static void setBytes(long src, long trg, long count)
{
while (count > 0)
{
long size = (count> UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : count;
unsafe.copyMemory(src, trg, size);
count -= size;
src += size;
trg+= size;
}
}
/** Heap-array-to-native copy of count bytes, chunked per UNSAFE_COPY_THRESHOLD. */
public static void setBytes(byte[] src, int offset, long trg, long count)
{
while (count > 0)
{
long size = (count> UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : count;
unsafe.copyMemory(src, BYTE_ARRAY_BASE_OFFSET + offset, null, trg, size);
count -= size;
offset += size;
trg += size;
}
}
/**
 * Transfers count bytes from Memory starting at memoryOffset to buffer starting at bufferOffset
 *
 * @param address start offset in the memory
 * @param buffer the data buffer
 * @param bufferOffset start offset of the buffer
 * @param count number of bytes to transfer
 */
public static void getBytes(long address, byte[] buffer, int bufferOffset, int count)
{
if (buffer == null)
throw new NullPointerException();
else if (bufferOffset < 0 || count < 0 || count > buffer.length - bufferOffset)
throw new IndexOutOfBoundsException();
else if (count == 0)
return;
unsafe.copyMemory(null, address, buffer, BYTE_ARRAY_BASE_OFFSET + bufferOffset, count);
}
}
| |
package com.voterguide;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.voterguide.helpers.DataHelper;
import com.voterguide.helpers.Utils;
import com.voterguide.model.Candidate;
import com.voterguide.model.City;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.annotation.SuppressLint;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class DetailPageActivity extends ActionBarActivity implements
OnClickListener {
Candidate candidate = null;
City city = null;
String candidateName = null;
@SuppressLint("NewApi")
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_detail_page);
ActionBar actionBar = getSupportActionBar();
actionBar.setDisplayHomeAsUpEnabled(true);
candidate = (Candidate) getIntent().getSerializableExtra("candidate");
city = getIntent().getParcelableExtra("city");
candidate = DataHelper.getCandidateDetail(this, candidate);
ImageView profileImage = (ImageView) findViewById(R.id.profile_image);
ImageLoader imageLoader = ImageLoader.getInstance();
DisplayImageOptions options = new DisplayImageOptions.Builder()
.showImageOnLoading(R.drawable.nophoto) // resource or drawable
.showImageForEmptyUri(R.drawable.nophoto) // resource or
.showImageOnFail(R.drawable.nophoto) // resource or drawable
.cacheInMemory(true).build();
imageLoader.displayImage(candidate.getLfdaPhotoUrl(), profileImage,
options);
String rowValue = candidate.getFirstName() + " "
+ candidate.getLastName() + " ("
+ candidate.getParty().toUpperCase() + ")";
candidateName = rowValue;
((TextView) findViewById(R.id.candidate_name)).setText(rowValue);
((TextView) findViewById(R.id.candidate_office)).setText("Office: "
+ candidate.getOffice());
((TextView) findViewById(R.id.candidate_incumbent))
.setText("Incumbent: "
+ ((candidate.isIncumbent()) ? "Yes" : "No"));
String distrinctString = "";
if (!candidate.getOffice().equals("Governor")
&& candidate.getDistrict() != null
&& !candidate.getDistrict().isEmpty()) {
distrinctString = "Distr: " + candidate.getDistrict();
if (city != null) {
distrinctString += ", " + city.getCityName() + " ("
+ city.getCountyName() + ")";
} else if (candidate.getCounty() != null
&& !candidate.getCounty().isEmpty()) {
distrinctString += ", " + candidate.getCounty();
}
}
((TextView) findViewById(R.id.candidate_distr))
.setText(distrinctString);
String experience = (candidate.getExperience() != null && !candidate
.getExperience().isEmpty()) ? candidate.getExperience() : getString(R.string.no_info_available);
String education = (candidate.getEducation() != null && !candidate
.getEducation().isEmpty()) ? candidate.getEducation() : getString(R.string.no_info_available);
String family = (candidate.getFamily() != null && !candidate
.getFamily().isEmpty()) ? candidate.getFamily() : getString(R.string.no_info_available);
String email = (candidate.getEmail() != null && !candidate.getEmail()
.isEmpty()) ? candidate.getEmail() : getString(R.string.no_info_available);
String phone = (candidate.getPhone() != null && !candidate.getPhone()
.isEmpty()) ? candidate.getPhone() : getString(R.string.no_info_available);
String website = (candidate.getWebsite() != null && !candidate
.getWebsite().isEmpty()) ? candidate.getWebsite() : getString(R.string.no_info_available);
((TextView) findViewById(R.id.experience_value)).setText(experience);
((TextView) findViewById(R.id.education_value)).setText(education);
((TextView) findViewById(R.id.family_value)).setText(family);
((TextView) findViewById(R.id.email_value)).setText(email);
((TextView) findViewById(R.id.phone_value)).setText(phone);
((TextView) findViewById(R.id.website_value)).setText(website);
View viewIssuesOnPosition = findViewById(R.id.view_position_view);
viewIssuesOnPosition.setOnClickListener(this);
View reportDataError = findViewById(R.id.info_quality_view);
reportDataError.setOnClickListener(this);
View viewProfileInBrowser = findViewById(R.id.view_full_profile_view);
viewProfileInBrowser.setOnClickListener(this);
ImageView websiteButton = (ImageView) findViewById(R.id.visit_website);
ImageView copyWebsiteButton = (ImageView) findViewById(R.id.copy_website);
if (candidate.getWebsite() != null && !candidate.getWebsite().isEmpty()) {
websiteButton.setOnClickListener(this);
copyWebsiteButton.setOnClickListener(this);
} else {
websiteButton.setColorFilter(Color.GRAY);
copyWebsiteButton.setColorFilter(Color.GRAY);
}
ImageView emailButton = (ImageView) findViewById(R.id.send_email);
ImageView copyEmailButton = (ImageView) findViewById(R.id.copy_email);
if (candidate.getEmail() != null && !candidate.getEmail().isEmpty()) {
emailButton.setOnClickListener(this);
copyEmailButton.setOnClickListener(this);
} else {
emailButton.setColorFilter(Color.GRAY);
copyEmailButton.setColorFilter(Color.GRAY);
}
ImageView callButton = (ImageView) findViewById(R.id.call_button);
if (candidate.getPhone() != null && !candidate.getPhone().isEmpty()) {
View callWrapper = findViewById(R.id.call_button_wrapper);
callWrapper.setOnClickListener(this);
} else {
callButton.setColorFilter(Color.GRAY);
}
}
@Override
public void onResume() {
super.onResume(); // Always call the superclass method first
trackView();
}
private void trackView() {
// Get tracker.
Tracker t = ((MyApplication) getApplication()).getTracker();
// Set screen name.
// Where path is a String representing the screen name.
t.setScreenName("CandidateProfile");
// Send a screen view.
t.send(new HitBuilders.AppViewBuilder().build());
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
// getMenuInflater().inflate(R.menu.detail_page, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
switch (item.getItemId()) {
case android.R.id.home:
finish();
}
return super.onOptionsItemSelected(item);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
int viewId = v.getId();
switch (viewId) {
case R.id.view_position_view:
Intent viewPositionActivity = new Intent(DetailPageActivity.this,
IssuesOnProfileActivity.class);
viewPositionActivity.putExtra("candidate", candidate);
startActivity(viewPositionActivity);
break;
case R.id.view_full_profile_view:
Utils.alertMessage(DetailPageActivity.this,
getString(R.string.exit_to_profile_prompt),
viewProfileInBrowserDialogListener());
break;
case R.id.visit_website:
Utils.alertMessage(DetailPageActivity.this,
getString(R.string.exit_to_website_promote),
viewWebsiteDialogListener());
break;
case R.id.copy_website:
ClipboardManager clipboard = (ClipboardManager) getSystemService(Context.CLIPBOARD_SERVICE);
ClipData clip = ClipData.newPlainText("", candidate.getWebsite());
clipboard.setPrimaryClip(clip);
Toast toast = Toast.makeText(this, R.string.copy_website, 500);
toast.show();
break;
case R.id.send_email:
openCandidateEmailDialog();
break;
case R.id.copy_email:
ClipboardManager copyclipboard = (ClipboardManager) getSystemService(Context.CLIPBOARD_SERVICE);
ClipData copyclip = ClipData.newPlainText("", candidate.getEmail());
copyclipboard.setPrimaryClip(copyclip);
Toast copytoast = Toast.makeText(this, R.string.copy_email, 500);
copytoast.show();
break;
case R.id.call_button_wrapper:
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY)) {
String callPhonePrompt = String.format(getString(R.string.call_phone_prompt), candidate.getPhone());
Utils.alertMessage(DetailPageActivity.this, callPhonePrompt,clickToCallDialogListener());
}else{
Toast callToast = Toast.makeText(this, R.string.call_not_supported_on_device, 2000);
callToast.show();
}
break;
case R.id.info_quality_view:
openDataReportEmailDialog();
break;
}
}
public DialogInterface.OnClickListener viewProfileInBrowserDialogListener() {
DialogInterface.OnClickListener dialogClickListener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case DialogInterface.BUTTON_POSITIVE:
Intent browserIntent = new Intent(Intent.ACTION_VIEW,
Uri.parse(candidate.getLfdaProfileUrl()));
startActivity(browserIntent);
break;
}
}
};
return dialogClickListener;
}
public DialogInterface.OnClickListener clickToCallDialogListener() {
DialogInterface.OnClickListener dialogClickListener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case DialogInterface.BUTTON_POSITIVE:
Intent callIntent = new Intent(Intent.ACTION_CALL);
String phoneString = candidate.getPhone();
phoneString = phoneString.replaceAll("[^-?0-9]+", "");
Log.i("call","phone string =="+phoneString);
callIntent.setData(Uri.parse("tel:"+ phoneString));
startActivity(callIntent);
break;
}
}
};
return dialogClickListener;
}
public DialogInterface.OnClickListener viewWebsiteDialogListener() {
DialogInterface.OnClickListener dialogClickListener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case DialogInterface.BUTTON_POSITIVE:
Intent browserIntent = new Intent(Intent.ACTION_VIEW,
Uri.parse(candidate.getWebsite()));
startActivity(browserIntent);
break;
}
}
};
return dialogClickListener;
}
public void openDataReportEmailDialog() {
Intent intent = new Intent(Intent.ACTION_SEND);
//intent.setData(Uri.parse("mailto:"));
intent.setType("message/rfc822");
intent.putExtra(Intent.EXTRA_EMAIL,
new String[] { getString(R.string.report_error_recipient) });
intent.putExtra(Intent.EXTRA_SUBJECT,
getString(R.string.report_error_subject));
String emailBody = String.format(getString(R.string.report_error_body),
candidateName);
intent.putExtra(Intent.EXTRA_TEXT, emailBody);
startActivity(Intent.createChooser(intent, "Report Data Error"));
}
/**
 * Launches an email chooser addressed to the candidate with a
 * resource-defined body and an empty subject.
 */
public void openCandidateEmailDialog() {
    Intent intent = new Intent(Intent.ACTION_SEND);
    // NOTE: the original also called intent.setData(Uri.parse("mailto:"))
    // before setType(); per the Android Intent API, setType() clears any
    // previously set data URI, so that call had no effect and has been
    // removed (this also matches openDataReportEmailDialog above).
    intent.setType("message/rfc822");
    intent.putExtra(Intent.EXTRA_EMAIL, new String[] { candidate.getEmail() });
    intent.putExtra(Intent.EXTRA_SUBJECT, "");
    intent.putExtra(Intent.EXTRA_TEXT, getString(R.string.email_candidate_body));
    startActivity(Intent.createChooser(intent, "Send mail to candidate"));
}
}
| |
/**
* Bars Feedback System
*/
package com.example.psbarsfeedback;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
public class MainActivity extends Activity {

    private static final int RECORDER_SAMPLERATE = 44100;
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
    private static final int TIME_FRAME_IN_MILIS = 2000; // duration of each audio chunk

    private AudioRecord recorder;
    private Thread streamingThread;
    private boolean isRecording = false;
    // Total number of samples to buffer per chunk:
    // samples per second * chunk duration in seconds.
    private int BufferElements2Rec = MainActivity.RECORDER_SAMPLERATE
            * (TIME_FRAME_IN_MILIS / 1000);
    private int BytesPerElement = 2; // 2 bytes per sample in 16 bit PCM
    private SocketCommTx comm;
    // Fallback address; normally overwritten by readIPaddress() in onResume().
    private String serverIP = "192.168.1.103";

    // Classification of the latest measurements: 1 = LOW, 2 = MID, 3 = HIGH.
    static int volumeValue, speedValue;
    double lowerThresholdVol = 54.0; //CALIBRATE
    double upperThresholdVol = 58.8; //CALIBRATE
    double maxThresholdVol = 67; //CALIBRATE (may be overridden by loadCalibData())
    double lowerThresholdRate = 1.65; //CALIBRATE
    double upperThresholdRate = 2.7; //CALIBRATE

    /**
     * Receives the latest loudness / speech-rate measurements as decimal
     * strings and updates the on-screen feedback bars accordingly.
     */
    public synchronized void setValues(String loudness, String rate) {
        double avgLoud = Double.parseDouble(loudness);
        double avgRateF = Double.parseDouble(rate);
        Log.i("debugRun", "avgLoud: " + avgLoud + " avgRateF: " + avgRateF);
        checkInputData(avgLoud, avgRateF);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Keep the display on and run full screen with no title bar.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
                WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_main);
    }

    @Override
    protected void onResume() {
        super.onResume();
        loadCalibData();
        readIPaddress();
        comm = new SocketCommTx(serverIP, 9090, this);
        startRecording();
    }

    /**
     * Loads the calibrated maximum loudness threshold from external storage,
     * if present; shows a toast and keeps the defaults otherwise.
     */
    private void loadCalibData() {
        File root = android.os.Environment.getExternalStorageDirectory();
        File dir = new File(root.getAbsolutePath() + "/ROCHCIStorage");
        File file = new File(dir, "ROCSpeakGlass_Calibration.info");
        if (!file.exists()) {
            Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
            return;
        }
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            String maxLoudVal = br.readLine();
            if (maxLoudVal == null) {
                Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
                return;
            }
            maxThresholdVol = Double.parseDouble(maxLoudVal);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            Toast.makeText(getApplicationContext(), "No Calibration Data Found", Toast.LENGTH_LONG).show();
            e.printStackTrace();
        } finally {
            // FIX: the original never closed the reader, leaking the file
            // descriptor on every call. Close it on all paths.
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignored) {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    } //end loadCalibData

    /** Reads the streaming server's IP address from external storage. */
    private void readIPaddress() {
        File root = android.os.Environment.getExternalStorageDirectory();
        File dir = new File(root.getAbsolutePath() + "/ROCHCIStorage");
        File file = new File(dir, "ipaddress.txt");
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            serverIP = br.readLine();
            Log.i("debugRun", "serverIP: " + serverIP);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // FIX: the original only closed the reader on the success path.
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }

    @Override
    protected void onPause() {
        stopRecording();
        comm.stopThread();
        super.onPause();
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Hard-kill the process so the recorder and socket threads cannot
        // linger in the background on this device.
        int pid = android.os.Process.myPid();
        android.os.Process.killProcess(pid);
    }

    /**
     * Maps the measured loudness and speech rate onto LOW(1)/MID(2)/HIGH(3)
     * bands and lights exactly one volume bar (b1-b3) and one speed bar
     * (b4-b6), turning the other bars off.
     */
    public void checkInputData(double avgLoud, double avgRateF) {
        // LOW=1 // MID=2 // HIGH=3 //
        if (avgLoud <= lowerThresholdVol)
            volumeValue = 1;
        else if (avgLoud > lowerThresholdVol && avgLoud < upperThresholdVol)
            volumeValue = 2;
        else if (avgLoud >= upperThresholdVol)
            volumeValue = 3;
        if (avgRateF <= lowerThresholdRate)
            speedValue = 1;
        else if (avgRateF > lowerThresholdRate && avgRateF < upperThresholdRate)
            speedValue = 2;
        else if (avgRateF >= upperThresholdRate)
            speedValue = 3;
        Log.i("debugRun", " volumeValue: " + volumeValue + " speedValue: " + speedValue );
        // The original enumerated all nine (volume, speed) combinations. Each
        // combination activated bar b<volumeValue> and bar b<speedValue + 3>
        // with changeState(0, 1, ...) and deactivated the remaining bars with
        // changeState(1, 0, ...); the ternaries below express that directly.
        // Like the original, do nothing until both values are classified.
        if (volumeValue >= 1 && speedValue >= 1) {
            MySurfaceView.graphicsThread.changeState(volumeValue == 1 ? 0 : 1, volumeValue == 1 ? 1 : 0, GraphicsThread.b1);
            MySurfaceView.graphicsThread.changeState(volumeValue == 2 ? 0 : 1, volumeValue == 2 ? 1 : 0, GraphicsThread.b2);
            MySurfaceView.graphicsThread.changeState(volumeValue == 3 ? 0 : 1, volumeValue == 3 ? 1 : 0, GraphicsThread.b3);
            MySurfaceView.graphicsThread.changeState(speedValue == 1 ? 0 : 1, speedValue == 1 ? 1 : 0, GraphicsThread.b4);
            MySurfaceView.graphicsThread.changeState(speedValue == 2 ? 0 : 1, speedValue == 2 ? 1 : 0, GraphicsThread.b5);
            MySurfaceView.graphicsThread.changeState(speedValue == 3 ? 0 : 1, speedValue == 3 ? 1 : 0, GraphicsThread.b6);
        }
    } //end check

    //****************************************************************************************************
    /**
     * Pulls 16-bit PCM samples from the recorder and streams them to the
     * socket as bytes until {@code isRecording} is cleared.
     */
    private void writeAudioDataToSocket() {
        // Buffer of shorts because the encoding format is 16 bit PCM.
        short[] sData = new short[BufferElements2Rec];
        while (isRecording) {
            // Blocks until a full chunk has been captured from the microphone.
            int read = recorder.read(sData, 0, BufferElements2Rec);
            byte[] bData = short2byte(sData);
            comm.fillBuffer(bData);
        }
    }

    /**
     * Converts 16-bit samples to little-endian byte pairs, zeroing the
     * source array as it goes (as the original did).
     */
    private byte[] short2byte(short[] sData) {
        int shortArrsize = sData.length;
        byte[] bytes = new byte[shortArrsize * 2];
        for (int i = 0; i < shortArrsize; i++) {
            bytes[i * 2] = (byte) (sData[i] & 0x00FF);      // low byte first
            bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);    // then high byte
            sData[i] = 0;
        }
        return bytes;
    }

    /** Creates the AudioRecord and starts the background streaming thread. */
    private void startRecording() {
        // Buffer size must satisfy both the hardware minimum and one chunk.
        int bufferSize = determineMinimumBufferSize(RECORDER_SAMPLERATE, RECORDER_AUDIO_ENCODING);
        recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                RECORDER_SAMPLERATE, RECORDER_CHANNELS,
                RECORDER_AUDIO_ENCODING, bufferSize);
        recorder.startRecording();
        isRecording = true;
        Log.d("MainActivity_startRecording", "recording started");
        streamingThread = new Thread(new Runnable() {
            public void run() {
                writeAudioDataToSocket();
            }
        }, "AudioRecorder Thread");
        streamingThread.start();
        // (A duplicate "recording started" log that followed here was removed.)
    }

    /** Stops and releases the recorder, letting the streaming thread exit. */
    private void stopRecording() {
        if (recorder != null) {
            isRecording = false;
            recorder.stop();
            recorder.release();
            recorder = null;
            streamingThread = null;
        }
    }

    /**
     * Returns the larger of the hardware minimum buffer size and the size of
     * one full chunk, in bytes.
     */
    private int determineMinimumBufferSize(final int sampleRate, int encoding) {
        int minBufferSize =
                AudioRecord.getMinBufferSize(sampleRate,
                        AudioFormat.CHANNEL_IN_MONO, encoding);
        return minBufferSize > BufferElements2Rec * BytesPerElement
                ? minBufferSize
                : BufferElements2Rec * BytesPerElement;
    }
}//end class
| |
package org.apache.cassandra.db.marshal;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import static org.junit.Assert.assertEquals;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.Random;
import java.util.UUID;
import org.junit.Test;
import org.junit.Assert;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.UUIDGen;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class UUIDTypeTest
{
    private static final Logger logger = LoggerFactory.getLogger(UUIDTypeTest.class);

    UUIDType uuidType = new UUIDType();

    /** Compares time-based, random and nil UUIDs against expected orderings. */
    @Test
    public void testRandomCompare()
    {
        UUID t1 = UUIDGen.getTimeUUID();
        UUID t2 = UUIDGen.getTimeUUID();
        // null (empty buffer) sorts before everything else.
        testCompare(null, t2, -1);
        testCompare(t1, null, 1);
        testCompare(t1, t2, -1);
        testCompare(t1, t1, 0);
        testCompare(t2, t2, 0);
        UUID nullId = new UUID(0, 0);
        testCompare(nullId, t1, -1);
        testCompare(t2, nullId, 1);
        testCompare(nullId, nullId, 0);
        for (int test = 1; test < 32; test++)
        {
            UUID r1 = UUID.randomUUID();
            UUID r2 = UUID.randomUUID();
            testCompare(r1, r2, compareUUID(r1, r2));
            testCompare(r1, r1, 0);
            testCompare(r2, r2, 0);
            // Version 1 (time-based) always sorts before version 4 (random).
            testCompare(t1, r1, -1);
            testCompare(r2, t2, 1);
        }
    }

    /** Unsigned 64-bit comparison; returns -1, 0 or 1. */
    public static int compareUnsigned(long n1, long n2)
    {
        if (n1 == n2)
        {
            return 0;
        }
        // Flip the signed result when exactly one operand is "negative",
        // i.e. has its top bit set and is large as an unsigned value.
        if ((n1 < n2) ^ ((n1 < 0) != (n2 < 0)))
        {
            return -1;
        }
        return 1;
    }

    /** Lexical unsigned comparison of two UUIDs, MSB half first. */
    public static int compareUUID(UUID u1, UUID u2)
    {
        int c = compareUnsigned(u1.getMostSignificantBits(),
                u2.getMostSignificantBits());
        if (c != 0)
        {
            return c;
        }
        return compareUnsigned(u1.getLeastSignificantBits(),
                u2.getLeastSignificantBits());
    }

    /** Human-readable description of a comparison, for assertion messages. */
    public String describeCompare(UUID u1, UUID u2, int c)
    {
        String tb1 = (u1 == null) ? "null" : (u1.version() == 1) ? "time-based " : "random ";
        String tb2 = (u2 == null) ? "null" : (u2.version() == 1) ? "time-based " : "random ";
        String comp = (c < 0) ? " < " : ((c == 0) ? " = " : " > ");
        return tb1 + u1 + comp + tb2 + u2;
    }

    /** Collapses a comparison result to its sign (-1, 0 or 1). */
    public int sign(int i)
    {
        if (i < 0)
        {
            return -1;
        }
        if (i > 0)
        {
            return 1;
        }
        return 0;
    }

    /** Serializes a UUID to its 16-byte big-endian form; null -> empty buffer. */
    public static ByteBuffer bytebuffer(UUID uuid)
    {
        if (uuid == null)
            return ByteBufferUtil.EMPTY_BYTE_BUFFER;
        long msb = uuid.getMostSignificantBits();
        long lsb = uuid.getLeastSignificantBits();
        byte[] bytes = new byte[16];
        for (int i = 0; i < 8; i++)
        {
            bytes[i] = (byte) (msb >>> 8 * (7 - i));
        }
        for (int i = 8; i < 16; i++)
        {
            bytes[i] = (byte) (lsb >>> 8 * (7 - i));
        }
        return ByteBuffer.wrap(bytes);
    }

    /**
     * Logs a note when java.util.UUID.compareTo would disagree with the
     * expected ordering (it performs a signed, not lexical, comparison).
     */
    public void logJdkUUIDCompareToVariance(UUID u1, UUID u2, int expC)
    {
        if ((u1 == null) || (u2 == null))
            return;
        if (u1.version() != u2.version())
            return;
        if (u1.version() == 1)
            return;
        if (u1.compareTo(u2) != expC)
            logger.info("*** Note: java.util.UUID.compareTo() would have compared this differently");
    }

    /** Asserts that uuidType orders u1/u2 with the expected sign. */
    public void testCompare(UUID u1, UUID u2, int expC)
    {
        int c = sign(uuidType.compare(bytebuffer(u1), bytebuffer(u2)));
        expC = sign(expC);
        assertEquals("Expected " + describeCompare(u1, u2, expC) + ", got " + describeCompare(u1, u2, c), expC, c);
        // Two time-based UUIDs must also agree with TimeUUIDType's ordering.
        if (((u1 != null) && (u1.version() == 1)) && ((u2 != null) && (u2.version() == 1)))
            assertEquals(c, sign(TimeUUIDType.instance.compare(bytebuffer(u1), bytebuffer(u2))));
        logJdkUUIDCompareToVariance(u1, u2, c);
    }

    @Test
    public void testTimeEquality()
    {
        UUID a = UUIDGen.getTimeUUID();
        UUID b = new UUID(a.getMostSignificantBits(),
                a.getLeastSignificantBits());
        assertEquals(0, uuidType.compare(bytebuffer(a), bytebuffer(b)));
    }

    @Test
    public void testTimeSmaller()
    {
        UUID a = UUIDGen.getTimeUUID();
        UUID b = UUIDGen.getTimeUUID();
        UUID c = UUIDGen.getTimeUUID();
        assert uuidType.compare(bytebuffer(a), bytebuffer(b)) < 0;
        assert uuidType.compare(bytebuffer(b), bytebuffer(c)) < 0;
        assert uuidType.compare(bytebuffer(a), bytebuffer(c)) < 0;
    }

    @Test
    public void testTimeBigger()
    {
        UUID a = UUIDGen.getTimeUUID();
        UUID b = UUIDGen.getTimeUUID();
        UUID c = UUIDGen.getTimeUUID();
        assert uuidType.compare(bytebuffer(c), bytebuffer(b)) > 0;
        assert uuidType.compare(bytebuffer(b), bytebuffer(a)) > 0;
        assert uuidType.compare(bytebuffer(c), bytebuffer(a)) > 0;
    }

    @Test
    public void testPermutations()
    {
        compareAll(random(1000, (byte) 0x00, (byte) 0x10, (byte) 0x20));
        for (ByteBuffer[] permutations : permutations(10, (byte) 0x00, (byte) 0x10, (byte) 0x20))
            compareAll(permutations);
    }

    /** Cross-checks uuidType.compare against reference orderings for all pairs. */
    private void compareAll(ByteBuffer[] uuids)
    {
        for (int i = 0 ; i < uuids.length ; i++)
        {
            for (int j = i + 1 ; j < uuids.length ; j++)
            {
                ByteBuffer bi = uuids[i];
                ByteBuffer bj = uuids[j];
                UUID ui = UUIDGen.getUUID(bi);
                UUID uj = UUIDGen.getUUID(bj);
                int c = uuidType.compare(bi, bj);
                if (ui.version() != uj.version())
                {
                    // Different versions order by version number.
                    Assert.assertTrue(isComparisonEquivalent(ui.version() - uj.version(), c));
                }
                else if (ui.version() == 1)
                {
                    // Time-based UUIDs order by timestamp, bytes break ties.
                    long i0 = ui.timestamp();
                    long i1 = uj.timestamp();
                    if (i0 == i1) Assert.assertTrue(isComparisonEquivalent(ByteBufferUtil.compareUnsigned(bi, bj), c));
                    else Assert.assertTrue(isComparisonEquivalent(Long.compare(i0, i1), c));
                }
                else
                {
                    // Everything else is a plain unsigned lexical comparison.
                    Assert.assertTrue(isComparisonEquivalent(ByteBufferUtil.compareUnsigned(bi, bj), c));
                }
                Assert.assertTrue(isComparisonEquivalent(compareV1(bi, bj), c));
            }
        }
    }

    /** True when both comparison results have the same sign. */
    private static boolean isComparisonEquivalent(int c1, int c2)
    {
        c1 = c1 < -1 ? -1 : c1 > 1 ? 1 : c1;
        c2 = c2 < -1 ? -1 : c2 > 1 ? 1 : c2;
        return c1 == c2;
    }

    // produce randomCount random byte strings, and permute every possible byte
    // within each, for all provided types, using permute()
    static Iterable<ByteBuffer[]> permutations(final int randomCount, final byte ... types)
    {
        final Random random = new Random();
        long seed = random.nextLong();
        random.setSeed(seed);
        // Print the seed so failures can be reproduced.
        System.out.println("UUIDTypeTest.permutations.seed=" + seed);
        return new Iterable<ByteBuffer[]>()
        {
            public Iterator<ByteBuffer[]> iterator()
            {
                return new Iterator<ByteBuffer[]>()
                {
                    byte[] bytes = new byte[16];
                    int c = -1, i = 16;
                    public boolean hasNext()
                    {
                        return i < 16 || c < randomCount - 1;
                    }
                    public ByteBuffer[] next()
                    {
                        if (i == 16)
                        {
                            // Start a fresh random base string, then permute
                            // each of its 16 byte positions in turn.
                            random.nextBytes(bytes);
                            i = 0;
                            c++;
                        }
                        return permute(bytes, i++, types);
                    }
                    public void remove()
                    {
                    }
                };
            }
        };
    }

    // for each of the given UUID types provided, produce every possible
    // permutation of the provided byte[] for the given index
    static ByteBuffer[] permute(byte[] src, int byteIndex, byte ... types)
    {
        assert src.length == 16;
        assert byteIndex < 16;
        byte[] bytes = src.clone();
        ByteBuffer[] permute;
        if (byteIndex == 6)
        {
            // Byte 6 holds the version nibble, so only the low nibble varies.
            permute = new ByteBuffer[16 * types.length];
            for (int i = 0 ; i < types.length ; i++)
            {
                for (int j = 0 ; j < 16 ; j++)
                {
                    int k = i * 16 + j;
                    bytes[6] = (byte)(types[i] | j);
                    permute[k] = ByteBuffer.wrap(bytes.clone());
                }
            }
        }
        else
        {
            permute = new ByteBuffer[256 * types.length];
            for (int i = 0 ; i < types.length ; i++)
            {
                bytes[6] = types[i];
                for (int j = 0 ; j < 256 ; j++)
                {
                    int k = i * 256 + j;
                    // BUG FIX: the original wrote
                    //   bytes[byteIndex] = (byte) ((bytes[byteIndex] & 0x0F) | i);
                    // which ignores the loop variable j entirely, producing 256
                    // identical buffers per type instead of every byte value.
                    bytes[byteIndex] = (byte) j;
                    permute[k] = ByteBuffer.wrap(bytes.clone());
                }
            }
        }
        return permute;
    }

    /** Generates count random 16-byte UUID buffers for each requested type nibble. */
    static ByteBuffer[] random(int count, byte ... types)
    {
        Random random = new Random();
        long seed = random.nextLong();
        random.setSeed(seed);
        // Print the seed so failures can be reproduced.
        System.out.println("UUIDTypeTest.random.seed=" + seed);
        ByteBuffer[] uuids = new ByteBuffer[count * types.length];
        for (int i = 0 ; i < types.length ; i++)
        {
            for (int j = 0; j < count; j++)
            {
                int k = (i * count) + j;
                uuids[k] = ByteBuffer.allocate(16);
                random.nextBytes(uuids[k].array());
                // Clear the version nibble, then set it from types[i].
                uuids[k].array()[6] &= 0x0F;
                uuids[k].array()[6] |= types[i];
            }
        }
        return uuids;
    }

    /** Reference implementation of the UUID comparison, used as an oracle. */
    private static int compareV1(ByteBuffer b1, ByteBuffer b2)
    {
        // Compare for length
        if ((b1 == null) || (b1.remaining() < 16))
        {
            return ((b2 == null) || (b2.remaining() < 16)) ? 0 : -1;
        }
        if ((b2 == null) || (b2.remaining() < 16))
        {
            return 1;
        }
        int s1 = b1.position();
        int s2 = b2.position();
        // Compare versions
        int v1 = (b1.get(s1 + 6) >> 4) & 0x0f;
        int v2 = (b2.get(s2 + 6) >> 4) & 0x0f;
        if (v1 != v2)
        {
            return v1 - v2;
        }
        // Compare timestamps for version 1
        if (v1 == 1)
        {
            // if both time-based, compare as timestamps
            int c = compareTimestampBytes(b1, b2);
            if (c != 0)
            {
                return c;
            }
        }
        // Compare the two byte arrays starting from the first
        // byte in the sequence until an inequality is
        // found. This should provide equivalent results
        // to the comparison performed by the RFC 4122
        // Appendix A - Sample Implementation.
        // Note: java.util.UUID.compareTo is not a lexical
        // comparison
        for (int i = 0; i < 16; i++)
        {
            int c = ((b1.get(s1 + i)) & 0xFF) - ((b2.get(s2 + i)) & 0xFF);
            if (c != 0)
            {
                return c;
            }
        }
        return 0;
    }

    /**
     * Compares the 60-bit timestamps of two version-1 UUIDs, most significant
     * chunk first: time_hi (nibble of byte 6 + byte 7), then time_mid
     * (bytes 4-5), then time_low (bytes 0-3).
     */
    private static int compareTimestampBytes(ByteBuffer o1, ByteBuffer o2)
    {
        int o1Pos = o1.position();
        int o2Pos = o2.position();
        int d = (o1.get(o1Pos + 6) & 0xF) - (o2.get(o2Pos + 6) & 0xF);
        if (d != 0)
        {
            return d;
        }
        d = (o1.get(o1Pos + 7) & 0xFF) - (o2.get(o2Pos + 7) & 0xFF);
        if (d != 0)
        {
            return d;
        }
        d = (o1.get(o1Pos + 4) & 0xFF) - (o2.get(o2Pos + 4) & 0xFF);
        if (d != 0)
        {
            return d;
        }
        d = (o1.get(o1Pos + 5) & 0xFF) - (o2.get(o2Pos + 5) & 0xFF);
        if (d != 0)
        {
            return d;
        }
        d = (o1.get(o1Pos) & 0xFF) - (o2.get(o2Pos) & 0xFF);
        if (d != 0)
        {
            return d;
        }
        d = (o1.get(o1Pos + 1) & 0xFF) - (o2.get(o2Pos + 1) & 0xFF);
        if (d != 0)
        {
            return d;
        }
        d = (o1.get(o1Pos + 2) & 0xFF) - (o2.get(o2Pos + 2) & 0xFF);
        if (d != 0)
        {
            return d;
        }
        return (o1.get(o1Pos + 3) & 0xFF) - (o2.get(o2Pos + 3) & 0xFF);
    }
}
| |
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* ---------------------
* WaferMapRenderer.java
* ---------------------
* (C) Copyright 2003-2007, by Robert Redburn and Contributors.
*
* Original Author: Robert Redburn;
* Contributor(s): David Gilbert (for Object Refinery Limited);
*
* Changes
* -------
* 25-Nov-2003 : Version 1, contributed by Robert Redburn. Changes have been
* made to fit the JFreeChart coding style (DG);
* 20-Apr-2005 : Small update for changes to LegendItem class (DG);
* ------------- JFREECHART 1.0.x ---------------------------------------------
* 02-Feb-2007 : Removed author tags from all over JFreeChart sources (DG);
*
*/
package org.jfree.chart.renderer;
import java.awt.Color;
import java.awt.Paint;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.Rectangle2D;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.jfree.chart.LegendItem;
import org.jfree.chart.LegendItemCollection;
import org.jfree.chart.plot.DrawingSupplier;
import org.jfree.chart.plot.WaferMapPlot;
import org.jfree.data.general.WaferMapDataset;
/**
 * A renderer for wafer map plots. Provides color management facilities:
 * chip values are mapped onto a bounded set of series paints, either one
 * color per unique value or (when there are more values than paints) by
 * position or value range.
 */
public class WaferMapRenderer extends AbstractRenderer {
    /** paint index: maps chip value (Number) -> paint index (Integer) */
    private Map paintIndex;
    /** plot */
    private WaferMapPlot plot;
    /** paint limit: maximum number of distinct colors to use */
    private int paintLimit;
    /** default paint limit */
    private static final int DEFAULT_PAINT_LIMIT = 35;
    /** default multivalue paint calculation */
    public static final int POSITION_INDEX = 0;
    /** The default value index. */
    public static final int VALUE_INDEX = 1;
    /** paint index method: POSITION_INDEX or VALUE_INDEX */
    private int paintIndexMethod;
    /**
     * Creates a new renderer.
     */
    public WaferMapRenderer() {
        this(null, null);
    }
    /**
     * Creates a new renderer.
     *
     * @param paintLimit  the paint limit.
     * @param paintIndexMethod  the paint index method.
     */
    public WaferMapRenderer(int paintLimit, int paintIndexMethod) {
        this(new Integer(paintLimit), new Integer(paintIndexMethod));
    }
    /**
     * Creates a new renderer.
     *
     * @param paintLimit  the paint limit (<code>null</code> for the default).
     * @param paintIndexMethod  the paint index method (<code>null</code> or an
     *        invalid code falls back to {@link #VALUE_INDEX}).
     */
    public WaferMapRenderer(Integer paintLimit, Integer paintIndexMethod) {
        super();
        this.paintIndex = new HashMap();
        if (paintLimit == null) {
            this.paintLimit = DEFAULT_PAINT_LIMIT;
        }
        else {
            this.paintLimit = paintLimit.intValue();
        }
        this.paintIndexMethod = VALUE_INDEX;
        if (paintIndexMethod != null) {
            if (isMethodValid(paintIndexMethod.intValue())) {
                this.paintIndexMethod = paintIndexMethod.intValue();
            }
        }
    }
    /**
     * Verifies that the passed paint index method is valid.
     *
     * @param method  the method.
     *
     * @return <code>true</code> or <code>false</code>.
     */
    private boolean isMethodValid(int method) {
        switch (method) {
            case POSITION_INDEX: return true;
            case VALUE_INDEX: return true;
            default: return false;
        }
    }
    /**
     * Returns the drawing supplier from the plot.
     *
     * @return The drawing supplier (possibly <code>null</code>).
     */
    public DrawingSupplier getDrawingSupplier() {
        DrawingSupplier result = null;
        WaferMapPlot p = getPlot();
        if (p != null) {
            result = p.getDrawingSupplier();
        }
        return result;
    }
    /**
     * Returns the plot.
     *
     * @return The plot.
     */
    public WaferMapPlot getPlot() {
        return this.plot;
    }
    /**
     * Sets the plot and builds the paint index.
     *
     * @param plot  the plot.
     */
    public void setPlot(WaferMapPlot plot) {
        this.plot = plot;
        makePaintIndex();
    }
    /**
     * Returns the paint for a given chip value.
     *
     * @param value  the value.
     *
     * @return The paint.
     */
    public Paint getChipColor(Number value) {
        return getSeriesPaint(getPaintIndex(value));
    }
    /**
     * Returns the paint index for a given chip value.
     * NOTE(review): throws NullPointerException if the value was never
     * indexed by makePaintIndex() — presumably callers only pass values
     * present in the dataset; confirm against WaferMapPlot.
     *
     * @param value  the value.
     *
     * @return The paint index.
     */
    private int getPaintIndex(Number value) {
        return ((Integer) this.paintIndex.get(value)).intValue();
    }
    /**
     * Builds a map of chip values to paint colors.
     * paintlimit is the maximum allowed number of colors.
     */
    private void makePaintIndex() {
        if (this.plot == null) {
            return;
        }
        WaferMapDataset data = this.plot.getDataset();
        Number dataMin = data.getMinValue();
        Number dataMax = data.getMaxValue();
        Set uniqueValues = data.getUniqueValues();
        if (uniqueValues.size() <= this.paintLimit) {
            int count = 0; // assign a color for each unique value
            for (Iterator i = uniqueValues.iterator(); i.hasNext();) {
                this.paintIndex.put(i.next(), new Integer(count++));
            }
        }
        else {
            // more values than paints so map
            // multiple values to the same color
            switch (this.paintIndexMethod) {
                case POSITION_INDEX:
                    makePositionIndex(uniqueValues);
                    break;
                case VALUE_INDEX:
                    makeValueIndex(dataMax, dataMin, uniqueValues);
                    break;
                default:
                    break;
            }
        }
    }
    /**
     * Builds the paintindex by assigning colors based on the number
     * of unique values: totalvalues/totalcolors.
     *
     * @param uniqueValues  the set of unique values.
     */
    private void makePositionIndex(Set uniqueValues) {
        // each color covers ceil(values / colors) consecutive unique values
        int valuesPerColor = (int) Math.ceil(
            (double) uniqueValues.size() / this.paintLimit
        );
        int count = 0; // assign a color for each unique value
        int paint = 0;
        for (Iterator i = uniqueValues.iterator(); i.hasNext();) {
            this.paintIndex.put(i.next(), new Integer(paint));
            if (++count % valuesPerColor == 0) {
                paint++;
            }
            // clamp so the index never exceeds the paint limit
            if (paint > this.paintLimit) {
                paint = this.paintLimit;
            }
        }
    }
    /**
     * Builds the paintindex by assigning colors evenly across the range
     * of values: maxValue-minValue/totalcolors
     *
     * @param max  the maximum value.
     * @param min  the minimum value.
     * @param uniqueValues  the unique values.
     */
    private void makeValueIndex(Number max, Number min, Set uniqueValues) {
        double valueRange = max.doubleValue() - min.doubleValue();
        double valueStep = valueRange / this.paintLimit;
        int paint = 0;
        double cutPoint = min.doubleValue() + valueStep;
        // NOTE(review): relies on uniqueValues iterating in ascending order
        // (the cut point only ever advances) — presumably a SortedSet;
        // confirm against WaferMapDataset.getUniqueValues().
        for (Iterator i = uniqueValues.iterator(); i.hasNext();) {
            Number value = (Number) i.next();
            while (value.doubleValue() > cutPoint) {
                cutPoint += valueStep;
                paint++;
                // clamp so the index never exceeds the paint limit
                if (paint > this.paintLimit) {
                    paint = this.paintLimit;
                }
            }
            this.paintIndex.put(value, new Integer(paint));
        }
    }
    /**
     * Builds the list of legend entries. Called by getLegendItems in
     * WaferMapPlot to populate the plot legend.
     *
     * @return The legend items.
     */
    public LegendItemCollection getLegendCollection() {
        LegendItemCollection result = new LegendItemCollection();
        if (this.paintIndex != null && this.paintIndex.size() > 0) {
            if (this.paintIndex.size() <= this.paintLimit) {
                for (Iterator i = this.paintIndex.entrySet().iterator();
                     i.hasNext();) {
                    // in this case, every color has a unique value
                    Map.Entry entry = (Map.Entry) i.next();
                    String label = entry.getKey().toString();
                    String description = label;
                    Shape shape = new Rectangle2D.Double(1d, 1d, 1d, 1d);
                    Paint paint = getSeriesPaint(
                        ((Integer) entry.getValue()).intValue()
                    );
                    Paint outlinePaint = Color.black;
                    Stroke outlineStroke = DEFAULT_STROKE;
                    result.add(new LegendItem(label, description, null,
                            null, shape, paint, outlineStroke, outlinePaint));
                }
            }
            else {
                // in this case, every color has a range of values
                Set unique = new HashSet();
                for (Iterator i = this.paintIndex.entrySet().iterator();
                     i.hasNext();) {
                    Map.Entry entry = (Map.Entry) i.next();
                    // emit one legend item per distinct paint index
                    if (unique.add(entry.getValue())) {
                        String label = getMinPaintValue(
                            (Integer) entry.getValue()).toString()
                            + " - " + getMaxPaintValue(
                                (Integer) entry.getValue()).toString();
                        String description = label;
                        Shape shape = new Rectangle2D.Double(1d, 1d, 1d, 1d);
                        Paint paint = getSeriesPaint(
                            ((Integer) entry.getValue()).intValue()
                        );
                        Paint outlinePaint = Color.black;
                        Stroke outlineStroke = DEFAULT_STROKE;
                        result.add(new LegendItem(label, description,
                                null, null, shape, paint, outlineStroke,
                                outlinePaint));
                    }
                } // end foreach map entry
            } // end else
        }
        return result;
    }
    /**
     * Returns the minimum chip value assigned to a color
     * in the paintIndex (linear scan over the whole index).
     *
     * @param index  the index.
     *
     * @return The value.
     */
    private Number getMinPaintValue(Integer index) {
        double minValue = Double.POSITIVE_INFINITY;
        for (Iterator i = this.paintIndex.entrySet().iterator(); i.hasNext();) {
            Map.Entry entry = (Map.Entry) i.next();
            if (((Integer) entry.getValue()).equals(index)) {
                if (((Number) entry.getKey()).doubleValue() < minValue) {
                    minValue = ((Number) entry.getKey()).doubleValue();
                }
            }
        }
        return new Double(minValue);
    }
    /**
     * Returns the maximum chip value assigned to a color
     * in the paintIndex (linear scan over the whole index).
     *
     * @param index  the index.
     *
     * @return The value.
     */
    private Number getMaxPaintValue(Integer index) {
        double maxValue = Double.NEGATIVE_INFINITY;
        for (Iterator i = this.paintIndex.entrySet().iterator(); i.hasNext();) {
            Map.Entry entry = (Map.Entry) i.next();
            if (((Integer) entry.getValue()).equals(index)) {
                if (((Number) entry.getKey()).doubleValue() > maxValue) {
                    maxValue = ((Number) entry.getKey()).doubleValue();
                }
            }
        }
        return new Double(maxValue);
    }
} // end class wafermaprenderer
| |
package picard.illumina.parser.readers;
import picard.PicardException;
import picard.util.UnsignedTypeUtil;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
/**
 * Reads a TileMetricsOut file commonly found in the InterOp directory of an Illumina Run Folder. This
 * reader DOES NOT try to interpret the metrics code or metrics value but instead returns them in what
 * is essentially a struct.
 *
 * File Format:
 * byte 0 (unsigned byte) = The version number; it must be a supported version (see
 *                          {@link TileMetricsVersion}) or an exception will be thrown
 * byte 1 (unsigned byte) = The record size; it must match the record size for the detected
 *                          version or an exception will be thrown
 * remaining header bytes = version-dependent (a version 3 header additionally carries a 4-byte
 *                          cluster density); the header is followed by fixed-size records that
 *                          get converted into IlluminaTileMetrics objects
 *
 * TileMetrics Record Format (version 2; 10 bytes per record):
 * byte 0-1 (unsigned short) = lane number
 * byte 2-3 (unsigned short) = tile number
 * byte 4-5 (unsigned short) = metrics code, see Theory of RTA document by Illumina for definition
 * byte 6-9 (float) = metrics value, see Theory of RTA document by Illumina for definition
 */
public class TileMetricsOutReader implements Iterator<TileMetricsOutReader.IlluminaTileMetrics> {
    private final BinaryFileIterator<ByteBuffer> bbIterator;

    // Cluster density read from the version-3 header; stays 0 for version-2 files.
    private float density;
    private final TileMetricsVersion version;

    /**
     * Return a TileMetricsOutReader for the specified file.
     *
     * @param tileMetricsOutFile The file to read
     * @throws PicardException if the file declares an unsupported version, or if the header's
     *                         version/record-size bytes disagree with the detected version
     */
    public TileMetricsOutReader(final File tileMetricsOutFile) {
        // Peek at the first byte to detect the file version before choosing header/record sizes.
        final int versionInt = UnsignedTypeUtil.uByteToInt(MMapBackedIteratorFactory.getByteIterator(1, tileMetricsOutFile).getHeaderBytes().get());
        this.version = TileMetricsVersion.findByKey(versionInt);
        if (this.version == null) {
            // Fail fast with a clear message; previously an unknown version caused an NPE below.
            throw new PicardException("Unsupported TileMetricsOut version (" + versionInt + ") in file " + tileMetricsOutFile.getAbsolutePath());
        }

        bbIterator = MMapBackedIteratorFactory.getByteBufferIterator(version.headerSize, version.recordSize, tileMetricsOutFile);
        final ByteBuffer header = bbIterator.getHeaderBytes();

        final int actualVersion = UnsignedTypeUtil.uByteToInt(header.get());
        if (actualVersion != version.version) {
            throw new PicardException("TileMetricsOutReader expects the version number to be " + version.version + ". Actual Version in Header(" + actualVersion + ")");
        }

        final int actualRecordSize = UnsignedTypeUtil.uByteToInt(header.get());
        if (version.recordSize != actualRecordSize) {
            throw new PicardException("TileMetricsOutReader expects the record size to be " + version.recordSize + ". Actual Record Size in Header(" + actualRecordSize + ")");
        }

        if (version == TileMetricsVersion.THREE) {
            this.density = UnsignedTypeUtil.uIntToFloat(header.getInt());
        }
    }

    public boolean hasNext() {
        return bbIterator.hasNext();
    }

    public IlluminaTileMetrics next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        return new IlluminaTileMetrics(bbIterator.next(), version);
    }

    public void remove() {
        throw new UnsupportedOperationException();
    }

    /** Returns the cluster density from the header (populated for version 3 only; 0 otherwise). */
    public float getDensity() {
        return density;
    }

    public int getVersion() { return version.version; }

    /**
     * IlluminaTileMetrics corresponds to a single record in a TileMetricsOut file.
     */
    public static class IlluminaTileMetrics {
        private final IlluminaLaneTileCode laneTileCode;
        private final float metricValue;
        // Record-type marker, populated for version-3 records only ('t' marks a cluster record).
        private byte type;

        public IlluminaTileMetrics(final ByteBuffer bb, TileMetricsVersion version) {
            if (version == TileMetricsVersion.THREE) {
                // Version 3 record layout: uint16 lane, int32 tile, byte type, float value.
                // This layout carries no metric code, so it is recorded as 0.
                this.laneTileCode = new IlluminaLaneTileCode(UnsignedTypeUtil.uShortToInt(bb.getShort()), bb.getInt(), 0);
                this.type = bb.get();
                this.metricValue = bb.getFloat();
            } else {
                // Version 2 record layout: uint16 lane, uint16 tile, uint16 metric code, float value.
                this.laneTileCode = new IlluminaLaneTileCode(UnsignedTypeUtil.uShortToInt(bb.getShort()), UnsignedTypeUtil.uShortToInt(bb.getShort()),
                        UnsignedTypeUtil.uShortToInt(bb.getShort()));
                this.metricValue = bb.getFloat();
            }
        }

        public IlluminaTileMetrics(final int laneNumber, final int tileNumber, final int metricCode, final float metricValue) {
            this.laneTileCode = new IlluminaLaneTileCode(laneNumber, tileNumber, metricCode);
            this.metricValue = metricValue;
        }

        public int getLaneNumber() {
            return laneTileCode.getLaneNumber();
        }

        public int getTileNumber() {
            return laneTileCode.getTileNumber();
        }

        public int getMetricCode() {
            return laneTileCode.getMetricCode();
        }

        public float getMetricValue() {
            return metricValue;
        }

        public IlluminaLaneTileCode getLaneTileCode() {
            return laneTileCode;
        }

        @Override
        public boolean equals(final Object o) {
            if (o instanceof IlluminaTileMetrics) {
                final IlluminaTileMetrics that = (IlluminaTileMetrics) o;
                // Compare lane/tile/code by value; the previous reference comparison (==)
                // broke the equals/hashCode contract because hashCode() already uses field
                // values. Identical tile data should render exactly the same float, so ==
                // on the metric value is intentional.
                return laneTileCode.equals(that.laneTileCode) && metricValue == that.metricValue;
            } else {
                return false;
            }
        }

        @Override
        public int hashCode() {
            return String.format("%s:%s:%s:%s", laneTileCode.getLaneNumber(), laneTileCode.getTileNumber(), laneTileCode.getMetricCode(), metricValue).hashCode(); // Slow but adequate.
        }

        /** True for version-3 records whose type byte marks them as cluster records. */
        public boolean isClusterRecord() {
            return type == 't';
        }
    }

    /** Helper class which captures the combination of a lane, tile & metric code */
    public static class IlluminaLaneTileCode {
        private final int laneNumber;
        private final int tileNumber;
        private final int metricCode;

        public IlluminaLaneTileCode(final int laneNumber, final int tileNumber, final int metricCode) {
            this.laneNumber = laneNumber;
            this.tileNumber = tileNumber;
            this.metricCode = metricCode;
        }

        public int getLaneNumber() {
            return laneNumber;
        }

        public int getTileNumber() {
            return tileNumber;
        }

        public int getMetricCode() {
            return metricCode;
        }

        @Override
        public boolean equals(final Object o) {
            if (o instanceof IlluminaLaneTileCode) {
                final IlluminaLaneTileCode that = (IlluminaLaneTileCode) o;
                return laneNumber == that.laneNumber && tileNumber == that.tileNumber && metricCode == that.metricCode;
            } else {
                return false;
            }
        }

        @Override
        public int hashCode() {
            int result = laneNumber;
            result = 31 * result + tileNumber;
            result = 31 * result + metricCode;
            return result;
        }
    }

    /** Supported TileMetricsOut file versions with their header and record sizes in bytes. */
    public enum TileMetricsVersion {
        TWO(2, 2, 10),
        THREE(3, 6, 15);

        public final int version;
        private final int headerSize;
        private final int recordSize;
        private static final Map<Integer, TileMetricsVersion> versionLookupMap;

        TileMetricsVersion(int version, int headerSize, int recordSize) {
            this.version = version;
            this.headerSize = headerSize;
            this.recordSize = recordSize;
        }

        static {
            versionLookupMap = new HashMap<>();
            for (TileMetricsVersion v : TileMetricsVersion.values()) {
                versionLookupMap.put(v.version, v);
            }
        }

        /** Returns the version for the given version number, or {@code null} if unsupported. */
        public static TileMetricsVersion findByKey(int i) {
            return versionLookupMap.get(i);
        }
    }
}
| |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.newsecurity.providers;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.domain.SecureSiteUrl;
import com.thoughtworks.go.domain.config.ConfigurationProperty;
import com.thoughtworks.go.plugin.access.authorization.AuthorizationExtension;
import com.thoughtworks.go.plugin.access.authorization.AuthorizationMetadataStore;
import com.thoughtworks.go.plugin.domain.authorization.*;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.server.newsecurity.models.AccessToken;
import com.thoughtworks.go.server.newsecurity.models.AuthenticationToken;
import com.thoughtworks.go.server.security.AuthorityGranter;
import com.thoughtworks.go.server.security.OnlyKnownUsersAllowedException;
import com.thoughtworks.go.server.security.userdetail.GoUserPrinciple;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.PluginRoleService;
import com.thoughtworks.go.server.service.SecurityService;
import com.thoughtworks.go.server.service.UserService;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TestingClock;
import org.junit.Rule;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.migrationsupport.rules.EnableRuleMigrationSupport;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import static com.thoughtworks.go.server.security.GoAuthority.ROLE_USER;
import static java.util.Arrays.asList;
import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code WebBasedPluginAuthenticationProvider}, which delegates
 * authentication to a web-based authorization plugin (here "github.oauth")
 * through the {@link AuthorizationExtension}. Covers initial authentication,
 * re-authentication from an existing {@link AuthenticationToken}, construction
 * of the authorization server redirect URL, and access-token fetching.
 */
class WebBasedPluginAuthenticationProviderTest {
    private static final String PLUGIN_ID = "github.oauth";
    // Credentials shared by every test: a single "access_token" entry.
    private static final AccessToken CREDENTIALS = new AccessToken(singletonMap("access_token", "some-token"));
    private AuthorizationExtension authorizationExtension;
    private PluginRoleService pluginRoleService;
    private TestingClock clock;
    private WebBasedPluginAuthenticationProvider authenticationProvider;
    private SecurityConfig securityConfig;
    private UserService userService;
    private AuthorityGranter authorityGranter;
    // The single auth config ("github") registered in securityConfig by setUp().
    private SecurityAuthConfig githubSecurityAuthconfig;
    private GoConfigService goConfigService;
    // Wires the provider with mocked collaborators. AuthorityGranter is a spy
    // (not a mock) so its call ordering relative to PluginRoleService can be verified.
    @BeforeEach
    void setUp() {
        SecurityService securityService = mock(SecurityService.class);
        authorityGranter = spy(new AuthorityGranter(securityService));
        goConfigService = mock(GoConfigService.class);
        userService = mock(UserService.class);
        authorizationExtension = mock(AuthorizationExtension.class);
        pluginRoleService = mock(PluginRoleService.class);
        clock = new TestingClock();
        securityConfig = new SecurityConfig(true);
        githubSecurityAuthconfig = new SecurityAuthConfig("github", PLUGIN_ID);
        securityConfig.securityAuthConfigs().add(githubSecurityAuthconfig);
        when(goConfigService.security()).thenReturn(securityConfig);
        addPluginSupportingWebBasedAuthentication(PLUGIN_ID);
        authenticationProvider = new WebBasedPluginAuthenticationProvider(authorizationExtension, authorityGranter, goConfigService, pluginRoleService, userService, clock);
    }
    // Resets singleton state (AuthorizationMetadataStore is populated in setUp)
    // so tests do not leak plugin info into each other.
    @AfterEach
    void tearDown() {
        com.thoughtworks.go.ClearSingleton.clearSingletons();
    }
    @Nested
    @EnableRuleMigrationSupport
    class Authenticate {
        @Rule
        public ExpectedException thrown = ExpectedException.none();
        // Plugin should receive the auth configs and roles belonging to it.
        @Test
        void shouldAuthenticateUserAgainstTheSpecifiedPlugin() {
            PluginRoleConfig adminRole = new PluginRoleConfig("admin", "github", new ConfigurationProperty());
            securityConfig.addRole(adminRole);
            authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            verify(authorizationExtension).authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubSecurityAuthconfig), singletonList(adminRole));
        }
        // Auth configs are tried one at a time; a success short-circuits the rest.
        @Test
        void inCaseOfMultipleAuthConfigsOnSuccessfulAuthenticationShouldNotTryAuthenticatingUserUsingRemainingAuthConfig() {
            User user = new User("username", "displayname", "emailId");
            SecurityAuthConfig githubPublic = new SecurityAuthConfig("github_public", PLUGIN_ID);
            SecurityAuthConfig githubEnterprise = new SecurityAuthConfig("github_enterprise", PLUGIN_ID);
            PluginRoleConfig adminRole = new PluginRoleConfig("admin", githubPublic.getId(), new ConfigurationProperty());
            PluginRoleConfig operatorRole = new PluginRoleConfig("operator", githubEnterprise.getId(), new ConfigurationProperty());
            securityConfig.securityAuthConfigs().clear();
            securityConfig.securityAuthConfigs().add(githubPublic);
            securityConfig.securityAuthConfigs().add(githubEnterprise);
            securityConfig.addRole(adminRole);
            securityConfig.addRole(operatorRole);
            when(authorizationExtension.authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubPublic), singletonList(adminRole))).thenReturn(new AuthenticationResponse(user, asList("admin")));
            final AuthenticationToken<AccessToken> authenticationToken = authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            assertThat(authenticationToken.getCredentials()).isEqualTo(CREDENTIALS);
            assertThat(authenticationToken.getPluginId()).isEqualTo(PLUGIN_ID);
            assertThat(authenticationToken.isAuthenticated(clock, new SystemEnvironment()))
                    .isEqualTo(true);
            assertThat(authenticationToken.getUser().getAuthorities())
                    .containsExactly(ROLE_USER.asAuthority());
            verify(authorizationExtension).authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubPublic), singletonList(adminRole));
            verify(authorizationExtension, never()).authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubEnterprise), singletonList(operatorRole));
        }
        // Successful plugin authentication should add/update the GoCD user record.
        @Test
        void shouldCreateUserIfDoesNotExist() {
            final User user = new User("username", "displayname", "emailId");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            ArgumentCaptor<com.thoughtworks.go.domain.User> argumentCaptor = ArgumentCaptor.forClass(com.thoughtworks.go.domain.User.class);
            verify(userService).addOrUpdateUser(argumentCaptor.capture());
            com.thoughtworks.go.domain.User capturedUser = argumentCaptor.getValue();
            assertThat(capturedUser.getUsername().getUsername().toString())
                    .isEqualTo(user.getUsername());
            assertThat(capturedUser.getDisplayName())
                    .isEqualTo(user.getDisplayName());
            assertThat(capturedUser.getEmail())
                    .isEqualTo(user.getEmailId());
        }
        // Roles returned by the plugin are forwarded to PluginRoleService.
        @Test
        void shouldAssignRolesToUser() {
            final User user = new User("username", "displayname", "emailId");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            verify(pluginRoleService).updatePluginRoles(PLUGIN_ID, user.getUsername(), CaseInsensitiveString.list("admin"));
        }
        @Test
        void shouldReturnAuthenticationTokenOnSuccessfulAuthorization() {
            final User user = new User("username", "displayname", "emailId");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            final AuthenticationToken<AccessToken> authenticationToken = authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            assertThat(authenticationToken.getCredentials()).isEqualTo(CREDENTIALS);
            assertThat(authenticationToken.getPluginId()).isEqualTo(PLUGIN_ID);
            assertThat(authenticationToken.getUser().getAuthorities())
                    .containsExactly(ROLE_USER.asAuthority());
            assertThat(authenticationToken.isAuthenticated(clock, new SystemEnvironment()))
                    .isEqualTo(true);
        }
        @Test
        void shouldReturnAuthTokenWithUserDetails() {
            final User user = new User("username", "displayname", "emailId");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            final AuthenticationToken<AccessToken> authenticationToken = authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            assertThat(authenticationToken.getUser().getDisplayName()).isEqualTo(user.getDisplayName());
            assertThat(authenticationToken.getUser().getUsername()).isEqualTo(user.getUsername());
            assertThat(authenticationToken.getUser().getAuthorities())
                    .containsExactly(ROLE_USER.asAuthority());
        }
        // When the plugin omits a display name, the username is used as a fallback.
        @Test
        void shouldEnsureUserDetailsInAuthTokenHasDisplayName() {
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(new User("username", null, "email"), asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            final AuthenticationToken<AccessToken> authenticationToken = authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            assertThat(authenticationToken.getUser().getDisplayName()).isEqualTo(authenticationResponse.getUser().getUsername());
        }
        // Ordering matters: roles must be assigned before authorities are granted,
        // since granted authorities may depend on the user's plugin roles.
        @Test
        void shouldAssignRoleBeforeGrantingAnAuthority() {
            final User user = new User("username", null, "email");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            final InOrder inOrder = inOrder(pluginRoleService, authorityGranter);
            authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            inOrder.verify(pluginRoleService).updatePluginRoles(PLUGIN_ID, user.getUsername(), asList(new CaseInsensitiveString("admin")));
            inOrder.verify(authorityGranter).authorities(user.getUsername());
        }
        // Full pipeline order: plugin auth -> user creation -> role update -> authority grant.
        @Test
        void shouldPerformOperationInSequence() {
            final User user = new User("username", null, "email");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(eq(PLUGIN_ID), anyMap(), anyList(), anyList())).thenReturn(authenticationResponse);
            final InOrder inOrder = inOrder(authorizationExtension, pluginRoleService, authorityGranter, userService);
            authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
            inOrder.verify(authorizationExtension).authenticateUser(eq(PLUGIN_ID), eq(CREDENTIALS.getCredentials()), anyList(), anyList());
            inOrder.verify(userService).addOrUpdateUser(any(com.thoughtworks.go.domain.User.class));
            inOrder.verify(pluginRoleService).updatePluginRoles(PLUGIN_ID, user.getUsername(), asList(new CaseInsensitiveString("admin")));
            inOrder.verify(authorityGranter).authorities(user.getUsername());
        }
        // UserService may reject unknown users; the exception must propagate to the caller.
        @Test
        void shouldErrorOutWhenAutoRegistrationOfNewUserIsDisabledByAdmin() {
            final User user = new User("username", null, "email");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubSecurityAuthconfig), emptyList())).thenReturn(authenticationResponse);
            doThrow(new OnlyKnownUsersAllowedException("username", "Please ask the administrator to add you to GoCD.")).when(userService).addOrUpdateUser(any());
            thrown.expect(OnlyKnownUsersAllowedException.class);
            thrown.expectMessage("Please ask the administrator to add you to GoCD.");
            authenticationProvider.authenticate(CREDENTIALS, PLUGIN_ID);
        }
    }
    @Nested
    @EnableRuleMigrationSupport
    class ReAuthenticate {
        @Rule
        public ExpectedException thrown = ExpectedException.none();
        // Re-authentication re-runs plugin auth with the token's stored credentials.
        @Test
        void shouldReAuthenticateUserUsingAuthenticationToken() {
            final GoUserPrinciple user = new GoUserPrinciple("bob", "Bob");
            final AuthenticationToken<AccessToken> oldAuthenticationToken = new AuthenticationToken<>(user, CREDENTIALS, PLUGIN_ID, clock.currentTimeMillis(), "github");
            authenticationProvider.reauthenticate(oldAuthenticationToken);
            verify(authorizationExtension).authenticateUser(eq(PLUGIN_ID), eq(CREDENTIALS.getCredentials()), eq(singletonList(githubSecurityAuthconfig)), anyList());
        }
        @Test
        void shouldReturnNullInCaseOfErrors() {
            final GoUserPrinciple user = new GoUserPrinciple("bob", "Bob");
            final AuthenticationToken<AccessToken> oldAuthenticationToken = new AuthenticationToken<>(user, CREDENTIALS, PLUGIN_ID, clock.currentTimeMillis(), "github");
            when(authorizationExtension.authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubSecurityAuthconfig), emptyList()))
                    .thenReturn(null);
            final AuthenticationToken<AccessToken> authenticationToken = authenticationProvider.reauthenticate(oldAuthenticationToken);
            assertThat(authenticationToken).isNull();
        }
        // If the original auth config is gone, fall back to the plugin's remaining configs.
        @Test
        void shouldTryToReAuthenticateUserAgainWhenPreviouslyAuthenticatedAuthConfigForThePluginIsDeleted() {
            final GoUserPrinciple user = new GoUserPrinciple("bob", "Bob");
            securityConfig.securityAuthConfigs().remove(githubSecurityAuthconfig);
            final SecurityAuthConfig githubPrivateSecurityConfig = new SecurityAuthConfig("github-private", PLUGIN_ID);
            securityConfig.securityAuthConfigs().add(githubPrivateSecurityConfig);
            final AuthenticationToken<AccessToken> oldAuthenticationToken = new AuthenticationToken<>(user, CREDENTIALS, PLUGIN_ID, clock.currentTimeMillis(), "github");
            authenticationProvider.reauthenticate(oldAuthenticationToken);
            verify(authorizationExtension, never()).authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubSecurityAuthconfig), emptyList());
            verify(authorizationExtension).authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubPrivateSecurityConfig), emptyList());
        }
        @Test
        void shouldErrorOutWhenAutoRegistrationOfNewUserIsDisabledByAdmin() {
            final GoUserPrinciple goUserPrinciple = new GoUserPrinciple("bob", "Bob");
            final AuthenticationToken<AccessToken> oldAuthenticationToken = new AuthenticationToken<>(goUserPrinciple, CREDENTIALS, PLUGIN_ID, clock.currentTimeMillis(), "github");
            final User user = new User("username", null, "email");
            AuthenticationResponse authenticationResponse = new AuthenticationResponse(user, asList("admin"));
            when(authorizationExtension.authenticateUser(PLUGIN_ID, CREDENTIALS.getCredentials(), singletonList(githubSecurityAuthconfig), emptyList())).thenReturn(authenticationResponse);
            doThrow(new OnlyKnownUsersAllowedException("username", "Please ask the administrator to add you to GoCD.")).when(userService).addOrUpdateUser(any());
            thrown.expect(OnlyKnownUsersAllowedException.class);
            thrown.expectMessage("Please ask the administrator to add you to GoCD.");
            authenticationProvider.reauthenticate(oldAuthenticationToken);
        }
    }
    // A configured (preferably secure) site URL overrides the request's base URL.
    @Test
    void shouldGetAuthorizationServerUrlWithConfiguredSecureSiteUrl() {
        final ServerConfig serverConfig = mock(ServerConfig.class);
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(serverConfig.hasAnyUrlConfigured()).thenReturn(true);
        when(serverConfig.getSiteUrlPreferablySecured()).thenReturn(new SecureSiteUrl("https://foo.bar.com"));
        authenticationProvider.getAuthorizationServerUrl(PLUGIN_ID, "https://example.com");
        verify(authorizationExtension, never()).getAuthorizationServerUrl(PLUGIN_ID, singletonList(githubSecurityAuthconfig), "https://example.com");
        verify(authorizationExtension).getAuthorizationServerUrl(PLUGIN_ID, singletonList(githubSecurityAuthconfig), "https://foo.bar.com");
    }
    @Test
    void shouldGetAuthorizationServerUrl() {
        final ServerConfig serverConfig = mock(ServerConfig.class);
        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(serverConfig.hasAnyUrlConfigured()).thenReturn(false);
        authenticationProvider.getAuthorizationServerUrl(PLUGIN_ID, "https://example.com");
        verify(authorizationExtension).getAuthorizationServerUrl(PLUGIN_ID, singletonList(githubSecurityAuthconfig), "https://example.com");
    }
    @Test
    void shouldFetchAccessTokenFromPlugin() {
        when(authorizationExtension.fetchAccessToken(PLUGIN_ID, emptyMap(), singletonMap("code", "some-code"), singletonList(githubSecurityAuthconfig))).thenReturn(singletonMap("access_token", "some-access-token"));
        final AccessToken accessToken = authenticationProvider.fetchAccessToken(PLUGIN_ID, emptyMap(), singletonMap("code", "some-code"));
        assertThat(accessToken.getCredentials())
                .containsEntry("access_token", "some-access-token")
                .hasSize(1);
    }
    // Registers a plugin descriptor with Web auth capability in the metadata store
    // so the provider recognizes PLUGIN_ID as a web-based authorization plugin.
    private void addPluginSupportingWebBasedAuthentication(String pluginId) {
        AuthorizationPluginInfo pluginInfo = new AuthorizationPluginInfo(
                new GoPluginDescriptor(pluginId, null, null, null, null, false), null, null, null,
                new Capabilities(SupportedAuthType.Web, true, false, false));
        AuthorizationMetadataStore.instance().setPluginInfo(pluginInfo);
    }
}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.media_router.caf;
import static org.chromium.components.media_router.caf.CastUtils.isSameOrigin;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.mediarouter.media.MediaRouter;
import com.google.android.gms.cast.framework.CastSession;
import org.chromium.base.Log;
import org.chromium.components.media_router.BrowserMediaRouter;
import org.chromium.components.media_router.ClientRecord;
import org.chromium.components.media_router.MediaRoute;
import org.chromium.components.media_router.MediaRouteManager;
import org.chromium.components.media_router.MediaRouteProvider;
import org.chromium.components.media_router.MediaSink;
import org.chromium.components.media_router.MediaSource;
import java.util.HashMap;
import java.util.Map;
/**
 * A {@link MediaRouteProvider} implementation for Cast devices and applications, using Cast v3 API.
 *
 * Besides route management it tracks one {@link ClientRecord} per Cast client id (kept in sync
 * with {@code mRoutes}) and uses those records to decide auto-join eligibility and to queue
 * messages for clients that are not yet connected.
 */
public class CafMediaRouteProvider extends CafBaseMediaRouteProvider {
    private static final String TAG = "CafMRP";
    // Presentation id that requests joining an existing session via the auto-join policy.
    private static final String AUTO_JOIN_PRESENTATION_ID = "auto-join";
    // Prefix for presentation ids that address a specific Cast session by its id.
    private static final String PRESENTATION_ID_SESSION_ID_PREFIX = "cast-session_";
    private CreateRouteRequestInfo mPendingCreateRouteRequestInfo;
    @VisibleForTesting
    ClientRecord mLastRemovedRouteRecord;
    // The records for clients, which must match mRoutes. The last removed record is saved
    // (mLastRemovedRouteRecord) so auto-join can still match after the route is gone.
    private final Map<String, ClientRecord> mClientIdToRecords =
            new HashMap<String, ClientRecord>();
    @VisibleForTesting
    CafMessageHandler mMessageHandler;
    // The session controller which is always attached to the current CastSession.
    private final CastSessionController mSessionController;
    public static CafMediaRouteProvider create(MediaRouteManager manager) {
        return new CafMediaRouteProvider(BrowserMediaRouter.getAndroidMediaRouter(), manager);
    }
    public Map<String, ClientRecord> getClientIdToRecords() {
        return mClientIdToRecords;
    }
    /**
     * Joins an existing Cast session for the given source/presentation, reporting an error to
     * the manager if the source is unsupported, no session exists, or no route matches.
     */
    @Override
    public void joinRoute(
            String sourceId, String presentationId, String origin, int tabId, int nativeRequestId) {
        CastMediaSource source = CastMediaSource.from(sourceId);
        // Joining requires a parseable Cast source with a client id.
        if (source == null || source.getClientId() == null) {
            mManager.onJoinRouteRequestError("Unsupported presentation URL", nativeRequestId);
            return;
        }
        if (!hasSession()) {
            mManager.onJoinRouteRequestError("No presentation", nativeRequestId);
            return;
        }
        if (!canJoinExistingSession(presentationId, origin, tabId, source)) {
            mManager.onJoinRouteRequestError("No matching route", nativeRequestId);
            return;
        }
        MediaRoute route =
                new MediaRoute(sessionController().getSink().getId(), sourceId, presentationId);
        addRoute(route, origin, tabId, nativeRequestId, /* wasLaunched= */ false);
    }
    // TODO(zqzhang): the clientRecord/route management is not clean and the logic seems to be
    // problematic.
    /** Closes the route and, if it had a client record, notifies that client with "stop". */
    @Override
    public void closeRoute(String routeId) {
        // Capture whether the route was known before super.closeRoute() mutates mRoutes.
        boolean isRouteInRecord = mRoutes.containsKey(routeId);
        super.closeRoute(routeId);
        if (!isRouteInRecord) return;
        ClientRecord client = getClientRecordByRouteId(routeId);
        if (client != null) {
            MediaSink sink = sessionController().getSink();
            if (sink != null) {
                mMessageHandler.sendReceiverActionToClient(routeId, sink, client.clientId, "stop");
            }
        }
    }
    /** Forwards a client's string message to the message handler; ignores unknown routes. */
    @Override
    public void sendStringMessage(String routeId, String message) {
        Log.d(TAG, "Received message from client: %s", message);
        if (!mRoutes.containsKey(routeId)) {
            return;
        }
        mMessageHandler.handleMessageFromClient(message);
    }
    @Override
    protected MediaSource getSourceFromId(String sourceId) {
        return CastMediaSource.from(sourceId);
    }
    @Override
    public BaseSessionController sessionController() {
        return mSessionController;
    }
    /**
     * Sends a message to the client with the given id. If the client is not yet connected the
     * message is queued; otherwise any queued messages are flushed first to preserve ordering.
     */
    public void sendMessageToClient(String clientId, String message) {
        ClientRecord clientRecord = mClientIdToRecords.get(clientId);
        if (clientRecord == null) return;
        if (!clientRecord.isConnected) {
            Log.d(TAG, "Queueing message to client %s: %s", clientId, message);
            clientRecord.pendingMessages.add(message);
            return;
        }
        // Flush pending messages, if any, before sending the current message to ensure that the
        // messages are delivered in order.
        flushPendingMessagesToClient(clientRecord);
        Log.d(TAG, "Sending message to client %s: %s", clientId, message);
        mManager.onMessage(clientRecord.routeId, message);
    }
    /** Flushes all pending messages in record to a client. */
    public void flushPendingMessagesToClient(ClientRecord clientRecord) {
        for (String message : clientRecord.pendingMessages) {
            Log.d(TAG, "Deqeueing message for client %s: %s", clientRecord.clientId, message);
            mManager.onMessage(clientRecord.routeId, message);
        }
        clientRecord.pendingMessages.clear();
    }
    @NonNull
    public CafMessageHandler getMessageHandler() {
        return mMessageHandler;
    }
    /** Notifies all clients of the "cast" receiver action and requests media status. */
    @Override
    protected void handleSessionStart(CastSession session, String sessionId) {
        super.handleSessionStart(session, sessionId);
        for (ClientRecord clientRecord : mClientIdToRecords.values()) {
            // Should be exactly one instance of MediaRoute/ClientRecord at this moment.
            mMessageHandler.sendReceiverActionToClient(clientRecord.routeId,
                    sessionController().getSink(), clientRecord.clientId, "cast");
        }
        mMessageHandler.onSessionStarted();
        sessionController().getSession().getRemoteMediaClient().requestStatus();
    }
    /** Adds the route and creates a client record for its client id if one doesn't exist yet. */
    @Override
    protected void addRoute(
            MediaRoute route, String origin, int tabId, int nativeRequestId, boolean wasLaunched) {
        super.addRoute(route, origin, tabId, nativeRequestId, wasLaunched);
        CastMediaSource source = CastMediaSource.from(route.sourceId);
        final String clientId = source.getClientId();
        if (clientId == null || mClientIdToRecords.containsKey(clientId)) return;
        mClientIdToRecords.put(clientId,
                new ClientRecord(route.id, clientId, source.getApplicationId(),
                        source.getAutoJoinPolicy(), origin, tabId));
    }
    /** Removes the route's client record, remembering it for later auto-join matching. */
    @Override
    protected void removeRouteFromRecord(String routeId) {
        ClientRecord record = getClientRecordByRouteId(routeId);
        if (record != null) {
            mLastRemovedRouteRecord = mClientIdToRecords.remove(record.clientId);
        }
        super.removeRouteFromRecord(routeId);
    }
    @Nullable
    private ClientRecord getClientRecordByRouteId(String routeId) {
        for (ClientRecord record : mClientIdToRecords.values()) {
            if (record.routeId.equals(routeId)) return record;
        }
        return null;
    }
    private CafMediaRouteProvider(MediaRouter androidMediaRouter, MediaRouteManager manager) {
        super(androidMediaRouter, manager);
        mSessionController = new CastSessionController(this);
        mMessageHandler = new CafMessageHandler(this, mSessionController);
    }
    /**
     * Whether the given presentation can join the existing session: either via auto-join
     * policy, by naming the current session id, or by matching an existing route's
     * presentation id.
     */
    @VisibleForTesting
    boolean canJoinExistingSession(
            String presentationId, String origin, int tabId, CastMediaSource source) {
        if (AUTO_JOIN_PRESENTATION_ID.equals(presentationId)) {
            return canAutoJoin(source, origin, tabId);
        }
        if (presentationId.startsWith(PRESENTATION_ID_SESSION_ID_PREFIX)) {
            String sessionId = presentationId.substring(PRESENTATION_ID_SESSION_ID_PREFIX.length());
            return sessionId != null && sessionId.equals(sessionController().getSessionId());
        }
        for (MediaRoute route : mRoutes.values()) {
            if (route.presentationId.equals(presentationId)) return true;
        }
        return false;
    }
    /**
     * Auto-join eligibility: never for page-scoped policy; the application id must match the
     * current session's; then the requesting origin (and, for tab-and-origin scope, the tab)
     * must match either an existing client record or the last removed one.
     */
    private boolean canAutoJoin(CastMediaSource source, String origin, int tabId) {
        if (source.getAutoJoinPolicy().equals(CastMediaSource.AUTOJOIN_PAGE_SCOPED)) return false;
        CastMediaSource currentSource = (CastMediaSource) sessionController().getSource();
        if (!currentSource.getApplicationId().equals(source.getApplicationId())) return false;
        // No live client records: fall back to the last removed record, if any.
        if (mClientIdToRecords.isEmpty() && mLastRemovedRouteRecord != null) {
            return isSameOrigin(origin, mLastRemovedRouteRecord.origin)
                    && tabId == mLastRemovedRouteRecord.tabId;
        }
        if (mClientIdToRecords.isEmpty()) return false;
        ClientRecord client = mClientIdToRecords.values().iterator().next();
        if (source.getAutoJoinPolicy().equals(CastMediaSource.AUTOJOIN_ORIGIN_SCOPED)) {
            return isSameOrigin(origin, client.origin);
        }
        if (source.getAutoJoinPolicy().equals(CastMediaSource.AUTOJOIN_TAB_AND_ORIGIN_SCOPED)) {
            return isSameOrigin(origin, client.origin) && tabId == client.tabId;
        }
        return false;
    }
}
| |
/************************************************************************
*
* 1. This software is for the purpose of demonstrating one of many
* ways to implement the algorithms in Introduction to Algorithms,
* Second edition, by Thomas H. Cormen, Charles E. Leiserson, Ronald
* L. Rivest, and Clifford Stein. This software has been tested on a
* limited set of test cases, but it has not been exhaustively tested.
* It should not be used for mission-critical applications without
* further testing.
*
* 2. McGraw-Hill licenses and authorizes you to use this software
* only on a microcomputer located within your own facilities.
*
 * 3. You will abide by the Copyright Law of the United States.
*
* 4. You may prepare a derivative version of this software provided
 * that your source code indicates that it is based on this software and
* also that you have made changes to it.
*
* 5. If you believe that you have found an error in this software,
* please send email to clrs-java-bugs@mhhe.com. If you have a
* suggestion for an improvement, please send email to
* clrs-java-suggestions@mhhe.com.
*
***********************************************************************/
package timeBench.data.util;
/**
* Implements the {@link Dictionary} interface as a binary search tree
* from Chapter 12 of <i>Introduction to Algorithms</i>, Second
* edition. Objects inserted into a binary search tree must implement
* the <code>Comparable</code> interface.
*
* <p>
*
* When extending this class, you must instantiate a new
* <code>nil</code> of the proper type and override constructors along
* with the other methods. See {@link RedBlackTree} for an example.
*/
abstract class BinarySearchTree
{
/** Root of the binary search tree. */
protected Node root;
/** Sentinel, replaces NIL in the textbook's code. */
protected Node nil;
/**
* Interface for when we visit a node during a walk.
*/
public interface Visitor
{
/**
* Perform some action upon visiting the node.
*
* @param handle Handle that identifies the node being visited.
*/
public Object visit(Object handle);
}
/**
* Inner class for a node of a binary search tree. May be
* extended in subclasses of <code>BinarySearchTree</code>.
*/
protected class Node implements Comparable<Node>
{
/** The data stored in the node. */
protected int row;
/** The node's parent. */
protected Node parent;
/** The node's left child. */
protected Node left;
/** The node's right child. */
protected Node right;
/**
* Initializes a node with the data and makes other pointers
* nil.
*
* @param data Data to save in the node.
*/
public Node(int row)
{
this.row = row;
parent = nil;
left = nil;
right = nil;
}
/**
* Compares this node to another node. The comparison is
* based on the <code>data</code> instance variables of the
* two nodes.
*
* @param o The other node.
* @return A negative integer if this node is less than
* <code>o</code>; 0 if this node equals <code>o</code>; a
* positive integer if this node is greater than
* <code>o</code>.
* @throws ClassCastException if <code>o</code> is not a
* <code>Node</code>.
*/
// Compare this node to another node.
public int compareTo(Node o)
{
throw new UnsupportedOperationException("compareTo");
// return data.compareTo(((Node) o).data);
}
/**
* Returns the <code>data</code> instance variable of this node
* as a <code>String</code>.
*/
public String toString()
{
if (this == nil)
return "nil";
else
return row + "";
}
/**
* Returns a multiline <code>String</code> representation of
* the subtree rooted at this node, representing the depth of
* each node by two spaces per depth preceding the
* <code>String</code> representation of the node.
*
* @param depth Depth of this node.
*/
public String toString(int depth)
{
String result = "";
if (left != nil)
result += left.toString(depth + 1);
for (int i = 0; i < depth; i++)
result += " ";
result += toString() + "\n";
if (right != nil)
result += right.toString(depth + 1);
return result;
}
}
/**
* Sets the sentinel <code>nil</code> to a given node.
*
* @param node The node that <code>nil</code> is set to.
*/
protected void setNil(Node node)
{
nil = node;
nil.parent = nil;
nil.left = nil;
nil.right = nil;
}
/**
* Creates a binary search tree with just a <code>nil</code>,
* which is the root.
*/
public BinarySearchTree()
{
setNil(new Node(-1));
root = nil;
}
/**
* Returns <code>true</code> if the given node is the sentinel
* <code>nil</code>, <code>false</code> otherwise.
*
* @param node The node that is being asked about.
*/
public boolean isNil(Object node)
{
return node == nil;
}
/**
* Returns a multiline <code>String</code> representation of the
* tree, representing the depth of each node by two spaces per
* depth preceding the <code>String</code> representation of the
* node.
*/
public String toString()
{
return root.toString(0);
}
/**
* Returns the node with the minimum key in the tree.
*
* @return A <code>Node</code> object with the minimum key in the
* tree, or the sentinel <code>nil</code> if the tree is empty.
*/
public int minimum()
{
return treeMinimum(root);
}
/**
* Returns the node with the minimum key in the subtree rooted at
* a node.
*
* @param x Root of the subtree.
* @return A <code>Node</code> object with the minimum key in the
* tree, or the sentinel <code>nil</code> if the tree is empty.
*/
protected int treeMinimum(Node x)
{
while (x.left != nil)
x = x.left;
return x.row;
}
/**
* Returns the row with the "maximum" interval in the index according to the {@link IntervalComparator#compare(long, long, long, long) method}.
*
* @return A <code>Node</code> object with the maximum key in the
* tree, or the sentinel <code>nil</code> if the tree is empty.
*/
public int maximum()
{
return treeMaximum(root);
}
/**
* Returns the row node with the maximum key in the subtree rooted at
* a node.
*
* @param x Root of the subtree.
* @return A <code>Node</code> object with the maximum key in the
* tree, or the sentinel <code>nil</code> if the tree is empty.
*/
protected int treeMaximum(Node x)
{
while (x.right != nil)
x = x.right;
return x.row;
}
/**
* Returns the successor of a given node in an inorder walk of the
* tree.
*
* @param node The node whose successor is returned.
* @return If <code>node</code> has a successor, it is returned.
* Otherwise, return the sentinel <code>nil</code>.
*/
public Object successor(Object node)
{
Node x = (Node) node;
if (x.right != nil)
return treeMinimum(x.right);
Node y = x.parent;
while (y != nil && x == y.right) {
x = y;
y = y.parent;
}
return y;
}
/**
* Returns the predecessor of a given node in an inorder walk of
* the tree.
*
* @param node The node whose predecessor is returned.
* @return If <code>node</code> has a predecessor, it is returned.
* Otherwise, return the sentinel <code>nil</code>.
*/
public Object predecessor(Object node)
{
Node x = (Node) node;
if (x.left != nil)
return treeMaximum(x.left);
Node y = x.parent;
while (y != nil && x == y.left) {
x = y;
y = y.parent;
}
return y;
}
/**
* Inserts data into the tree, creating a new node for this data.
*
* @param data Data to be inserted into the tree.
* @return A reference to the <code>Node</code> object created.
* The <code>Node</code> class is opaque to methods outside this
* class.
*/
public Object insert(int row)
{
Node z = new Node(row);
treeInsert(z);
return z;
}
/**
* Inserts a node into the tree.
*
* @param z The node to insert.
*/
protected void treeInsert(Node z)
{
Node y = nil;
Node x = root;
while (x != nil) {
y = x;
if (z.compareTo(x) <= 0)
x = x.left;
else
x = x.right;
}
z.parent = y;
if (y == nil)
root = z; // the tree had been empty
else {
if (z.compareTo(y) <= 0)
y.left = z;
else
y.right = z;
}
}
}
// $Id: BinarySearchTree.java,v 1.1 2003/10/14 16:56:20 thc Exp $
// $Log: BinarySearchTree.java,v $
// Revision 1.1 2003/10/14 16:56:20 thc
// Initial revision.
//
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.concurrent.Callable;
import org.apache.ignite.Ignite;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.affinity.fair.FairAffinityFunction;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
/**
* Tests affinity assignment for different affinity types.
*/
public class IgniteClientAffinityAssignmentSelfTest extends GridCommonAbstractTest {
    /** Shared in-memory IP finder for all grids started by this test. */
    private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Number of cache partitions. */
    public static final int PARTS = 256;

    /** Whether the next started node hosts the cache; {@code false} starts a client node. */
    private boolean cache;

    /** Affinity function selector: 0 = rendezvous, otherwise fair. */
    private int aff;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);

        if (cache) {
            CacheConfiguration ccfg = new CacheConfiguration();

            ccfg.setCacheMode(CacheMode.PARTITIONED);
            ccfg.setBackups(1);
            ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
            ccfg.setNearConfiguration(null);

            if (aff == 0)
                ccfg.setAffinity(new RendezvousAffinityFunction(false, PARTS));
            else
                ccfg.setAffinity(new FairAffinityFunction(PARTS));

            cfg.setCacheConfiguration(ccfg);
        }
        else
            cfg.setClientMode(true);

        return cfg;
    }

    /**
     * Checks affinity assignment with the rendezvous affinity function.
     *
     * @throws Exception If failed.
     */
    public void testRendezvousAssignment() throws Exception {
        aff = 0;

        checkAffinityFunction();
    }

    /**
     * Checks affinity assignment with the fair affinity function.
     *
     * @throws Exception If failed.
     */
    public void testFairAssignment() throws Exception {
        aff = 1;

        checkAffinityFunction();
    }

    /**
     * Starts three server nodes, then adds and removes client nodes one by
     * one, verifying after each topology change that all nodes agree on the
     * partition-to-node affinity mapping.
     *
     * @throws Exception If failed.
     */
    private void checkAffinityFunction() throws Exception {
        cache = true;

        startGridsMultiThreaded(3, true);

        long topVer = 3;

        try {
            checkAffinity(topVer++);

            cache = false;

            final Ignite ignite3 = startGrid(3);

            assertCacheNotStarted(ignite3);

            assertNotNull(ignite3.cache(null)); // Start client cache.

            ((IgniteKernal)ignite3).getCache(null);

            checkAffinity(topVer++);

            final Ignite ignite4 = startGrid(4);

            assertCacheNotStarted(ignite4);

            assertNotNull(ignite4.cache(null)); // Start client cache.

            ((IgniteKernal)ignite4).getCache(null);

            checkAffinity(topVer++);

            final Ignite ignite5 = startGrid(5); // Node without cache.

            assertCacheNotStarted(ignite5);

            checkAffinity(topVer++);

            stopGrid(5);

            checkAffinity(topVer++);

            stopGrid(4);

            checkAffinity(topVer++);

            stopGrid(3);

            checkAffinity(topVer);
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Asserts that the internal cache has not been started yet on the given
     * node, i.e. {@code getCache(null)} throws {@link IllegalArgumentException}.
     * (Extracted helper: this check was duplicated three times.)
     *
     * @param ignite Node to check.
     */
    private void assertCacheNotStarted(final Ignite ignite) {
        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                ((IgniteKernal)ignite).getCache(null);

                return null;
            }
        }, IllegalArgumentException.class, null);
    }

    /**
     * Verifies that every node that has the cache computes the same
     * primary-and-backup assignment for each partition as node 0.
     *
     * @param topVer Topology version.
     * @throws Exception If failed.
     */
    private void checkAffinity(long topVer) throws Exception {
        awaitTopology(topVer);

        Affinity<Object> aff = grid(0).affinity(null);

        for (Ignite grid : Ignition.allGrids()) {
            try {
                if (grid.cluster().localNode().id().equals(grid(0).localNode().id()))
                    continue;

                Affinity<Object> checkAff = grid.affinity(null);

                for (int p = 0; p < PARTS; p++)
                    assertEquals(aff.mapPartitionToPrimaryAndBackups(p), checkAff.mapPartitionToPrimaryAndBackups(p));
            }
            catch (IllegalArgumentException ignored) {
                // Skip the node without cache.
            }
        }
    }

    /**
     * Waits (up to 5 seconds per node) until every cache-hosting node has
     * observed the given affinity topology version.
     *
     * @param topVer Topology version.
     * @throws Exception If failed.
     */
    private void awaitTopology(final long topVer) throws Exception {
        for (Ignite grid : Ignition.allGrids()) {
            final GridCacheAdapter cache = ((IgniteKernal)grid).internalCache(null);

            if (cache == null)
                continue;

            GridTestUtils.waitForCondition(new GridAbsPredicate() {
                @Override public boolean apply() {
                    return cache.context().affinity().affinityTopologyVersion().topologyVersion() == topVer;
                }
            }, 5000);

            assertEquals(topVer, cache.context().affinity().affinityTopologyVersion().topologyVersion());
        }
    }
}
| |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.biodata.formats.alignment;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.sf.samtools.*;
import org.opencb.biodata.models.alignment.Alignment;
import org.opencb.biodata.models.alignment.exceptions.ShortReferenceSequenceException;
import org.opencb.biodata.models.feature.Region;
import org.opencb.commons.containers.map.QueryOptions;
/**
*
* @author Cristina Yenyxe Gonzalez Garcia <cgonzalez@cipf.es>
*/
public class AlignmentHelper {

    /**
     * Given a SAM record and (optionally) the reference sequence it maps to,
     * returns a list of alignment differences with the reference sequence,
     * derived from the record's CIGAR.
     *
     * TODO: Implement throw ShortReferenceSequenceException
     * TODO: Use MD:Z:XXXX SAM reader attribute
     * TODO: Store original CIGAR
     *
     * @param record The input SAM record (its CIGAR drives the walk)
     * @param refStr Reference sequence; may be {@code null}, in which case
     *               read bases are stored instead of being compared
     * @param maxStoredSequence Max length for stored sequences. Default = 30
     * @return The list of alignment differences
     */
    public static List<Alignment.AlignmentDifference> getDifferencesFromCigar(SAMRecord record, String refStr, int maxStoredSequence) {
        List<Alignment.AlignmentDifference> differences = new LinkedList<>();
        int index = 0, indexRef = 0, indexMismatchBlock = 0, realStart;
        AlignmentBlock blk;

        // Unmapped reads have no usable CIGAR: store the whole read as a
        // single MATCH_MISMATCH difference and return.
        if ((record.getFlags() & Alignment.SEGMENT_UNMAPPED) != 0) {
            Alignment.AlignmentDifference alignmentDifference = new Alignment.AlignmentDifference(
                    0, Alignment.AlignmentDifference.MATCH_MISMATCH, record.getReadString());
            differences.add(alignmentDifference);
            return differences;
        }

        // Walk the CIGAR, keeping parallel cursors into the read (index) and
        // the reference (indexRef).
        for (CigarElement element : record.getCigar().getCigarElements()) {
            int cigarLen = element.getLength();
            String subref = null, subread = null;
            Alignment.AlignmentDifference currentDifference = null;

            switch (element.getOperator()) {
                case EQ:
                    // Exact match: only advance the cursors.
                    blk = record.getAlignmentBlocks().get(indexMismatchBlock);
                    realStart = blk.getReferenceStart() - record.getAlignmentStart();
                    // Picard ignores hard clipping, the indices could be necessary
                    indexRef = realStart >= indexRef ? realStart : indexRef;
                    index = index + record.getAlignmentBlocks().get(indexMismatchBlock).getLength();
                    indexRef = indexRef + record.getAlignmentBlocks().get(indexMismatchBlock).getLength();
                    indexMismatchBlock++;
                    break;
                case M:
                case X:
                    blk = record.getAlignmentBlocks().get(indexMismatchBlock);
                    realStart = blk.getReferenceStart() - record.getAlignmentStart();
                    // Picard ignores hard clipping, the indices could be necessary
                    indexRef = realStart >= indexRef ? realStart : indexRef;
                    subread = record.getReadString().substring(index, Math.min(index + blk.getLength(), record.getReadString().length()));
                    if (refStr == null) {
                        // No reference available: store the read bases as-is.
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.MATCH_MISMATCH, cigarLen);
                        currentDifference.setSeq(subread);
                    } else {
                        // Compare against the reference and emit only mismatching runs.
                        subref = refStr.substring(indexRef, indexRef + blk.getLength());
                        differences.addAll(getMismatchDiff(subref, subread, indexRef));
                    }
                    index = index + record.getAlignmentBlocks().get(indexMismatchBlock).getLength();
                    indexRef = indexRef + record.getAlignmentBlocks().get(indexMismatchBlock).getLength();
                    indexMismatchBlock++;
                    break;
                case I:
                    // Insertion consumes read bases only.
                    if (cigarLen < maxStoredSequence) {
                        subread = record.getReadString().substring(index, index + cigarLen);
                    } else { // Store a truncated prefix with an ellipsis marker
                        subread = record.getReadString().substring(index, index + maxStoredSequence - 3).concat("...");
                    }
                    currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.INSERTION, subread, cigarLen);
                    index = index + cigarLen;
                    break;
                case D:
                    // Deletion consumes reference bases only.
                    if (refStr == null) {
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.DELETION, cigarLen);
                    } else {
                        if (cigarLen < maxStoredSequence) {
                            subref = refStr.substring(indexRef, indexRef + cigarLen);
                        } else { // Store a truncated prefix with an ellipsis marker
                            subref = refStr.substring(indexRef, indexRef + maxStoredSequence - 3).concat("...");
                        }
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.DELETION, subref, cigarLen);
                    }
                    indexRef = indexRef + cigarLen;
                    break;
                case N:
                    // Skipped region (e.g. intron) consumes reference bases only.
                    if (refStr == null) {
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.SKIPPED_REGION, cigarLen);
                    } else {
                        if (cigarLen < maxStoredSequence) {
                            subref = refStr.substring(indexRef, indexRef + cigarLen);
                        } else { // Store a truncated prefix with an ellipsis marker
                            subref = refStr.substring(indexRef, indexRef + maxStoredSequence - 3).concat("...");
                        }
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.SKIPPED_REGION, subref, cigarLen);
                    }
                    indexRef = indexRef + cigarLen;
                    break;
                case S:
                    subread = record.getReadString().substring(index, index + cigarLen);
                    // NOTE(review): the reference is sliced at 'index' here while
                    // other branches use 'indexRef' — confirm this is intentional.
                    if (refStr == null || index + cigarLen > refStr.length()) {
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.SOFT_CLIPPING, subread);
                    } else {
                        subref = refStr.substring(index, index + cigarLen);
                        if (subread.equals(subref)) {
                            // Clipped bases equal the reference: no need to store them.
                            currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.SOFT_CLIPPING, cigarLen);
                        } else {
                            currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.SOFT_CLIPPING, subread);
                        }
                    }
                    index = index + cigarLen;
                    indexRef = indexRef + cigarLen;
                    break;
                case H:
                    // Hard-clipped bases are absent from the read; only the
                    // reference (if any) can be recorded.
                    if (refStr == null) {
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.HARD_CLIPPING, cigarLen);
                    } else {
                        subref = refStr.substring(indexRef, Math.min(indexRef + cigarLen, refStr.length()));
                        currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.HARD_CLIPPING, subref);
                    }
                    indexRef = indexRef + cigarLen;
                    break;
                case P:
                    // Padding consumes neither read nor reference bases.
                    // subref = refStr.substring(indexRef, indexRef + cigarLen);
                    currentDifference = new Alignment.AlignmentDifference(indexRef, Alignment.AlignmentDifference.PADDING, cigarLen);
                    // indexRef = indexRef + cigarLen;
                    break;
            }

            if (currentDifference != null) {
                differences.add(currentDifference);
            }
        }

        return differences;
    }

    /**
     * Convenience overload using the default max stored sequence length (30).
     *
     * @param record The input SAM record
     * @param refStr Reference sequence, may be {@code null}
     * @return The list of alignment differences
     */
    public static List<Alignment.AlignmentDifference> getDifferencesFromCigar(SAMRecord record, String refStr) {
        return getDifferencesFromCigar(record, refStr, 30);
    }

    /**
     * Compares all differences with the referenceSequence in order to reduce
     * the stored sequence. Also adds sequence for deletion differences.
     *
     * @param alignment The Alignment
     * @param referenceSequence Reference sequence
     * @param referenceSequenceStart Reference sequence start
     * @throws org.opencb.biodata.models.alignment.exceptions.ShortReferenceSequenceException
     *         if the reference sequence is too short for a difference's span
     */
    public static void completeDifferencesFromReference(Alignment alignment, String referenceSequence, long referenceSequenceStart) throws ShortReferenceSequenceException {
        int offset = (int) (alignment.getUnclippedStart() - referenceSequenceStart);
        String subRef;
        String subRead;

        // Unmapped reads carry no reference coordinates: leave them untouched.
        if ((alignment.getFlags() & Alignment.SEGMENT_UNMAPPED) != 0) {
            return;
        }

        List<Alignment.AlignmentDifference> newDifferences = new LinkedList<>();
        for (Alignment.AlignmentDifference alignmentDifference : alignment.getDifferences()) {
            Alignment.AlignmentDifference currentDifference = null;
            switch (alignmentDifference.getOp()) {
                case Alignment.AlignmentDifference.DELETION:
                    // Deletions store no sequence; fill it in from the reference.
                    try {
                        if (!alignmentDifference.isAllSequenceStored()) {
                            subRef = referenceSequence.substring(
                                    alignmentDifference.getPos() + offset,
                                    alignmentDifference.getPos() + offset + alignmentDifference.getLength()
                            );
                            alignmentDifference.setSeq(subRef);
                        }
                    } catch (StringIndexOutOfBoundsException e) {
                        throw new ShortReferenceSequenceException("ReferenceSequence Out of Bounds in Alignment.completeDifferences()");
                    }
                    currentDifference = alignmentDifference;
                    break;
                case Alignment.AlignmentDifference.MATCH_MISMATCH:
                case Alignment.AlignmentDifference.MISMATCH:
                    // Re-diff the stored bases against the reference so only the
                    // truly mismatching runs are kept.
                    try {
                        subRef = referenceSequence.substring(
                                alignmentDifference.getPos() + offset,
                                alignmentDifference.getPos() + offset + alignmentDifference.getLength()
                        );
                    } catch (StringIndexOutOfBoundsException e) {
                        throw new ShortReferenceSequenceException("ReferenceSequence Out of Bounds in Alignment.completeDifferences()");
                    }
                    subRead = alignmentDifference.getSeq();
                    newDifferences.addAll(getMismatchDiff(subRef, subRead, alignmentDifference.getPos()));
                    break;
                case Alignment.AlignmentDifference.HARD_CLIPPING:
                    /* subRef = referenceSequence.substring(
                            alignmentDifference.getPos() + offset,
                            alignmentDifference.getPos() + offset + alignmentDifference.getLength()
                    );
                    alignmentDifference.setSeq(subRef);*/
                    currentDifference = alignmentDifference;
                    break;
                case Alignment.AlignmentDifference.SOFT_CLIPPING:
                    currentDifference = alignmentDifference;
                    // If the clipped bases equal the reference, drop the stored
                    // sequence to save space.
                    try {
                        if (alignmentDifference.isAllSequenceStored()) {
                            subRef = referenceSequence.substring(
                                    alignmentDifference.getPos() + offset,
                                    alignmentDifference.getPos() + offset + alignmentDifference.getLength()
                            );
                            if (subRef.equals(alignmentDifference.getSeq())) {
                                currentDifference.setSeq(null);
                            }
                        }
                    } catch (StringIndexOutOfBoundsException e) {
                        //This Soft clipping difference will not be compacted. It's not a bug, just a feature.
                        //TODO: Check for 2.0 version
                    }
                    break;
                    //offset -= alignmentDifference.getLength();
                case Alignment.AlignmentDifference.INSERTION:
                case Alignment.AlignmentDifference.PADDING:
                case Alignment.AlignmentDifference.SKIPPED_REGION:
                    // Nothing to complete: keep the difference as-is.
                    currentDifference = alignmentDifference;
                    break;
            }
            if (currentDifference != null) {
                newDifferences.add(currentDifference);
            }
        }
        alignment.setDifferences(newDifferences);
    }

    /**
     * Compares two equal-position subsequences character by character and
     * returns one MISMATCH difference per maximal mismatching run.
     *
     * @param referenceSequence Reference slice
     * @param readSequence Read slice
     * @param baseIndex Position of the subSequence inside the whole sequence
     * @return List of MISMATCH differences (possibly empty)
     */
    private static List<Alignment.AlignmentDifference> getMismatchDiff(String referenceSequence, String readSequence, int baseIndex) {
        List<Alignment.AlignmentDifference> differences = new LinkedList<>();
        StringBuilder sb = new StringBuilder();
        int foundIndex = 0;
        for (int i = 0; i < Math.min(referenceSequence.length(), readSequence.length()); i++) {
            if (referenceSequence.charAt(i) != readSequence.charAt(i)) {
                if (sb.length() == 0) {
                    foundIndex = i; // start of a new mismatching run
                }
                sb.append(readSequence.charAt(i));
            } else {
                if (sb.length() > 0) {
                    Alignment.AlignmentDifference difference =
                            new Alignment.AlignmentDifference(baseIndex + foundIndex, Alignment.AlignmentDifference.MISMATCH, sb.toString());
                    differences.add(difference);
                    sb.setLength(0);
                }
            }
        }
        // If a mismatch was found at the end, it can't be appended inside the loop
        if (sb.length() > 0) {
            Alignment.AlignmentDifference difference =
                    new Alignment.AlignmentDifference(baseIndex + foundIndex, Alignment.AlignmentDifference.MISMATCH, sb.toString());
            differences.add(difference);
        }
        return differences;
    }

    /** Overload: no cigar output, zero offset. */
    public static String getSequenceFromDifferences(List<Alignment.AlignmentDifference> differences, int sequenceSize, String referenceSequence) throws ShortReferenceSequenceException {
        return getSequenceFromDifferences(differences, sequenceSize, referenceSequence, null, 0);
    }

    /** Overload: no cigar output. */
    public static String getSequenceFromDifferences(List<Alignment.AlignmentDifference> differences, int sequenceSize, String referenceSequence, final int offset) throws ShortReferenceSequenceException {
        return getSequenceFromDifferences(differences, sequenceSize, referenceSequence, null, offset);
    }

    /** Overload: zero offset. */
    public static String getSequenceFromDifferences(List<Alignment.AlignmentDifference> differences, int sequenceSize, String referenceSequence, Cigar cigar) throws ShortReferenceSequenceException {
        return getSequenceFromDifferences(differences, sequenceSize, referenceSequence, cigar, 0);
    }

    /**
     * Reconstructs the read sequence from the stored differences and the
     * reference sequence, optionally accumulating the reconstructed CIGAR.
     *
     * @param differences Differences against the reference, in order
     * @param sequenceSize Expected size of the reconstructed sequence
     * @param referenceSequence Reference sequence to fill matching stretches
     * @param cigar If non-null, reconstructed CIGAR elements are appended to it
     * @param offset Offset of the first difference within referenceSequence
     * @return The reconstructed sequence
     * @throws ShortReferenceSequenceException if the reference is too short
     */
    public static String getSequenceFromDifferences(List<Alignment.AlignmentDifference> differences, int sequenceSize, String referenceSequence, Cigar cigar, final int offset) throws ShortReferenceSequenceException {
        // PERF FIX: accumulate into a StringBuilder; the original used
        // "sequence += ..." in a loop, which is accidentally O(n^2).
        StringBuilder sequence = new StringBuilder();
        String subSeq;
        int index = 0;
        int indexRef = offset;
        if (cigar == null) {
            cigar = new Cigar(); //Will be calculated, but not returned.
        }
        try {
            for (Alignment.AlignmentDifference alignmentDifference : differences) {
                // Fill the gap before this difference with reference bases (EQ).
                if (indexRef - offset < alignmentDifference.getPos()) {
                    subSeq = referenceSequence.substring(indexRef, offset + alignmentDifference.getPos());
                    sequence.append(subSeq);
                    indexRef += subSeq.length();
                    index += subSeq.length();
                    cigar.add(new CigarElement(subSeq.length(), CigarOperator.EQ));
                } else if (indexRef - offset > alignmentDifference.getPos()) {
                    System.out.println("[ERROR] BAD DIFFERENCES ");
                }
                switch (alignmentDifference.getOp()) {
                    case Alignment.AlignmentDifference.INSERTION:
                        cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.INSERTION));
                        if (alignmentDifference.isAllSequenceStored()) {
                            sequence.append(alignmentDifference.getSeq());
                        } else {
                            // Inserted bases were truncated on storage: pad with '*'.
                            System.out.println("[WARNING] Missing insertion information");
                            for (int i = 0; i < alignmentDifference.getLength(); i++) {
                                sequence.append('*');
                            }
                        }
                        index += alignmentDifference.getLength();
                        break;
                    case Alignment.AlignmentDifference.DELETION:
                        cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.DELETION));
                        indexRef += alignmentDifference.getLength();
                        break;
                    case Alignment.AlignmentDifference.MATCH_MISMATCH:
                    case Alignment.AlignmentDifference.MISMATCH:
                        if (alignmentDifference.getOp() == Alignment.AlignmentDifference.MATCH_MISMATCH) {
                            cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.M));
                        } else {
                            cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.X));
                        }
                        if (alignmentDifference.isAllSequenceStored()) {
                            sequence.append(alignmentDifference.getSeq());
                        } else {
                            sequence.append(referenceSequence.substring(indexRef, indexRef + alignmentDifference.getLength()));
                        }
                        indexRef += alignmentDifference.getLength();
                        index += alignmentDifference.getLength();
                        break;
                    case Alignment.AlignmentDifference.SOFT_CLIPPING:
                        cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.SOFT_CLIP));
                        if (alignmentDifference.isAllSequenceStored()) {
                            sequence.append(alignmentDifference.getSeq());
                        } else {
                            // Sequence was dropped because it equaled the reference.
                            sequence.append(referenceSequence.substring(indexRef, indexRef + alignmentDifference.getLength()));
                        }
                        indexRef += alignmentDifference.getLength();
                        index += alignmentDifference.getLength();
                        break;
                    case Alignment.AlignmentDifference.HARD_CLIPPING:
                        cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.HARD_CLIP));
                        indexRef += alignmentDifference.getLength(); //Increases the position in reference, but not in sequence. Hard clipping is not stored
                        break;
                    case Alignment.AlignmentDifference.SKIPPED_REGION:
                        cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.SKIPPED_REGION));
                        indexRef += alignmentDifference.getLength();
                        break;
                    case Alignment.AlignmentDifference.PADDING:
                        cigar.add(new CigarElement(alignmentDifference.getLength(), CigarOperator.PADDING));
                        break;
                }
            }
            // Trailing matching stretch after the last difference.
            if (sequence.length() < sequenceSize) {
                subSeq = referenceSequence.substring(indexRef, indexRef + sequenceSize - sequence.length());
                sequence.append(subSeq);
                cigar.add(new CigarElement(subSeq.length(), CigarOperator.EQ));
            } else if (index > sequenceSize) {
                System.out.println("[ERROR] TOO MUCH DIFFERENCES ");
            }
        } catch (StringIndexOutOfBoundsException e) {
            throw new ShortReferenceSequenceException("ReferenceSequence Out of Bounds in Alignment.getSequenceFromDifferences()");
        }
        return sequence.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.security.authorization.plugin;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.UtilsForTest;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.DriverFactory;
import org.apache.hadoop.hive.ql.IDriver;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
/**
* Test HiveAuthorizer api invocation for filtering objects
*/
public class TestHiveAuthorizerShowFilters {
protected static HiveConf conf;
protected static IDriver driver;
private static final String tableName1 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "table1")
.toLowerCase();
private static final String tableName2 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "table2")
.toLowerCase();
private static final String dbName1 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "db1")
.toLowerCase();
private static final String dbName2 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "db2")
.toLowerCase();
protected static HiveAuthorizer mockedAuthorizer;
static final List<String> AllTables = getSortedList(tableName1, tableName2);
static final List<String> AllDbs = getSortedList("default", dbName1, dbName2);
private static List<HivePrivilegeObject> filterArguments = new ArrayList<>();
private static List<HivePrivilegeObject> filteredResults = new ArrayList<>();
/**
* This factory creates a mocked HiveAuthorizer class. The mocked class is
* used to capture the argument passed to HiveAuthorizer.filterListCmdObjects.
 * It returns the filteredResults object for calls to
* HiveAuthorizer.filterListCmdObjects, and stores the list argument in
* filterArguments
*/
public static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory {
/**
 * Abstracts HiveAuthorizer interface for hive authorization plugins
 */
public abstract class AuthorizerWithFilterCmdImpl implements HiveAuthorizer {
// Real implementation of the filter callback, called through the mock's
// thenCallRealMethod() stubbing below.
@Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
// capture arguments in static
filterArguments.addAll(listObjs);
// return static variable with results, if it is set to some set of
// values
// otherwise return the arguments
if (filteredResults.size() == 0) {
return filterArguments;
}
return filteredResults;
}
}
/**
 * Creates the mocked authorizer and stores it in the static
 * mockedAuthorizer field so tests can inspect/verify it.
 * Only filterListCmdObjects is stubbed to call the real method above;
 * all other HiveAuthorizer methods keep default mock behavior.
 */
@Override
public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) {
// Fail fast if a previous test left Mockito in an inconsistent state.
Mockito.validateMockitoUsage();
mockedAuthorizer = Mockito.mock(AuthorizerWithFilterCmdImpl.class, Mockito.withSettings()
.verboseLogging());
try {
// Route filterListCmdObjects to the real implementation; any other
// stubbing failure is surfaced as a test failure rather than thrown.
Mockito.when(
mockedAuthorizer.filterListCmdObjects((List<HivePrivilegeObject>) any(),
(HiveAuthzContext) any())).thenCallRealMethod();
} catch (Exception e) {
org.junit.Assert.fail("Caught exception " + e);
}
return mockedAuthorizer;
}
}
@BeforeClass
public static void beforeTest() throws Exception {
conf = new HiveConf();
// Turn on mocked authorization
conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
UtilsForTest.setNewDerbyDbLocation(conf, TestHiveAuthorizerShowFilters.class.getSimpleName());
SessionState ss = SessionState.start(conf);
ss.applyAuthorizationPolicy();
driver = DriverFactory.newDriver(conf);
runCmd("create table " + tableName1
+ " (i int, j int, k string) partitioned by (city string, `date` string) ");
runCmd("create table " + tableName2 + "(i int)");
runCmd("create database " + dbName1);
runCmd("create database " + dbName2);
}
@Before
public void setup() {
filterArguments.clear();
filteredResults.clear();
}
@AfterClass
public static void afterTests() throws Exception {
// Drop the tables when we're done. This makes the test work inside an IDE
runCmd("drop table if exists " + tableName1);
runCmd("drop table if exists " + tableName2);
runCmd("drop database if exists " + dbName1);
runCmd("drop database if exists " + dbName2);
driver.close();
}
@Test
public void testShowDatabasesAll() throws Exception {
runShowDbTest(AllDbs);
}
@Test
public void testShowDatabasesSelected() throws Exception {
setFilteredResults(HivePrivilegeObjectType.DATABASE, dbName2);
runShowDbTest(Arrays.asList(dbName2));
}
private void runShowDbTest(List<String> expectedDbList) throws Exception {
runCmd("show databases");
verifyAllDb();
assertEquals("filtered result check ", expectedDbList, getSortedResults());
}
@Test
public void testShowTablesAll() throws Exception {
runShowTablesTest(AllTables);
}
@Test
public void testShowTablesSelected() throws Exception {
setFilteredResults(HivePrivilegeObjectType.TABLE_OR_VIEW, tableName2);
runShowTablesTest(Arrays.asList(tableName2));
}
private void runShowTablesTest(List<String> expectedTabs) throws Exception {
runCmd("show tables");
verifyAllTables();
assertEquals("filtered result check ", expectedTabs, getSortedResults());
}
private List<String> getSortedResults() throws Exception {
List<String> res = new ArrayList<String>();
// set results to be returned
driver.getResults(res);
Collections.sort(res);
return res;
}
/**
* Verify that arguments to call to HiveAuthorizer.filterListCmdObjects are of
* type DATABASE and contain all databases.
*
* @throws HiveAccessControlException
* @throws HiveAuthzPluginException
*/
private void verifyAllDb() throws HiveAuthzPluginException, HiveAccessControlException {
List<HivePrivilegeObject> privObjs = filterArguments;
// get the db names out
List<String> dbArgs = new ArrayList<String>();
for (HivePrivilegeObject privObj : privObjs) {
assertEquals("Priv object type should be db", HivePrivilegeObjectType.DATABASE,
privObj.getType());
dbArgs.add(privObj.getDbname());
}
// sort before comparing with expected results
Collections.sort(dbArgs);
assertEquals("All db should be passed as arguments", AllDbs, dbArgs);
}
/**
* Verify that arguments to call to HiveAuthorizer.filterListCmdObjects are of
* type TABLE and contain all tables.
*
* @throws HiveAccessControlException
* @throws HiveAuthzPluginException
*/
private void verifyAllTables() throws HiveAuthzPluginException, HiveAccessControlException {
List<HivePrivilegeObject> privObjs = filterArguments;
// get the table names out
List<String> tables = new ArrayList<String>();
for (HivePrivilegeObject privObj : privObjs) {
assertEquals("Priv object type should be db", HivePrivilegeObjectType.TABLE_OR_VIEW,
privObj.getType());
assertEquals("Database name", "default", privObj.getDbname());
tables.add(privObj.getObjectName());
}
// sort before comparing with expected results
Collections.sort(tables);
assertEquals("All tables should be passed as arguments", AllTables, tables);
}
private static void setFilteredResults(HivePrivilegeObjectType type, String... objs) {
filteredResults.clear();
for (String obj : objs) {
String dbname;
String tabname = null;
if (type == HivePrivilegeObjectType.DATABASE) {
dbname = obj;
} else {
dbname = "default";
tabname = obj;
}
filteredResults.add(new HivePrivilegeObject(type, dbname, tabname));
}
}
private static void runCmd(String cmd) throws Exception {
driver.run(cmd);
}
private static List<String> getSortedList(String... strings) {
return getSortedList(Arrays.asList(strings));
}
private static List<String> getSortedList(List<String> columns) {
List<String> sortedCols = new ArrayList<String>(columns);
Collections.sort(sortedCols);
return sortedCols;
}
}
| |
package hex;
import hex.FrameTask.DataInfo;
import hex.KMeans.Initialization;
import water.*;
import water.Job.ColumnsJob;
import water.api.DocGen;
import water.api.Progress2;
import water.api.Request;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.util.RString;
import water.util.Utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Random;
/**
* Scalable K-Means++ (KMeans||)<br>
* http://theory.stanford.edu/~sergei/papers/vldb12-kmpar.pdf<br>
* http://www.youtube.com/watch?v=cigXAxV3XcY
*/
public class KMeans2 extends ColumnsJob {
  static final int API_WEAVER = 1;
  static public DocGen.FieldDoc[] DOC_FIELDS;
  static final String DOC_GET = "k-means";

  @API(help = "Cluster initialization: None - chooses initial centers at random; Plus Plus - choose first center at random, subsequent centers chosen from probability distribution weighted so that points further from first center are more likey to be selected; Furthest - chooses initial point at random, subsequent point taken as the point furthest from prior point.", filter = Default.class, json=true)
  public Initialization initialization = Initialization.None;

  @API(help = "Number of clusters", required = true, filter = Default.class, lmin = 1, lmax = 100000, json=true)
  public int k = 2;

  @API(help = "Maximum number of iterations before stopping", required = true, filter = Default.class, lmin = 1, lmax = 100000, json=true)
  public int max_iter = 100;

  @API(help = "Whether data should be normalized", filter = Default.class, json=true)
  public boolean normalize;

  @API(help = "Seed for the random number generator", filter = Default.class, json=true)
  public long seed = new Random().nextLong();

  @API(help = "Drop columns with more than 20% missing values", filter = Default.class)
  public boolean drop_na_cols = true;

  public KMeans2() {
    description = "K-means";
  }

  /**
   * Main training entry point: prepares the frame, picks initial centers
   * (random, k-means++ sampling, or furthest-point), then runs Lloyd's
   * iterations until max_iter or cancellation, publishing the model and the
   * per-row cluster-assignment frame.
   */
  @Override protected void execImpl() {
    logStart();
    source.read_lock(self());
    String sourceArg = input("source");
    Key sourceKey = null;
    if( sourceArg != null )
      sourceKey = Key.make(sourceArg);

    // Drop ignored cols and, if user asks for it, cols with too many NAs
    Frame fr = DataInfo.prepareFrame(source, ignored_cols, false, drop_na_cols);
    String[] names = fr.names();
    Vec[] vecs = fr.vecs();
    if(vecs == null || vecs.length == 0)
      throw new IllegalArgumentException("No columns selected. Check that selected columns have not been dropped due to too many NAs.");
    DataInfo dinfo = new DataInfo(fr, 0, false, normalize, false);

    // Fill-in response based on K99: the "response" column's domain is the
    // synthetic cluster labels.
    String[] domain = new String[k];
    for( int i = 0; i < domain.length; i++ )
      domain[i] = "Cluster " + i;
    String[] namesResp = Utils.append(names, "response");
    String[][] domainsResp = (String[][]) Utils.append((new Frame(names, vecs)).domains(), (Object) domain);
    KMeans2Model model = new KMeans2Model(this, destination_key, sourceKey, namesResp, domainsResp);
    model.delete_and_lock(self());
    model.k = k; model.normalized = normalize; model.max_iter = max_iter;

    // TODO remove when stats are propagated with vecs?
    // Per-column mean (NA replacement) and optional 1/sigma multiplier.
    double[] means = new double[vecs.length];
    double[] mults = normalize ? new double[vecs.length] : null;
    for( int i = 0; i < vecs.length; i++ ) {
      means[i] = (float) vecs[i].mean();
      if( mults != null ) {
        double sigma = vecs[i].sigma();
        mults[i] = normalize(sigma) ? 1.0 / sigma : 1.0;
      }
    }

    // -1 to be different from all chunk indexes (C.f. Sampler)
    Random rand = Utils.getRNG(seed - 1);
    double[][] clusters;
    if( initialization == Initialization.None ) {
      // Initialize all clusters to random rows
      clusters = new double[k][vecs.length];
      for (double[] cluster : clusters)
        randomRow(vecs, rand, cluster, means, mults);
    } else {
      // Initialize first cluster to random row
      clusters = new double[1][];
      clusters[0] = new double[vecs.length];
      randomRow(vecs, rand, clusters[0], means, mults);

      // KMeans|| over-sampling: 5 rounds of distance-weighted sampling, then
      // recluster the sampled candidates down to k centers.
      while( model.iterations < 5 ) {
        // Sum squares distances to clusters
        SumSqr sqr = new SumSqr();
        sqr._clusters = clusters;
        sqr._means = means;
        sqr._mults = mults;
        sqr.doAll(vecs);

        // Sample with probability inverse to square distance
        Sampler sampler = new Sampler();
        sampler._clusters = clusters;
        sampler._sqr = sqr._sqr;
        sampler._probability = k * 3; // Over-sampling
        sampler._seed = seed;
        sampler._means = means;
        sampler._mults = mults;
        sampler.doAll(vecs);
        clusters = Utils.append(clusters, sampler._sampled);

        if( !isRunning(self()) )
          return;
        model.centers = normalize ? denormalize(clusters, vecs) : clusters;
        model.total_within_SS = sqr._sqr;
        model.iterations++;     // One iteration done
        model.update(self());
      }
      clusters = recluster(clusters, k, rand, initialization);
    }

    // Lloyd's algorithm: assign rows to nearest center, recompute centers,
    // repeat until max_iter or job cancellation.
    for( ;; ) {
      Lloyds task = new Lloyds();
      task._clusters = clusters;
      task._means = means;
      task._mults = mults;
      task._ncats = dinfo._cats;
      task._nnums = dinfo._nums;
      task.doAll(vecs);
      model.centers = clusters = normalize ? denormalize(task._cMeans, vecs) : task._cMeans;
      model.between_cluster_variances = task._betwnSqrs;
      double[] variances = new double[task._cSqrs.length];
      for( int clu = 0; clu < task._cSqrs.length; clu++ )
        for( int col = 0; col < task._cSqrs[clu].length; col++ )
          variances[clu] += task._cSqrs[clu][col];
      double between_cluster_SS = 0.0;
      for (int clu = 0; clu < task._betwnSqrs.length; clu++)
          between_cluster_SS += task._betwnSqrs[clu];
      model.between_cluster_SS = between_cluster_SS;
      model.within_cluster_variances = variances;
      model.total_within_SS = task._sqr;
      model.total_SS = model.total_within_SS + model.between_cluster_SS;
      model.size = task._rows;
      model.iterations++;
      model.update(self());
      if( model.iterations >= max_iter ) {
        // Final pass: materialize the per-row cluster assignments frame.
        Clusters cc = new Clusters();
        cc._clusters = clusters;
        cc._means = means;
        cc._mults = mults;
        cc.doAll(1, vecs);
        Frame fr2 = cc.outputFrame(model._clustersKey,new String[]{"Cluster ID"}, new String[][] { Utils.toStringMap(0,cc._clusters.length-1) } );
        fr2.delete_and_lock(self()).unlock(self());
        break;
      }
      if( !isRunning(self()) )
        break;
    }
    model.unlock(self());
    source.unlock(self());
    return;
  }

  @Override protected Response redirect() {
    return KMeans2Progress.redirect(this, job_key, destination_key);
  }

  /** Progress page for a running KMeans2 job; redirects to the model view when done. */
  public static class KMeans2Progress extends Progress2 {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    @Override protected Response jobDone(Key dst) {
      return KMeans2ModelView.redirect(this, destination_key);
    }

    public static Response redirect(Request req, Key job_key, Key destination_key) {
      return Response.redirect(req, new KMeans2Progress().href(), JOB_KEY, job_key, DEST_KEY, destination_key);
    }
  }

  /** HTML view of a trained KMeans2 model: centers, variances, totals, assignment link. */
  public static class KMeans2ModelView extends Request2 {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    @API(help = "KMeans2 Model", json = true, filter = Default.class)
    public KMeans2Model model;

    public static String link(String txt, Key model) {
      return "<a href='" + new KMeans2ModelView().href() + ".html?model=" + model + "'>" + txt + "</a>";
    }

    public static Response redirect(Request req, Key model) {
      return Response.redirect(req, new KMeans2ModelView().href(), "model", model);
    }

    @Override protected Response serve() {
      return Response.done(this);
    }

    @Override public boolean toHTML(StringBuilder sb) {
      if( model != null ) {
        model.parameters.makeJsonBox(sb);
        DocGen.HTML.section(sb, "Cluster Centers: "); //"Total Within Cluster Sum of Squares: " + model.total_within_SS);
        table(sb, "Clusters", model._names, model.centers);
        double[][] rows = new double[model.within_cluster_variances.length][1];
        for( int i = 0; i < rows.length; i++ )
          rows[i][0] = model.within_cluster_variances[i];
        columnHTMLlong(sb, "Cluster Size", model.size);
        DocGen.HTML.section(sb, "Cluster Variances: ");
        table(sb, "Clusters", new String[]{"Within Cluster Variances"}, rows);
        columnHTML(sb, "Between Cluster Variances", model.between_cluster_variances);
        sb.append("<br />");
        DocGen.HTML.section(sb, "Overall Totals: ");
        double[] row = new double[]{model.total_SS, model.total_within_SS, model.between_cluster_SS};
        rowHTML(sb, new String[]{"Total Sum of Squares", "Total Within Cluster Sum of Squares", "Between Cluster Sum of Squares"}, row);
        DocGen.HTML.section(sb, "Cluster Assignments by Observation: ");
        RString rs = new RString("<a href='Inspect2.html?src_key=%$key'>%content</a>");
        rs.replace("key", model._key + "_clusters");
        rs.replace("content", "View the row-by-row cluster assignments");
        sb.append(rs.toString());
        //sb.append("<iframe src=\"" + "/Inspect.html?key=KMeansClusters\"" + "width = \"850\" height = \"550\" marginwidth=\"25\" marginheight=\"25\" scrolling=\"yes\"></iframe>" );
        return true;
      }
      return false;
    }

    /** Renders a single-row table: one header row, one value row. */
    private static void rowHTML(StringBuilder sb, String[] header, double[] ro) {
        sb.append("<span style='display: inline-block; '>");
        sb.append("<table class='table table-striped table-bordered'>");
        sb.append("<tr>");
        for (String aHeader : header) sb.append("<th>").append(aHeader).append("</th>");
        sb.append("</tr>");
        sb.append("<tr>");
        for (double row : ro) {
          sb.append("<td>").append(ElementBuilder.format(row)).append("</td>");
        }
        sb.append("</tr>");
        sb.append("</table></span>");
    }

    /** Renders a single-column table: one header cell, then one row per value. */
    private static void columnHTML(StringBuilder sb, String name, double[] rows) {
        sb.append("<span style='display: inline-block; '>");
        sb.append("<table class='table table-striped table-bordered'>");
        sb.append("<tr>");
        sb.append("<th>").append(name).append("</th>");
        sb.append("</tr>");
        // BUG FIX: a stray unclosed "<tr>" was emitted here before the loop,
        // producing malformed HTML; each row now opens and closes its own <tr>.
        for (double row : rows) {
          sb.append("<tr>");
          sb.append("<td>").append(ElementBuilder.format(row)).append("</td>");
          sb.append("</tr>");
        }
        sb.append("</table></span>");
    }

    /** Same as columnHTML but for long values (e.g. cluster sizes). */
    private static void columnHTMLlong(StringBuilder sb, String name, long[] rows) {
        sb.append("<span style='display: inline-block; '>");
        sb.append("<table class='table table-striped table-bordered'>");
        sb.append("<tr>");
        sb.append("<th>").append(name).append("</th>");
        sb.append("</tr>");
        // BUG FIX: removed the stray unclosed "<tr>" emitted before the loop
        // (same malformed-HTML issue as columnHTML).
        for (double row : rows) {
          sb.append("<tr>");
          sb.append("<td>").append(ElementBuilder.format(row)).append("</td>");
          sb.append("</tr>");
        }
        sb.append("</table></span>");
    }

    /** Renders a matrix as a table with a row-index first column. */
    private static void table(StringBuilder sb, String title, String[] names, double[][] rows) {
      sb.append("<span style='display: inline-block;'>");
      sb.append("<table class='table table-striped table-bordered'>");
      sb.append("<tr>");
      sb.append("<th>").append(title).append("</th>");
      for( int i = 0; names != null && i < rows[0].length; i++ )
        sb.append("<th>").append(names[i]).append("</th>");
      sb.append("</tr>");
      for( int r = 0; r < rows.length; r++ ) {
        sb.append("<tr>");
        sb.append("<td>").append(r).append("</td>");
        for( int c = 0; c < rows[r].length; c++ )
          sb.append("<td>").append(ElementBuilder.format(rows[r][c])).append("</td>");
        sb.append("</tr>");
      }
      sb.append("</table></span>");
    }
  }

  /** Trained KMeans2 model: centers, SS statistics, and scoring support. */
  public static class KMeans2Model extends Model implements Progress {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    @API(help = "Model parameters")
    private final KMeans2 parameters; // This is used purely for printing values out.

    @API(help = "Cluster centers, always denormalized")
    public double[][] centers;

    @API(help = "Sum of within cluster sum of squares")
    public double total_within_SS;

    @API(help = "Between cluster sum of square distances")
    public double between_cluster_SS;

    @API(help = "Total Sum of squares = total_within_SS + betwen_cluster_SS")
    public double total_SS;

    @API(help = "Number of clusters")
    public int k;

    @API(help = "Numbers of observations in each cluster.")
    public long[] size;

    @API(help = "Whether data was normalized")
    public boolean normalized;

    @API(help = "Maximum number of iterations before stopping")
    public int max_iter = 100;

    @API(help = "Iterations the algorithm ran")
    public int iterations;

    @API(help = "Within cluster sum of squares per cluster")
    public double[] within_cluster_variances;

    @API(help = "Between Cluster square distances per cluster")
    public double[] between_cluster_variances;

    @API(help = "The row-by-row cluster assignments")
    public final Key _clustersKey;

    // Normalization caches
    private transient double[][] _normClust;
    private transient double[] _means, _mults;
    // NOTE(review): _ncats/_nnums are never assigned, so score0 always calls
    // closest() with ncats == 0 even when training used categorical columns
    // (Lloyds got dinfo._cats). Verify whether scoring should mirror training.
    private transient int _ncats, _nnums;

    public KMeans2Model(KMeans2 params, Key selfKey, Key dataKey, String names[], String domains[][]) {
      super(selfKey, dataKey, names, domains);
      parameters = params;
      _clustersKey = Key.make(selfKey.toString() + "_clusters");
    }

    @Override public double mse() { return total_within_SS; }

    @Override public float progress() {
      return Math.min(1f, iterations / (float) max_iter);
    }

    /** Scores one row: preds[0] = nearest cluster index, preds[1+cluster] = 1. */
    @Override protected float[] score0(Chunk[] chunks, int rowInChunk, double[] tmp, float[] preds) {
      // If only one cluster, then everything is trivially assigned to it
      if(preds.length == 1) {
        preds[0] = 0;
        return preds;
      }
      double[][] cs = centers;
      int numInputCols = tmp.length-1; // -1 as there is no response column here
      if( normalized && _normClust == null )
        cs = _normClust = normalize(centers, chunks); // Normalize centers once, lazily
      if( _means == null ) {
        _means = new double[numInputCols];
        for( int i = 0; i < numInputCols; i++ )
          _means[i] = chunks[i]._vec.mean();
      }
      if( normalized && _mults == null ) {
        _mults = new double[numInputCols];
        for( int i = 0; i < numInputCols; i++ ) {
          double sigma = chunks[i]._vec.sigma();
          _mults[i] = normalize(sigma) ? 1 / sigma : 1;
        }
      }
      data(tmp, chunks, rowInChunk, _means, _mults);
      Arrays.fill(preds, 0);
//      int cluster = closest(cs, tmp, new ClusterDist())._cluster;
      int cluster = closest(cs, tmp, _ncats, new ClusterDist())._cluster;
      preds[0] = cluster;       // prediction in preds[0]
      preds[1+cluster] = 1;     // class distribution
      return preds;
    }

    @Override protected float[] score0(double[] data, float[] preds) {
      throw new UnsupportedOperationException();
    }

    /** Remove any Model internal Keys */
    @Override public Futures delete_impl(Futures fs) {
      Lockable.delete(_clustersKey);
      return fs;
    }
  }

  /** MRTask producing the per-row cluster-assignment column. */
  public class Clusters extends MRTask2<Clusters> {
    // IN
    double[][] _clusters;         // Cluster centers
    double[] _means, _mults;      // Normalization
    int _ncats, _nnums;

    @Override public void map(Chunk[] cs, NewChunk ncs) {
      double[] values = new double[_clusters[0].length];
      ClusterDist cd = new ClusterDist();
      for (int row = 0; row < cs[0]._len; row++) {
        data(values, cs, row, _means, _mults);
//        closest(_clusters, values, cd);
        closest(_clusters, values, _ncats, cd);
        int clu = cd._cluster;
//        ncs[0].addNum(clu);
        ncs.addEnum(clu);
      }
    }
  }

  /** MRTask summing each row's squared distance to its nearest center. */
  public static class SumSqr extends MRTask2<SumSqr> {
    // IN
    double[] _means, _mults; // Normalization
    double[][] _clusters;

    // OUT
    double _sqr;

    @Override public void map(Chunk[] cs) {
      double[] values = new double[cs.length];
      ClusterDist cd = new ClusterDist();
      for( int row = 0; row < cs[0]._len; row++ ) {
        data(values, cs, row, _means, _mults);
        _sqr += minSqr(_clusters, values, cd);
      }
      // Null inputs so they are not serialized back to the caller
      _means = _mults = null;
      _clusters = null;
    }

    @Override public void reduce(SumSqr other) {
      _sqr += other._sqr;
    }
  }

  /** MRTask sampling candidate centers with probability proportional to squared distance (KMeans||). */
  public static class Sampler extends MRTask2<Sampler> {
    // IN
    double[][] _clusters;
    double _sqr;           // Min-square-error
    double _probability;   // Odds to select this point
    long _seed;
    double[] _means, _mults; // Normalization

    // OUT
    double[][] _sampled;   // New clusters

    @Override public void map(Chunk[] cs) {
      double[] values = new double[cs.length];
      ArrayList<double[]> list = new ArrayList<double[]>();
      // Seed per-chunk so different chunks draw independent samples
      Random rand = Utils.getRNG(_seed + cs[0]._start);
      ClusterDist cd = new ClusterDist();

      for( int row = 0; row < cs[0]._len; row++ ) {
        data(values, cs, row, _means, _mults);
        double sqr = minSqr(_clusters, values, cd);
        if( _probability * sqr > rand.nextDouble() * _sqr )
          list.add(values.clone());
      }

      _sampled = new double[list.size()][];
      list.toArray(_sampled);
      _clusters = null;
      _means = _mults = null;
    }

    @Override public void reduce(Sampler other) {
      _sampled = Utils.append(_sampled, other._sampled);
    }
  }

  /** One Lloyd's iteration: per-cluster means, squares, sizes and total error. */
  public static class Lloyds extends MRTask2<Lloyds> {
    // IN
    double[][] _clusters;
    double[] _means, _mults;      // Normalization
    int _ncats, _nnums;

    // OUT
    double[][] _cMeans, _cSqrs;   // Means and sum of squares for each cluster
    double[] _betwnSqrs;          // Between cluster squares
    double[] _gm;                 // Grand Mean (mean of means)
    long[] _rows;                 // Rows per cluster
    double _sqr;                  // Total sqr distance

    @Override public void map(Chunk[] cs) {
      _cMeans = new double[_clusters.length][_clusters[0].length];
      _cSqrs = new double[_clusters.length][_clusters[0].length];
      _betwnSqrs = new double[_clusters.length];
      _rows = new long[_clusters.length];
      _gm = new double[_clusters[0].length];

      // Find closest cluster for each row
      double[] values = new double[_clusters[0].length];
      ClusterDist cd = new ClusterDist();
      int[] clusters = new int[cs[0]._len];
      for( int row = 0; row < cs[0]._len; row++ ) {
        data(values, cs, row, _means, _mults);
//        closest(_clusters, values, cd);
        closest(_clusters, values, _ncats, cd);
        int clu = clusters[row] = cd._cluster;
        _sqr += cd._dist;
        if( clu == -1 )
          continue; // Ignore broken row

        // Add values and increment counter for chosen cluster
        for( int col = 0; col < values.length; col++ )
          _cMeans[clu][col] += values[col];
        _rows[clu]++;
      }
      int[] validMeans = new int[_gm.length];
      for( int clu = 0; clu < _cMeans.length; clu++ )
        for( int col = 0; col < _cMeans[clu].length; col++ ) {
          if(_rows[clu] != 0) {
            _cMeans[clu][col] /= _rows[clu];
            _gm[col] += _cMeans[clu][col];
            validMeans[col]++;
          }
      }
      for (int col = 0; col < _gm.length; col++)
        if(validMeans[col] != 0)
          _gm[col] /= validMeans[col];
      for (int clu = 0; clu < _cMeans.length; clu++)
        for (int col = 0; col < _gm.length; col++) {
          double mean_delta = _cMeans[clu][col] - _gm[col];
          _betwnSqrs[clu] += _rows[clu] * mean_delta * mean_delta;
        }
      // Second pass for in-cluster variances
      for( int row = 0; row < cs[0]._len; row++ ) {
        int clu = clusters[row];
        if( clu == -1 )
          continue;
        data(values, cs, row, _means, _mults);
        for( int col = 0; col < values.length; col++ ) {
          double delta = values[col] - _cMeans[clu][col];
          _cSqrs[clu][col] += delta * delta;
        }
      }
      _clusters = null;
      _means = _mults = null;
    }

    @Override public void reduce(Lloyds mr) {
      // NOTE(review): _betwnSqrs and _gm are not merged here, so after a
      // multi-chunk reduce they reflect only one map's partial result while
      // _cMeans/_cSqrs/_rows/_sqr are properly combined. Verify intent.
      for( int clu = 0; clu < _cMeans.length; clu++ )
        Layer.Stats.reduce(_cMeans[clu], _cSqrs[clu], _rows[clu], mr._cMeans[clu], mr._cSqrs[clu], mr._rows[clu]);
      Utils.add(_rows, mr._rows);
      _sqr += mr._sqr;
    }
  }

  /** Scratch result holder for closest(): nearest cluster index and its squared distance. */
  private static final class ClusterDist {
    int _cluster;
    double _dist;
  }

  private static double minSqr(double[][] clusters, double[] point, ClusterDist cd) {
    return closest(clusters, point, cd, clusters.length)._dist;
  }

  private static double minSqr(double[][] clusters, double[] point, ClusterDist cd, int count) {
    return closest(clusters, point, cd, count)._dist;
  }

  private static ClusterDist closest(double[][] clusters, double[] point, ClusterDist cd) {
    return closest(clusters, point, cd, clusters.length);
  }

  private static ClusterDist closest(double[][] clusters, double[] point, int ncats, ClusterDist cd) {
    return closest(clusters, point, ncats, cd, clusters.length);
  }

  private static ClusterDist closest(double[][] clusters, double[] point, ClusterDist cd, int count) {
    return closest(clusters, point, 0, cd, count);
  }

  private static ClusterDist closest(double[][] clusters, double[] point, int ncats, ClusterDist cd, int count) {
    return closest(clusters, point, ncats, cd, count, 1);
  }

  /** Return both nearest of N cluster/centroids, and the square-distance. */
  private static ClusterDist closest(double[][] clusters, double[] point, int ncats, ClusterDist cd, int count, double dist) {
    int min = -1;
    double minSqr = Double.MAX_VALUE;
    for( int cluster = 0; cluster < count; cluster++ ) {
      double sqr = 0;           // Sum of dimensional distances
      int pts = point.length;   // Count of valid points

      // Expand categoricals into binary indicator cols
      for(int column = 0; column < ncats; column++) {
        double d = point[column];
        if(Double.isNaN(d))
          pts--;
        else {
          // TODO: What is the distance between unequal categoricals?
          if(d != clusters[cluster][column])
            sqr += 2 * dist * dist;
        }
      }
      for( int column = ncats; column < clusters[cluster].length; column++ ) {
        double d = point[column];
        if( Double.isNaN(d) ) { // Bad data?
          pts--;                // Do not count
        } else {
          double delta = d - clusters[cluster][column];
          sqr += delta * delta;
        }
      }
      // Scale distance by ratio of valid dimensions to all dimensions - since
      // we did not add any error term for the missing point, the sum of errors
      // is small - ratio up "as if" the missing error term is equal to the
      // average of other error terms.  Same math another way:
      //   double avg_dist = sqr / pts; // average distance per feature/column/dimension
      //   sqr = sqr * point.length;    // Total dist is average*#dimensions
      if( 0 < pts && pts < point.length )
        // BUG FIX: was `sqr *= point.length / pts;` - int/int division
        // truncated the ratio (e.g. 5/3 -> 1), silently skipping the
        // missing-value correction in most cases.
        sqr *= (double) point.length / pts;
      if( sqr < minSqr ) {
        min = cluster;
        minSqr = sqr;
      }
    }
    cd._cluster = min;          // Record nearest cluster
    cd._dist = minSqr;          // Record square-distance
    return cd;                  // Return for flow-coding
  }

  // KMeans++ re-clustering: reduce the over-sampled candidate set down to k centers.
  public static double[][] recluster(double[][] points, int k, Random rand, Initialization init) {
    double[][] res = new double[k][];
    res[0] = points[0];
    int count = 1;
    ClusterDist cd = new ClusterDist();
    switch( init ) {
      case None:
        break;
      case PlusPlus: { // k-means++
        while( count < res.length ) {
          double sum = 0;
          for (double[] point1 : points) sum += minSqr(res, point1, cd, count);

          for (double[] point : points) {
            if (minSqr(res, point, cd, count) >= rand.nextDouble() * sum) {
              res[count++] = point;
              break;
            }
          }
        }
        break;
      }
      case Furthest: { // Takes cluster further from any already chosen ones
        while( count < res.length ) {
          double max = 0;
          int index = 0;
          for( int i = 0; i < points.length; i++ ) {
            double sqr = minSqr(res, points[i], cd, count);
            if( sqr > max ) {
              max = sqr;
              index = i;
            }
          }
          res[count++] = points[index];
        }
        break;
      }
      default:
        throw new IllegalStateException();
    }
    return res;
  }

  /** Copies a uniformly random row (normalized if requested) into cluster. */
  private void randomRow(Vec[] vecs, Random rand, double[] cluster, double[] means, double[] mults) {
    long row = Math.max(0, (long) (rand.nextDouble() * vecs[0].length()) - 1);
    data(cluster, vecs, row, means, mults);
  }

  /** Whether a column with this sigma should be normalized (skips near-constant columns). */
  private static boolean normalize(double sigma) {
    // TODO unify handling of constant columns
    return sigma > 1e-6;
  }

  /** Normalizes (denormalized) centers using the chunks' column statistics. */
  private static double[][] normalize(double[][] clusters, Chunk[] chks) {
    double[][] value = new double[clusters.length][clusters[0].length];
    for( int row = 0; row < value.length; row++ ) {
      for( int col = 0; col < clusters[row].length; col++ ) {
        double d = clusters[row][col];
        Vec vec = chks[col]._vec;
        d -= vec.mean();
        d /= normalize(vec.sigma()) ? vec.sigma() : 1;
        value[row][col] = d;
      }
    }
    return value;
  }

  /** Maps normalized centers back to the original column scales. */
  private static double[][] denormalize(double[][] clusters, Vec[] vecs) {
    double[][] value = new double[clusters.length][clusters[0].length];
    for( int row = 0; row < value.length; row++ ) {
      for( int col = 0; col < clusters[row].length; col++ ) {
        double d = clusters[row][col];
        d *= vecs[col].sigma();
        d += vecs[col].mean();
        value[row][col] = d;
      }
    }
    return value;
  }

  /** Reads one row from Vecs into values, applying NA replacement and normalization. */
  private static void data(double[] values, Vec[] vecs, long row, double[] means, double[] mults) {
    for( int i = 0; i < values.length; i++ ) {
      double d = vecs[i].at(row);
//      values[i] = data(d, i, means, mults);
      values[i] = data(d, i, means, mults, vecs[i].cardinality());
    }
  }

  /** Reads one row from Chunks into values, applying NA replacement and normalization. */
  private static void data(double[] values, Chunk[] chks, int row, double[] means, double[] mults) {
    for( int i = 0; i < values.length; i++ ) {
      double d = chks[i].at0(row);
//      values[i] = data(d, i, means, mults);
      values[i] = data(d, i, means, mults, chks[i]._vec.cardinality());
    }
  }

  /**
   * Takes mean if NaN, normalize if requested.
   */
  private static double data(double d, int i, double[] means, double[] mults) {
    if( Double.isNaN(d) )
      d = means[i];
    if( mults != null ) {
      d -= means[i];
      d *= mults[i];
    }
    return d;
  }

  /**
   * Takes mean if NaN, normalize if requested. Cardinality == -1 marks a
   * numeric column; otherwise the column is categorical and only NA
   * replacement (clamped to a valid level) is applied.
   */
  private static double data(double d, int i, double[] means, double[] mults, int cardinality) {
    if(cardinality == -1) {
      if( Double.isNaN(d) )
        d = means[i];
      if( mults != null ) {
        d -= means[i];
        d *= mults[i];
      }
    } else {
      // TODO: If NaN, then replace with majority class?
      if(Double.isNaN(d))
        d = Math.min(Math.round(means[i]), cardinality-1);
    }
    return d;
  }
}
| |
package com.mgilangjanuar.dev.goscele.modules.main.provider;
import com.mgilangjanuar.dev.goscele.base.BaseProvider;
import com.mgilangjanuar.dev.goscele.modules.common.model.UserModel;
import com.mgilangjanuar.dev.goscele.modules.main.adapter.ScheduleDailyDetailRecyclerViewAdapter;
import com.mgilangjanuar.dev.goscele.modules.main.adapter.ScheduleDailyRecyclerViewAdapter;
import com.mgilangjanuar.dev.goscele.modules.main.listener.ScheduleDailyDetailListener;
import com.mgilangjanuar.dev.goscele.modules.main.listener.ScheduleDailyListener;
import com.mgilangjanuar.dev.goscele.modules.main.model.ScheduleCourseModel;
import com.mgilangjanuar.dev.goscele.modules.main.model.ScheduleDailyGroupModel;
import com.mgilangjanuar.dev.goscele.modules.main.model.ScheduleDailyModel;
import com.mgilangjanuar.dev.goscele.utils.Constant;
import org.jsoup.Connection;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by mgilangjanuar (mgilangjanuar@gmail.com)
*
* @since 2017
*/
public class DailyProvider {
private ScheduleDailyListener dailyListener;
private ScheduleDailyDetailListener dailyDetailListener;
public DailyProvider(ScheduleDailyListener dailyListener, ScheduleDailyDetailListener dailyDetailListener) {
this.dailyListener = dailyListener;
this.dailyDetailListener = dailyDetailListener;
}
public void create() {
new Auth(dailyListener, dailyDetailListener).run();
}
public static class Auth extends BaseProvider {
private ScheduleDailyListener dailyListener;
private ScheduleDailyDetailListener dailyDetailListener;
public Auth(ScheduleDailyListener dailyListener, ScheduleDailyDetailListener dailyDetailListener) {
this.dailyListener = dailyListener;
this.dailyDetailListener = dailyDetailListener;
}
@Override
public void run() {
execute("");
}
@Override
public String url() {
return Constant.BASE_URL_SIAK + Constant.ROUTE_LOGIN_SIAK;
}
@Override
public Connection.Method method() {
return Connection.Method.POST;
}
@Override
public String[] data() {
UserModel userModel = new UserModel().find().executeSingle();
return new String[]{"u", userModel.username, "p", userModel.password};
}
@Override
protected void onPostExecute(List<Elements> elements) {
super.onPostExecute(elements);
new ChangeRole(dailyListener, dailyDetailListener, cookies).run();
}
}
public static class ChangeRole extends BaseProvider {
private ScheduleDailyListener dailyListener;
private ScheduleDailyDetailListener dailyDetailListener;
private Map<String, String> cookiesSiak;
public ChangeRole(ScheduleDailyListener dailyListener, ScheduleDailyDetailListener dailyDetailListener, Map<String, String> cookiesSiak) {
this.dailyListener = dailyListener;
this.dailyDetailListener = dailyDetailListener;
this.cookiesSiak = cookiesSiak;
}
@Override
public void run() {
execute("");
}
@Override
public String url() {
return Constant.BASE_URL_SIAK + "Authentication/ChangeRole";
}
@Override
public Map<String, String> cookies() {
return cookiesSiak;
}
@Override
protected void onPostExecute(List<Elements> elements) {
super.onPostExecute(elements);
new Schedule(dailyListener, cookies()).run();
new Class(dailyDetailListener, cookies()).run();
}
}
public static class Class extends BaseProvider {
private ScheduleDailyDetailListener listener;
private Map<String, String> cookiesSiak;
public Class(ScheduleDailyDetailListener listener, Map<String, String> cookiesSiak) {
this.listener = listener;
this.cookiesSiak = cookiesSiak;
}
@Override
public void run() {
execute(".box tbody tr");
}
@Override
public Map<String, String> cookies() {
return cookiesSiak;
}
@Override
public String url() {
return Constant.BASE_URL_SIAK + "CoursePlan/CoursePlanViewClass";
}
@Override
protected void onPostExecute(List<Elements> elementses) {
super.onPostExecute(elementses);
try {
Elements elements = elementses.get(0);
List<ScheduleCourseModel> models = new ArrayList<>();
int i = 0;
for (Element e: elements) {
if (i > 1) {
ScheduleCourseModel model = new ScheduleCourseModel();
model.code = e.select("td:eq(1) > a").text();
model.courseName = e.select("td:eq(2) > a").text();
model.courseClass = e.select("td:eq(2) > span").text();
model.credits = e.select("td:eq(3)").text();
model.lecturer = e.select("td:eq(7)").text();
model.save();
models.add(model);
}
i++;
}
ScheduleDailyDetailRecyclerViewAdapter adapter = new ScheduleDailyDetailRecyclerViewAdapter(models);
listener.onRetrieveCourseDetail(adapter);
} catch (Exception e) {
listener.onError(e.getMessage());
}
}
}
public static class Schedule extends BaseProvider {
private ScheduleDailyListener listener;
private Map<String, String> cookiesSiak;
public Schedule(ScheduleDailyListener listener, Map<String, String> cookiesSiak) {
this.listener = listener;
this.cookiesSiak = cookiesSiak;
}
@Override
public void run() {
execute(".box.cal tbody tr:eq(1) td");
}
@Override
public Map<String, String> cookies() {
return cookiesSiak;
}
@Override
public String url() {
return Constant.BASE_URL_SIAK + "CoursePlan/CoursePlanViewSchedule";
}
@Override
protected void onPostExecute(List<Elements> elementses) {
try {
super.onPostExecute(elementses);
Elements elements = elementses.get(0);
int i = 0;
List<ScheduleDailyGroupModel> results = new ArrayList<>();
for (Element e: elements) {
if (i != 0 && i != 7) {
String day = null;
switch (i) {
case 1:
day = "Monday";
break;
case 2:
day = "Tuesday";
break;
case 3:
day = "Wednesday";
break;
case 4:
day = "Thursday";
break;
case 5:
day = "Friday";
break;
case 6:
day = "Saturday";
break;
}
ScheduleDailyGroupModel dailyGroupModel = new ScheduleDailyGroupModel();
dailyGroupModel.day = day;
dailyGroupModel.save();
for (Element e1: e.select(".sch-inner")) {
ScheduleDailyModel dailyModel = new ScheduleDailyModel();
dailyModel.time = e1.select("h3").text();
dailyModel.desc = e1.select(".desc p").text();
dailyModel.scheduleDailyGroupModel = dailyGroupModel;
dailyModel.save();
}
results.add(dailyGroupModel);
}
i++;
}
listener.onRetrieveDailySchedule(new ScheduleDailyRecyclerViewAdapter(results));
} catch (Exception e) {
listener.onError(e.getMessage());
}
}
}
}
| |
/**********************************************************************
* Copyright (c) 2014 HubSpot Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**********************************************************************/
package com.hubspot.jinjava.interpret;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import com.hubspot.jinjava.lib.Importable;
import com.hubspot.jinjava.lib.exptest.ExpTest;
import com.hubspot.jinjava.lib.exptest.ExpTestLibrary;
import com.hubspot.jinjava.lib.filter.Filter;
import com.hubspot.jinjava.lib.filter.FilterLibrary;
import com.hubspot.jinjava.lib.fn.ELFunctionDefinition;
import com.hubspot.jinjava.lib.fn.FunctionLibrary;
import com.hubspot.jinjava.lib.fn.MacroFunction;
import com.hubspot.jinjava.lib.tag.Tag;
import com.hubspot.jinjava.lib.tag.TagLibrary;
import com.hubspot.jinjava.util.ScopeMap;
/**
 * A scoped variable/library context for template interpretation. Each context
 * may have a parent; lookups that miss locally fall through to the parent
 * chain. Expression tests, filters, functions and tags live in per-context
 * libraries that are likewise resolved child-first.
 */
public class Context extends ScopeMap<String, Object> {
  /** Scope key under which a context stores its name -> macro map. */
  public static final String GLOBAL_MACROS_SCOPE_KEY = "__macros__";

  // Dependencies are recorded only on the root context; see addDependency().
  private final SetMultimap<String, String> dependencies = HashMultimap.create();

  private final ExpTestLibrary expTestLibrary;
  private final FilterLibrary filterLibrary;
  private final FunctionLibrary functionLibrary;
  private final TagLibrary tagLibrary;
  private final Context parent;

  public Context() {
    this(null);
  }

  public Context(Context parent) {
    super(parent);
    this.parent = parent;
    // Only a root context (no parent) seeds its libraries with the defaults.
    boolean isRoot = parent == null;
    this.expTestLibrary = new ExpTestLibrary(isRoot);
    this.filterLibrary = new FilterLibrary(isRoot);
    this.functionLibrary = new FunctionLibrary(isRoot);
    this.tagLibrary = new TagLibrary(isRoot);
  }

  public Context(Context parent, Map<String, ?> bindings) {
    this(parent);
    putAll(bindings);
  }

  @Override
  public Context getParent() {
    return parent;
  }

  /** Bindings local to this scope only (parent scopes excluded). */
  public Map<String, Object> getSessionBindings() {
    return getScope();
  }

  /**
   * Returns this scope's macro map, creating and installing an empty one on
   * first access.
   */
  @SuppressWarnings("unchecked")
  public Map<String, MacroFunction> getGlobalMacros() {
    Object stored = getScope().get(GLOBAL_MACROS_SCOPE_KEY);
    if (stored != null) {
      return (Map<String, MacroFunction>) stored;
    }
    Map<String, MacroFunction> created = new HashMap<>();
    getScope().put(GLOBAL_MACROS_SCOPE_KEY, created);
    return created;
  }

  public void addGlobalMacro(MacroFunction macro) {
    getGlobalMacros().put(macro.getName(), macro);
  }

  /** Resolves a macro child-first, walking up the parent chain on a miss. */
  public MacroFunction getGlobalMacro(String identifier) {
    MacroFunction found = getGlobalMacros().get(identifier);
    if (found != null) {
      return found;
    }
    return parent == null ? null : parent.getGlobalMacro(identifier);
  }

  public boolean isGlobalMacro(String identifier) {
    return getGlobalMacro(identifier) != null;
  }

  /**
   * Routes each importable class to the library matching its kind
   * (exp-test, filter, or tag); anything else is silently ignored.
   */
  @SafeVarargs
  @SuppressWarnings("unchecked")
  public final void registerClasses(Class<? extends Importable>... classes) {
    for (Class<? extends Importable> importable : classes) {
      if (ExpTest.class.isAssignableFrom(importable)) {
        expTestLibrary.registerClasses((Class<? extends ExpTest>) importable);
      } else if (Filter.class.isAssignableFrom(importable)) {
        filterLibrary.registerClasses((Class<? extends Filter>) importable);
      } else if (Tag.class.isAssignableFrom(importable)) {
        tagLibrary.registerClasses((Class<? extends Tag>) importable);
      }
    }
  }

  /** All exp-tests visible here: local entries first, then ancestors'. */
  public Collection<ExpTest> getAllExpTests() {
    List<ExpTest> result = new ArrayList<>();
    result.addAll(expTestLibrary.entries());
    if (parent != null) {
      result.addAll(parent.getAllExpTests());
    }
    return result;
  }

  public ExpTest getExpTest(String name) {
    ExpTest local = expTestLibrary.getExpTest(name);
    if (local != null) {
      return local;
    }
    return parent == null ? null : parent.getExpTest(name);
  }

  public void registerExpTest(ExpTest t) {
    expTestLibrary.addExpTest(t);
  }

  /** All filters visible here: local entries first, then ancestors'. */
  public Collection<Filter> getAllFilters() {
    List<Filter> result = new ArrayList<>();
    result.addAll(filterLibrary.entries());
    if (parent != null) {
      result.addAll(parent.getAllFilters());
    }
    return result;
  }

  public Filter getFilter(String name) {
    Filter local = filterLibrary.getFilter(name);
    if (local != null) {
      return local;
    }
    return parent == null ? null : parent.getFilter(name);
  }

  public void registerFilter(Filter f) {
    filterLibrary.addFilter(f);
  }

  public ELFunctionDefinition getFunction(String name) {
    ELFunctionDefinition local = functionLibrary.getFunction(name);
    if (local != null) {
      return local;
    }
    return parent == null ? null : parent.getFunction(name);
  }

  /** All functions visible here: local entries first, then ancestors'. */
  public Collection<ELFunctionDefinition> getAllFunctions() {
    List<ELFunctionDefinition> result = new ArrayList<>();
    result.addAll(functionLibrary.entries());
    if (parent != null) {
      result.addAll(parent.getAllFunctions());
    }
    return result;
  }

  public void registerFunction(ELFunctionDefinition f) {
    functionLibrary.addFunction(f);
  }

  /** All tags visible here: local entries first, then ancestors'. */
  public Collection<Tag> getAllTags() {
    List<Tag> result = new ArrayList<>();
    result.addAll(tagLibrary.entries());
    if (parent != null) {
      result.addAll(parent.getAllTags());
    }
    return result;
  }

  public Tag getTag(String name) {
    Tag local = tagLibrary.getTag(name);
    if (local != null) {
      return local;
    }
    return parent == null ? null : parent.getTag(name);
  }

  public void registerTag(Tag t) {
    tagLibrary.addTag(t);
  }

  /** Records a dependency on the root context so it survives child scopes. */
  public void addDependency(String type, String identification) {
    getHighestParentContext().dependencies.get(type).add(identification);
  }

  public SetMultimap<String, String> getDependencies() {
    return getHighestParentContext().dependencies;
  }

  /**
   * Walks up the parent chain to the topmost context, bailing out early if a
   * context's parent refers back to itself (guards against an infinite walk).
   */
  private Context getHighestParentContext() {
    Context top = parent == null ? this : parent;
    for (Context above = top.getParent(); above != null; above = top.getParent()) {
      if (above.equals(above.getParent())) {
        return top;
      }
      top = top.getParent();
    }
    return top;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.federation.fairness;
import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.URI;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.server.federation.MiniRouterDFSCluster.RouterContext;
import org.apache.hadoop.hdfs.server.federation.RouterConfigBuilder;
import org.apache.hadoop.hdfs.server.federation.StateStoreDFSCluster;
import org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
import org.junit.After;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test the Router handlers fairness control rejects and accepts requests.
*/
public class TestRouterHandlersFairness {

  private static final Logger LOG =
      LoggerFactory.getLogger(TestRouterHandlersFairness.class);

  // Mini federated cluster under test; torn down after each test case.
  private StateStoreDFSCluster cluster;

  @After
  public void cleanup() {
    if (cluster != null) {
      cluster.shutdown();
      cluster = null;
    }
  }

  /**
   * Builds and starts a two-nameservice federated cluster, optionally with
   * static fairness control and a deliberately tiny handler count so permits
   * can be exhausted by the test.
   *
   * @param fairnessEnable whether to install the fairness policy controller
   * @param ha whether nameservices run in HA mode
   */
  private void setupCluster(boolean fairnessEnable, boolean ha)
      throws Exception {
    // Build and start a federated cluster
    cluster = new StateStoreDFSCluster(ha, 2);
    Configuration routerConf = new RouterConfigBuilder()
        .stateStore()
        .rpc()
        .build();
    // Fairness control
    if (fairnessEnable) {
      routerConf.setClass(
          RBFConfigKeys.DFS_ROUTER_FAIRNESS_POLICY_CONTROLLER_CLASS,
          StaticRouterRpcFairnessPolicyController.class,
          RouterRpcFairnessPolicyController.class);
    }
    // With two name services configured, each nameservice has 1 permit and
    // fan-out calls have 1 permit.
    routerConf.setInt(RBFConfigKeys.DFS_ROUTER_HANDLER_COUNT_KEY, 3);
    // Datanodes not needed for this test.
    cluster.setNumDatanodesPerNameservice(0);
    cluster.addRouterOverrides(routerConf);
    cluster.startCluster();
    cluster.startRouters();
    cluster.waitClusterUp();
  }

  @Test
  public void testFairnessControlOff() throws Exception {
    setupCluster(false, false);
    startLoadTest(false);
  }

  @Test
  public void testFairnessControlOn() throws Exception {
    setupCluster(true, false);
    startLoadTest(true);
  }

  /**
   * Start a generic load test as a client against a cluster which has either
   * fairness configured or not configured. Test will spawn a set of 100
   * threads to simulate concurrent request to test routers. If fairness is
   * enabled, the test will successfully report the failure of some threads
   * to continue with StandbyException. If fairness is not configured, all
   * threads of the same test should successfully complete all the rpcs.
   *
   * @param fairness Flag to indicate if fairness management is on/off.
   * @throws Exception Throws exception.
   */
  private void startLoadTest(boolean fairness)
      throws Exception {
    // Concurrent requests
    startLoadTest(true, fairness);
    // Sequential requests
    startLoadTest(false, fairness);
  }

  /**
   * Core load driver. Phase 1: pre-acquire the relevant permit(s) so that,
   * with fairness on, every RPC must be rejected; verify the rejected-permit
   * counters track the observed overload exceptions. Phase 2: release the
   * permits and verify the same number of RPCs all succeed.
   *
   * NOTE: the acquire/release pairing here is order-sensitive — the permits
   * taken before innerCalls() must be released before phase 2 or the second
   * round of calls would also be rejected.
   */
  private void startLoadTest(final boolean isConcurrent, final boolean fairness)
      throws Exception {
    RouterContext routerContext = cluster.getRandomRouter();
    URI address = routerContext.getFileSystemURI();
    Configuration conf = new HdfsConfiguration();
    final int numOps = 10;
    // Counts RPCs that failed with the expected overload StandbyException.
    AtomicInteger overloadException = new AtomicInteger();
    // Test when handlers are overloaded
    if (fairness) {
      if (isConcurrent) {
        LOG.info("Taking fanout lock first");
        // take the lock for concurrent NS to block fanout calls
        assertTrue(routerContext.getRouter().getRpcServer()
            .getRPCClient().getRouterRpcFairnessPolicyController()
            .acquirePermit(RouterRpcFairnessConstants.CONCURRENT_NS));
      } else {
        for (String ns : cluster.getNameservices()) {
          LOG.info("Taking lock first for ns: {}", ns);
          assertTrue(routerContext.getRouter().getRpcServer()
              .getRPCClient().getRouterRpcFairnessPolicyController()
              .acquirePermit(ns));
        }
      }
    }
    int originalRejectedPermits = getTotalRejectedPermits(routerContext);
    // |- All calls should fail since permits not released
    innerCalls(address, numOps, isConcurrent, conf, overloadException);
    int latestRejectedPermits = getTotalRejectedPermits(routerContext);
    // Every rejection must correspond to one observed overload exception.
    assertEquals(latestRejectedPermits - originalRejectedPermits,
        overloadException.get());
    if (fairness) {
      assertTrue(overloadException.get() > 0);
      if (isConcurrent) {
        LOG.info("Release fanout lock that was taken before test");
        // take the lock for concurrent NS to block fanout calls
        routerContext.getRouter().getRpcServer()
            .getRPCClient().getRouterRpcFairnessPolicyController()
            .releasePermit(RouterRpcFairnessConstants.CONCURRENT_NS);
      } else {
        for (String ns : cluster.getNameservices()) {
          routerContext.getRouter().getRpcServer()
              .getRPCClient().getRouterRpcFairnessPolicyController()
              .releasePermit(ns);
        }
      }
    } else {
      assertEquals("Number of failed RPCs without fairness configured",
          0, overloadException.get());
    }
    // Test when handlers are not overloaded
    int originalAcceptedPermits = getTotalAcceptedPermits(routerContext);
    overloadException = new AtomicInteger();
    // |- All calls should succeed since permits not acquired
    innerCalls(address, numOps, isConcurrent, conf, overloadException);
    int latestAcceptedPermits = getTotalAcceptedPermits(routerContext);
    assertEquals(latestAcceptedPermits - originalAcceptedPermits, numOps);
    assertEquals(overloadException.get(), 0);
  }

  /** One sequential (single-NS) RPC: a simple file stat. */
  private void invokeSequential(ClientProtocol routerProto) throws IOException {
    routerProto.getFileInfo("/test.txt");
  }

  /** One fan-out RPC: renewLease hits all nameservices concurrently. */
  private void invokeConcurrent(ClientProtocol routerProto, String clientName)
      throws IOException {
    routerProto.renewLease(clientName);
  }

  /** Sum of rejected permits across every nameservice plus the fan-out pool. */
  private int getTotalRejectedPermits(RouterContext routerContext) {
    int totalRejectedPermits = 0;
    for (String ns : cluster.getNameservices()) {
      totalRejectedPermits += routerContext.getRouterRpcClient()
          .getRejectedPermitForNs(ns);
    }
    totalRejectedPermits += routerContext.getRouterRpcClient()
        .getRejectedPermitForNs(RouterRpcFairnessConstants.CONCURRENT_NS);
    return totalRejectedPermits;
  }

  /** Sum of accepted permits across every nameservice plus the fan-out pool. */
  private int getTotalAcceptedPermits(RouterContext routerContext) {
    int totalAcceptedPermits = 0;
    for (String ns : cluster.getNameservices()) {
      totalAcceptedPermits += routerContext.getRouterRpcClient()
          .getAcceptedPermitForNs(ns);
    }
    totalAcceptedPermits += routerContext.getRouterRpcClient()
        .getAcceptedPermitForNs(RouterRpcFairnessConstants.CONCURRENT_NS);
    return totalAcceptedPermits;
  }

  /**
   * Issues {@code numOps} RPCs through a fresh DFSClient each iteration.
   * An overload rejection surfaces as a RemoteException wrapping a
   * StandbyException whose text names the overloaded NS; anything else fails
   * the test. The client is always closed, even on failure.
   */
  private void innerCalls(URI address, int numOps, boolean isConcurrent,
      Configuration conf, AtomicInteger overloadException) throws IOException {
    for (int i = 0; i < numOps; i++) {
      DFSClient routerClient = null;
      try {
        routerClient = new DFSClient(address, conf);
        String clientName = routerClient.getClientName();
        ClientProtocol routerProto = routerClient.getNamenode();
        if (isConcurrent) {
          invokeConcurrent(routerProto, clientName);
        } else {
          invokeSequential(routerProto);
        }
      } catch (RemoteException re) {
        IOException ioe = re.unwrapRemoteException();
        assertTrue("Wrong exception: " + ioe,
            ioe instanceof StandbyException);
        assertExceptionContains("is overloaded for NS", ioe);
        overloadException.incrementAndGet();
      } catch (Throwable e) {
        // NOTE(review): plain rethrow — effectively a no-op catch, presumably
        // kept as a breakpoint/diagnostic hook; could be removed.
        throw e;
      } finally {
        if (routerClient != null) {
          try {
            routerClient.close();
          } catch (IOException e) {
            LOG.error("Cannot close the client");
          }
        }
      }
      // NOTE(review): result unused — this get() is a harmless no-op leftover.
      overloadException.get();
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.kafka;
import org.junit.Test;
import org.apache.storm.utils.Time;
import org.junit.After;
import org.junit.Before;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class ExponentialBackoffMsgRetryManagerTest {

    // Arbitrary Kafka offsets used as retry keys throughout the tests.
    private static final Long TEST_OFFSET = 101L;
    private static final Long TEST_OFFSET2 = 102L;
    private static final Long TEST_OFFSET3 = 105L;
    // Cutoff offset for the clearOffsetsBefore() test: 101/102 < 103 <= 105.
    private static final Long TEST_NEW_OFFSET = 103L;

    @Before
    public void setup() throws Exception {
        // All delay checks run against simulated time, advanced manually below.
        Time.startSimulating();
    }

    @After
    public void cleanup() throws Exception {
        Time.stopSimulating();
    }

    /** With zero delay, a failed message is immediately retryable — repeatedly. */
    @Test
    public void testImmediateRetry() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        Long next = manager.nextFailedMessageToRetry();
        assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
        assertTrue("message should be ready for retry immediately", manager.shouldReEmitMsg(TEST_OFFSET));
        manager.retryStarted(TEST_OFFSET);
        manager.failed(TEST_OFFSET);
        next = manager.nextFailedMessageToRetry();
        assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
        assertTrue("message should be ready for retry immediately", manager.shouldReEmitMsg(TEST_OFFSET));
    }

    /** A 100ms initial delay gates the first retry: not ready at +5ms, ready at +105ms. */
    @Test
    public void testSingleDelay() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(100, 1d, 1000, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        Time.advanceTime(5);
        Long next = manager.nextFailedMessageToRetry();
        assertNull("expect no message ready for retry yet", next);
        assertFalse("message should not be ready for retry yet", manager.shouldReEmitMsg(TEST_OFFSET));
        Time.advanceTime(100);
        next = manager.nextFailedMessageToRetry();
        assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
        assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
    }

    /**
     * Each consecutive failure multiplies the wait. For every round: advance
     * half the expected wait (not ready yet), then the other half (ready).
     */
    @Test
    public void testExponentialBackoff() throws Exception {
        final long initial = 10;
        final double mult = 2d;
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(initial, mult, initial * 10, Integer.MAX_VALUE);
        long expectedWaitTime = initial;
        for (long i = 0L; i < 3L; ++i) {
            manager.failed(TEST_OFFSET);
            // (x + 1) / 2 twice covers odd waits: the two halves sum to >= x.
            Time.advanceTime((expectedWaitTime + 1L) / 2L);
            assertFalse("message should not be ready for retry yet", manager.shouldReEmitMsg(TEST_OFFSET));
            Time.advanceTime((expectedWaitTime + 1L) / 2L);
            Long next = manager.nextFailedMessageToRetry();
            assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
            assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
            manager.retryStarted(TEST_OFFSET);
            expectedWaitTime *= mult;
        }
    }

    /** Retry order is by ready-time, not failure order or offset order. */
    @Test
    public void testRetryOrder() throws Exception {
        final long initial = 10;
        final double mult = 2d;
        final long max = 20;
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(initial, mult, max, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        Time.advanceTime(initial);
        manager.retryStarted(TEST_OFFSET);
        manager.failed(TEST_OFFSET);
        manager.failed(TEST_OFFSET2);
        // although TEST_OFFSET failed first, it's retry delay time is longer b/c this is the second retry
        // so TEST_OFFSET2 should come first
        Time.advanceTime(initial * 2);
        assertTrue("message "+TEST_OFFSET+"should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
        assertTrue("message "+TEST_OFFSET2+"should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET2));
        Long next = manager.nextFailedMessageToRetry();
        assertEquals("expect first message to retry is "+TEST_OFFSET2, TEST_OFFSET2, next);
        Time.advanceTime(initial);
        // haven't retried yet, so first should still be TEST_OFFSET2
        next = manager.nextFailedMessageToRetry();
        assertEquals("expect first message to retry is "+TEST_OFFSET2, TEST_OFFSET2, next);
        manager.retryStarted(next);
        // now it should be TEST_OFFSET
        next = manager.nextFailedMessageToRetry();
        assertEquals("expect message to retry is now "+TEST_OFFSET, TEST_OFFSET, next);
        manager.retryStarted(next);
        // now none left
        next = manager.nextFailedMessageToRetry();
        assertNull("expect no message to retry now", next);
    }

    /** Once a retry has started, the message is no longer queued or re-emittable. */
    @Test
    public void testQueriesAfterRetriedAlready() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        Long next = manager.nextFailedMessageToRetry();
        assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
        assertTrue("message should be ready for retry immediately", manager.shouldReEmitMsg(TEST_OFFSET));
        manager.retryStarted(TEST_OFFSET);
        next = manager.nextFailedMessageToRetry();
        assertNull("expect no message ready after retried", next);
        assertFalse("message should not be ready after retried", manager.shouldReEmitMsg(TEST_OFFSET));
    }

    /** retryStarted() without a prior failed() is an illegal state. */
    @Test(expected = IllegalStateException.class)
    public void testRetryWithoutFail() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.retryStarted(TEST_OFFSET);
    }

    /** A second retryStarted() without an intervening failed() must throw. */
    @Test(expected = IllegalStateException.class)
    public void testFailRetryRetry() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        try {
            // First retryStarted() is legal here; guard it so only the second
            // call can satisfy the expected-exception annotation.
            manager.retryStarted(TEST_OFFSET);
        } catch (IllegalStateException ise) {
            fail("IllegalStateException unexpected here: " + ise);
        }
        assertFalse("message should not be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
        manager.retryStarted(TEST_OFFSET);
    }

    /** The backoff delay is capped at retryDelayMaxMs rather than growing forever. */
    @Test
    public void testMaxBackoff() throws Exception {
        final long initial = 100;
        final double mult = 2d;
        final long max = 2000;
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(initial, mult, max, Integer.MAX_VALUE);
        long expectedWaitTime = initial;
        for (long i = 0L; i < 4L; ++i) {
            manager.failed(TEST_OFFSET);
            Time.advanceTime((expectedWaitTime + 1L) / 2L);
            assertFalse("message should not be ready for retry yet", manager.shouldReEmitMsg(TEST_OFFSET));
            Time.advanceTime((expectedWaitTime + 1L) / 2L);
            Long next = manager.nextFailedMessageToRetry();
            assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
            assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
            manager.retryStarted(TEST_OFFSET);
            // Mirrors the manager's own capping arithmetic.
            expectedWaitTime = Math.min((long) (expectedWaitTime * mult), max);
        }
    }

    /** Acking a failed message removes it from the retry queue. */
    @Test
    public void testFailThenAck() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
        manager.acked(TEST_OFFSET);
        Long next = manager.nextFailedMessageToRetry();
        assertNull("expect no message ready after acked", next);
        assertFalse("message should not be ready after acked", manager.shouldReEmitMsg(TEST_OFFSET));
    }

    /** A failure after an ack re-queues the message normally. */
    @Test
    public void testAckThenFail() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.acked(TEST_OFFSET);
        assertFalse("message should not be ready after acked", manager.shouldReEmitMsg(TEST_OFFSET));
        manager.failed(TEST_OFFSET);
        Long next = manager.nextFailedMessageToRetry();
        assertEquals("expect test offset next available for retry", TEST_OFFSET, next);
        assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
    }

    /** clearOffsetsBefore drops offsets strictly below the cutoff; 105 survives 103. */
    @Test
    public void testClearInvalidMessages() throws Exception {
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(0, 0d, 0, Integer.MAX_VALUE);
        manager.failed(TEST_OFFSET);
        manager.failed(TEST_OFFSET2);
        manager.failed(TEST_OFFSET3);
        assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET));
        assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET2));
        assertTrue("message should be ready for retry", manager.shouldReEmitMsg(TEST_OFFSET3));
        manager.clearOffsetsBefore(TEST_NEW_OFFSET);
        Long next = manager.nextFailedMessageToRetry();
        assertEquals("expect test offset next available for retry", TEST_OFFSET3, next);
        manager.acked(TEST_OFFSET3);
        next = manager.nextFailedMessageToRetry();
        assertNull("expect no message ready after acked", next);
    }

    /** retryFurther() turns false once the failure count reaches retryLimit. */
    @Test
    public void testMaxRetry() throws Exception {
        final long initial = 100;
        final double mult = 2d;
        final long max = 2000;
        final int maxRetries = 2;
        ExponentialBackoffMsgRetryManager manager = buildExponentialBackoffMsgRetryManager(initial, mult, max, maxRetries);
        assertTrue(manager.retryFurther(TEST_OFFSET));
        manager.failed(TEST_OFFSET);
        assertTrue(manager.retryFurther(TEST_OFFSET));
        manager.failed(TEST_OFFSET);
        assertFalse(manager.retryFurther(TEST_OFFSET));
    }

    /**
     * Builds a manager configured through a minimal SpoutConfig; the null
     * constructor args are unused by the retry logic under test.
     */
    private ExponentialBackoffMsgRetryManager buildExponentialBackoffMsgRetryManager(long retryInitialDelayMs,
                                                                                     double retryDelayMultiplier,
                                                                                     long retryDelayMaxMs,
                                                                                     int retryLimit) {
        SpoutConfig spoutConfig = new SpoutConfig(null, null, null, null);
        spoutConfig.retryInitialDelayMs = retryInitialDelayMs;
        spoutConfig.retryDelayMultiplier = retryDelayMultiplier;
        spoutConfig.retryDelayMaxMs = retryDelayMaxMs;
        spoutConfig.retryLimit = retryLimit;
        ExponentialBackoffMsgRetryManager exponentialBackoffMsgRetryManager = new ExponentialBackoffMsgRetryManager();
        exponentialBackoffMsgRetryManager.prepare(spoutConfig, null);
        return exponentialBackoffMsgRetryManager;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.plaf.beg;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.impl.ActionMenuItem;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.MenuDragMouseEvent;
import javax.swing.event.MenuDragMouseListener;
import javax.swing.event.MouseInputListener;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.basic.BasicGraphicsUtils;
import javax.swing.plaf.basic.BasicLookAndFeel;
import javax.swing.plaf.basic.BasicMenuItemUI;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.lang.reflect.Method;
/**
* @author Eugene Belyaev
* @author Vladimir Kondratyev
*/
public class BegMenuItemUI extends BasicMenuItemUI {
private static final Rectangle b = new Rectangle(0, 0, 0, 0);
private static final Rectangle j = new Rectangle();
private static final Rectangle d = new Rectangle();
private int myMaxGutterIconWidth;
private int a;
private static Rectangle i = new Rectangle();
private int k;
private int e;
private static final Rectangle c = new Rectangle();
private static final Rectangle h = new Rectangle();
private static final Rectangle l = new Rectangle();
private static final Rectangle f = new Rectangle(32767, 32767);
@NonNls public static final String PLAY_SOUND_METHOD = "playSound";
@NonNls public static final String AQUA_LOOK_AND_FEEL_CLASS_NAME = "apple.laf.AquaLookAndFeel";
@NonNls public static final String GET_KEY_MODIFIERS_TEXT = "getKeyModifiersText";
private Border myAquaSelectedBackgroundPainter;
/** invoked by reflection */
  public static ComponentUI createUI(JComponent component) {
    // Swing's UIManager calls this factory reflectively per menu item;
    // the component argument is intentionally unused.
    return new BegMenuItemUI();
  }
  public BegMenuItemUI() {
    // Default gutter (check/icon column) width in px; installDefaults() may
    // override it from the LaF defaults table.
    myMaxGutterIconWidth = 18;
    if (UIUtil.isUnderAquaBasedLookAndFeel() && myAquaSelectedBackgroundPainter == null) {
      // On Aqua-based LaFs, reuse the native selected-item background painter.
      // NOTE(review): myAquaSelectedBackgroundPainter is an instance field, so
      // the null check is always true in a constructor — looks like it was
      // meant to cache a static; confirm intent before changing.
      myAquaSelectedBackgroundPainter = (Border) UIManager.get("MenuItem.selectedBackgroundPainter");
    }
  }
protected void installDefaults() {
super.installDefaults();
final String propertyPrefix = getPropertyPrefix();
Integer integer = UIUtil.getPropertyMaxGutterIconWidth(propertyPrefix);
if (integer != null){
myMaxGutterIconWidth = integer.intValue();
}
}
public void paint(Graphics g, JComponent comp) {
UISettings.setupAntialiasing(g);
JMenuItem jmenuitem = (JMenuItem)comp;
ButtonModel buttonmodel = jmenuitem.getModel();
int mnemonicIndex = jmenuitem.getDisplayedMnemonicIndex();
Icon icon1 = getIcon();
Icon icon2 = getAllowedIcon();
int j1 = jmenuitem.getWidth();
int k1 = jmenuitem.getHeight();
Insets insets = comp.getInsets();
initBounds();
f.setBounds(0, 0, j1, k1);
f.x += insets.left;
f.y += insets.top;
f.width -= insets.right + f.x;
f.height -= insets.bottom + f.y;
Font font = g.getFont();
Font font1 = comp.getFont();
g.setFont(font1);
FontMetrics fontmetrics = g.getFontMetrics(font1);
FontMetrics fontmetrics1 = g.getFontMetrics(acceleratorFont);
String keyStrokeText;
if (jmenuitem instanceof ActionMenuItem) {
keyStrokeText = ((ActionMenuItem)jmenuitem).getFirstShortcutText();
}else{
keyStrokeText = getKeyStrokeText(jmenuitem.getAccelerator());
}
String s1 = layoutMenuItem(fontmetrics, jmenuitem.getText(), fontmetrics1, keyStrokeText, icon1, icon2, arrowIcon, jmenuitem.getVerticalAlignment(), jmenuitem.getHorizontalAlignment(), jmenuitem.getVerticalTextPosition(), jmenuitem.getHorizontalTextPosition(), f, l, j, c, h, d, jmenuitem.getText() != null ? defaultTextIconGap : 0, defaultTextIconGap);
Color color2 = g.getColor();
if (comp.isOpaque()){
g.setColor(jmenuitem.getBackground());
g.fillRect(0, 0, j1, k1);
if (buttonmodel.isArmed() || (comp instanceof JMenu) && buttonmodel.isSelected()){
if (UIUtil.isUnderAquaLookAndFeel()) {
myAquaSelectedBackgroundPainter.paintBorder(comp, g, 0, 0, j1, k1);
} else {
g.setColor(selectionBackground);
if (icon2 != null){
g.fillRect(k, 0, j1 - k, k1);
}
else{
g.fillRect(0, 0, j1, k1);
//graphics.setColor(BegResources.q);
//graphics.drawLine(0, 0, 0, k1);
g.setColor(selectionBackground);
}
//graphics.setColor(BegResources.r);
//graphics.drawLine(j1 - 1, 0, j1 - 1, k1);
}
}
g.setColor(color2);
}
if (icon2 != null){
if (buttonmodel.isArmed() || (comp instanceof JMenu) && buttonmodel.isSelected()){
g.setColor(selectionForeground);
}
else{
g.setColor(jmenuitem.getForeground());
}
if (useCheckAndArrow()){
icon2.paintIcon(comp, g, h.x, h.y);
}
g.setColor(color2);
if (menuItem.isArmed()){
drawIconBorder(g);
}
}
if (icon1 != null){
if (!buttonmodel.isEnabled()){
icon1 = jmenuitem.getDisabledIcon();
}
else
if (buttonmodel.isPressed() && buttonmodel.isArmed()){
icon1 = jmenuitem.getPressedIcon();
if (icon1 == null){
icon1 = jmenuitem.getIcon();
}
}
if (icon1 != null){
icon1.paintIcon(comp, g, l.x, l.y);
}
}
if (s1 != null && s1.length() > 0){
if (buttonmodel.isEnabled()){
if (buttonmodel.isArmed() || (comp instanceof JMenu) && buttonmodel.isSelected()){
g.setColor(selectionForeground);
}
else{
g.setColor(jmenuitem.getForeground());
}
BasicGraphicsUtils.drawStringUnderlineCharAt(g, s1, mnemonicIndex, j.x, j.y + fontmetrics.getAscent());
}
else {
final Object disabledForeground = UIUtil.getMenuItemDisabledForegroundObject();
if (disabledForeground instanceof Color){
g.setColor((Color)disabledForeground);
BasicGraphicsUtils.drawStringUnderlineCharAt(g, s1, mnemonicIndex, j.x, j.y + fontmetrics.getAscent());
}
else{
g.setColor(jmenuitem.getBackground().brighter());
BasicGraphicsUtils.drawStringUnderlineCharAt(g, s1, mnemonicIndex, j.x, j.y + fontmetrics.getAscent());
g.setColor(jmenuitem.getBackground().darker());
BasicGraphicsUtils.drawStringUnderlineCharAt(g, s1, mnemonicIndex, j.x - 1, (j.y + fontmetrics.getAscent()) - 1);
}
}
}
if (keyStrokeText != null && !keyStrokeText.isEmpty()){
g.setFont(acceleratorFont);
if (buttonmodel.isEnabled()){
if (UIUtil.isUnderAquaBasedLookAndFeel() && (buttonmodel.isArmed() || (comp instanceof JMenu) && buttonmodel.isSelected())) {
g.setColor(selectionForeground);
}
else {
if (buttonmodel.isArmed() || (comp instanceof JMenu) && buttonmodel.isSelected()) {
g.setColor(acceleratorSelectionForeground);
}
else {
g.setColor(acceleratorForeground);
}
}
BasicGraphicsUtils.drawString(g, keyStrokeText, 0, c.x, c.y + fontmetrics.getAscent());
}
else
if (disabledForeground != null){
g.setColor(disabledForeground);
BasicGraphicsUtils.drawString(g, keyStrokeText, 0, c.x, c.y + fontmetrics.getAscent());
}
else{
g.setColor(jmenuitem.getBackground().brighter());
BasicGraphicsUtils.drawString(g, keyStrokeText, 0, c.x, c.y + fontmetrics.getAscent());
g.setColor(jmenuitem.getBackground().darker());
BasicGraphicsUtils.drawString(g, keyStrokeText, 0, c.x - 1, (c.y + fontmetrics.getAscent()) - 1);
}
}
if (arrowIcon != null){
if (buttonmodel.isArmed() || (comp instanceof JMenu) && buttonmodel.isSelected()){
g.setColor(selectionForeground);
}
if (useCheckAndArrow()){
arrowIcon.paintIcon(comp, g, d.x, d.y);
}
}
g.setColor(color2);
g.setFont(font);
}
private String getKeyStrokeText(KeyStroke keystroke) {
String s1 = "";
if (keystroke != null){
int j1 = keystroke.getModifiers();
if (j1 > 0){
if (SystemInfo.isMac) {
try {
Class appleLaf = Class.forName(AQUA_LOOK_AND_FEEL_CLASS_NAME);
Method getModifiers = appleLaf.getMethod(GET_KEY_MODIFIERS_TEXT, new Class[] {int.class, boolean.class});
s1 = (String)getModifiers.invoke(appleLaf, new Object[] {new Integer(j1), Boolean.FALSE});
}
catch (Exception e) {
if (SystemInfo.isMacOSLeopard) {
s1 = KeymapUtil.getKeyModifiersTextForMacOSLeopard(j1);
}
else {
s1 = KeyEvent.getKeyModifiersText(j1) + '+';
}
}
}
else {
s1 = KeyEvent.getKeyModifiersText(j1) + '+';
}
}
s1 = s1 + KeyEvent.getKeyText(keystroke.getKeyCode());
}
return s1;
}
private boolean useCheckAndArrow() {
if ((menuItem instanceof JMenu) && ((JMenu)menuItem).isTopLevelMenu()){
return false;
}
return true;
}
public MenuElement[] getPath() {
MenuSelectionManager menuselectionmanager = MenuSelectionManager.defaultManager();
MenuElement amenuelement[] = menuselectionmanager.getSelectedPath();
int i1 = amenuelement.length;
if (i1 == 0){
return new MenuElement[0];
}
java.awt.Container container = menuItem.getParent();
MenuElement amenuelement1[];
if (amenuelement[i1 - 1].getComponent() == container){
amenuelement1 = new MenuElement[i1 + 1];
System.arraycopy(amenuelement, 0, amenuelement1, 0, i1);
amenuelement1[i1] = menuItem;
}
else{
int j1;
for(j1 = amenuelement.length - 1; j1 >= 0; j1--){
if (amenuelement[j1].getComponent() == container){
break;
}
}
amenuelement1 = new MenuElement[j1 + 2];
System.arraycopy(amenuelement, 0, amenuelement1, 0, j1 + 1);
amenuelement1[j1 + 1] = menuItem;
}
return amenuelement1;
}
public Dimension getMinimumSize(JComponent jcomponent) {
return null;
}
private String layoutMenuItem(
FontMetrics fontmetrics,
String text,
FontMetrics fontmetrics1,
String keyStrokeText,
Icon icon,
Icon checkIcon,
Icon arrowIcon,
int verticalAlignment,
int horizontalAlignment,
int verticalTextPosition,
int horizontalTextPosition,
Rectangle viewRect,
Rectangle iconRect,
Rectangle textRect,
Rectangle acceleratorRect,
Rectangle checkIconRect,
Rectangle arrowIconRect,
int textIconGap,
int menuItemGap
) {
SwingUtilities.layoutCompoundLabel(menuItem, fontmetrics, text, icon, verticalAlignment, horizontalAlignment, verticalTextPosition, horizontalTextPosition, viewRect, iconRect, textRect, textIconGap);
if (keyStrokeText == null || "".equals(keyStrokeText)){
acceleratorRect.width = acceleratorRect.height = 0;
}
else{
acceleratorRect.width = SwingUtilities.computeStringWidth(fontmetrics1, keyStrokeText);
acceleratorRect.height = fontmetrics1.getHeight();
}
/* Initialize the checkIcon bounds rectangle's width & height.
*/
if (useCheckAndArrow()){
if (checkIcon != null){
checkIconRect.width = checkIcon.getIconWidth();
checkIconRect.height = checkIcon.getIconHeight();
}else{
checkIconRect.width = checkIconRect.height = 0;
}
/* Initialize the arrowIcon bounds rectangle width & height.
*/
if (arrowIcon != null){
arrowIconRect.width = arrowIcon.getIconWidth();
arrowIconRect.height = arrowIcon.getIconHeight();
}else{
arrowIconRect.width = arrowIconRect.height = 0;
}
textRect.x += myMaxGutterIconWidth;
iconRect.x += myMaxGutterIconWidth;
}
textRect.x += menuItemGap;
iconRect.x += menuItemGap;
Rectangle labelRect = iconRect.union(textRect);
// Position the Accelerator text rect
acceleratorRect.x += viewRect.width - arrowIconRect.width - menuItemGap - acceleratorRect.width;
acceleratorRect.y = (viewRect.y + viewRect.height / 2) - acceleratorRect.height / 2;
// Position the Check and Arrow Icons
if (useCheckAndArrow()){
arrowIconRect.x += viewRect.width - arrowIconRect.width;
arrowIconRect.y = (viewRect.y + labelRect.height / 2) - arrowIconRect.height / 2;
if (checkIcon != null){
checkIconRect.y = (viewRect.y + labelRect.height / 2) - checkIconRect.height / 2;
checkIconRect.x += (viewRect.x + myMaxGutterIconWidth / 2) - checkIcon.getIconWidth() / 2;
a = viewRect.x;
e = (viewRect.y + labelRect.height / 2) - myMaxGutterIconWidth / 2;
k = viewRect.x + myMaxGutterIconWidth + 2;
}
else{
checkIconRect.x = checkIconRect.y = 0;
}
}
return text;
}
private Icon getIcon() {
Icon icon = menuItem.getIcon();
if (icon != null && getAllowedIcon() != null){
icon = null;
}
return icon;
}
public Dimension getPreferredSize(JComponent comp) {
JMenuItem jmenuitem = (JMenuItem)comp;
Icon icon1 = getIcon();
Icon icon2 = getAllowedIcon();
String text = jmenuitem.getText();
String keyStrokeText;
if (jmenuitem instanceof ActionMenuItem) {
keyStrokeText = ((ActionMenuItem)jmenuitem).getFirstShortcutText();
}else{
keyStrokeText = getKeyStrokeText(jmenuitem.getAccelerator());
}
Font font = jmenuitem.getFont();
FontMetrics fontmetrics = comp.getFontMetrics(font);
FontMetrics fontmetrics1 = comp.getFontMetrics(acceleratorFont);
initBounds();
layoutMenuItem(fontmetrics, text, fontmetrics1, keyStrokeText, icon1, icon2, arrowIcon, jmenuitem.getVerticalAlignment(), jmenuitem.getHorizontalAlignment(), jmenuitem.getVerticalTextPosition(), jmenuitem.getHorizontalTextPosition(), f, l, j, c, h, d, text != null ? defaultTextIconGap : 0, defaultTextIconGap);
i.setBounds(j);
i = SwingUtilities.computeUnion(l.x, l.y, l.width, l.height, i);
if (!(keyStrokeText == null || "".equals(keyStrokeText))){
i.width += c.width;
i.width += 7 * defaultTextIconGap;
}
if (useCheckAndArrow()){
i.width += myMaxGutterIconWidth;
i.width += defaultTextIconGap;
i.width += defaultTextIconGap;
i.width += d.width;
}
i.width += 2 * defaultTextIconGap;
Insets insets = jmenuitem.getInsets();
if (insets != null){
i.width += insets.left + insets.right;
i.height += insets.top + insets.bottom;
}
if (i.width % 2 == 0){
i.width++;
}
if (i.height % 2 == 0){
i.height++;
}
return i.getSize();
}
private void drawIconBorder(Graphics g) {
/*
int i1 = a - 1;
int j1 = e - 2;
int k1 = i1 + myMaxGutterIconWidth + 1;
int l1 = j1 + myMaxGutterIconWidth + 4;
g.setColor(BegResources.m);
g.drawLine(i1, j1, i1, l1);
g.drawLine(i1, j1, k1, j1);
g.setColor(BegResources.j);
g.drawLine(k1, j1, k1, l1);
g.drawLine(i1, l1, k1, l1);
*/
}
private void initBounds() {
l.setBounds(b);
j.setBounds(b);
c.setBounds(b);
h.setBounds(b);
d.setBounds(b);
f.setBounds(0, 0, 32767, 32767);
i.setBounds(b);
}
private Icon getAllowedIcon() {
Icon icon = menuItem.isEnabled() ? menuItem.getIcon() : menuItem.getDisabledIcon();
if (icon != null && icon.getIconWidth() > myMaxGutterIconWidth){
icon = null;
}
return icon;
}
public Dimension getMaximumSize(JComponent comp) {
return null;
}
public void update(Graphics g, JComponent comp) {
paint(g, comp);
}
/** Copied from BasicMenuItemUI */
@SuppressWarnings({"HardCodedStringLiteral"})
private boolean isInternalFrameSystemMenu(){
String actionCommand=menuItem.getActionCommand();
if(
("Close".equals(actionCommand))||
("Minimize".equals(actionCommand))||
("Restore".equals(actionCommand))||
("Maximize".equals(actionCommand))
){
return true;
} else{
return false;
}
}
/** Copied from BasicMenuItemUI */
private void doClick(MenuSelectionManager msm,MouseEvent e) {
// Auditory cue
if(!isInternalFrameSystemMenu()){
@NonNls ActionMap map=menuItem.getActionMap();
if(map!=null){
Action audioAction=map.get(getPropertyPrefix()+".commandSound");
if(audioAction!=null){
// pass off firing the Action to a utility method
BasicLookAndFeel lf=(BasicLookAndFeel)UIManager.getLookAndFeel();
// It's a hack. The method BasicLookAndFeel.playSound has protected access, so
// it's imposible to mormally invoke it.
try {
Method playSoundMethod=BasicLookAndFeel.class.getDeclaredMethod(PLAY_SOUND_METHOD,new Class[]{Action.class});
playSoundMethod.setAccessible(true);
playSoundMethod.invoke(lf,new Object[]{audioAction});
} catch(Exception ignored) {}
}
}
}
// Visual feedback
if(msm==null){
msm=MenuSelectionManager.defaultManager();
}
msm.clearSelectedPath();
((ActionMenuItem)menuItem).fireActionPerformed(
new ActionEvent(
menuItem,
ActionEvent.ACTION_PERFORMED,
null,
e.getWhen(),
e.getModifiers()
)
);
}
protected MouseInputListener createMouseInputListener(JComponent c){
return new MyMouseInputHandler();
}
private class MyMouseInputHandler extends MouseInputHandler{
public void mouseReleased(MouseEvent e){
MenuSelectionManager manager=MenuSelectionManager.defaultManager();
Point p=e.getPoint();
if(p.x>=0&&p.x<menuItem.getWidth()&&p.y>=0&&p.y<menuItem.getHeight()){
doClick(manager,e);
} else{
manager.processMouseEvent(e);
}
}
}
protected MenuDragMouseListener createMenuDragMouseListener(JComponent c){
return new MyMenuDragMouseHandler();
}
private class MyMenuDragMouseHandler implements MenuDragMouseListener {
public void menuDragMouseEntered(MenuDragMouseEvent e){}
public void menuDragMouseDragged(MenuDragMouseEvent e){
MenuSelectionManager manager=e.getMenuSelectionManager();
MenuElement path[]=e.getPath();
manager.setSelectedPath(path);
}
public void menuDragMouseExited(MenuDragMouseEvent e){}
public void menuDragMouseReleased(MenuDragMouseEvent e){
MenuSelectionManager manager=e.getMenuSelectionManager();
Point p=e.getPoint();
if(p.x>=0&&p.x<menuItem.getWidth()&&
p.y>=0&&p.y<menuItem.getHeight()){
doClick(manager,e);
} else{
manager.clearSelectedPath();
}
}
}
}
| |
/**
*/
package CIM.IEC61968.Customers.impl;
import CIM.IEC61968.Common.Status;
import CIM.IEC61968.Common.impl.OrganisationImpl;
import CIM.IEC61968.Customers.Customer;
import CIM.IEC61968.Customers.CustomerAgreement;
import CIM.IEC61968.Customers.CustomerKind;
import CIM.IEC61968.Customers.CustomersPackage;
import CIM.IEC61968.Metering.EndDeviceAsset;
import CIM.IEC61968.Metering.MeteringPackage;
import CIM.IEC61968.Work.Work;
import CIM.IEC61968.Work.WorkPackage;
import CIM.IEC61970.Informative.InfERPSupport.ErpPerson;
import CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage;
import CIM.IEC61970.Informative.InfOperations.InfOperationsPackage;
import CIM.IEC61970.Informative.InfOperations.OutageNotification;
import CIM.IEC61970.Informative.InfOperations.PlannedOutage;
import CIM.IEC61970.Informative.InfOperations.TroubleTicket;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Customer</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getPlannedOutage <em>Planned Outage</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getEndDeviceAssets <em>End Device Assets</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getStatus <em>Status</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getKind <em>Kind</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getTroubleTickets <em>Trouble Tickets</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getErpPersons <em>Erp Persons</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getOutageNotifications <em>Outage Notifications</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getSpecialNeed <em>Special Need</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getPucNumber <em>Puc Number</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getWorks <em>Works</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#getCustomerAgreements <em>Customer Agreements</em>}</li>
* <li>{@link CIM.IEC61968.Customers.impl.CustomerImpl#isVip <em>Vip</em>}</li>
* </ul>
*
* @generated
*/
public class CustomerImpl extends OrganisationImpl implements Customer {
/**
* The cached value of the '{@link #getPlannedOutage() <em>Planned Outage</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPlannedOutage()
* @generated
* @ordered
*/
protected PlannedOutage plannedOutage;
/**
* The cached value of the '{@link #getEndDeviceAssets() <em>End Device Assets</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getEndDeviceAssets()
* @generated
* @ordered
*/
protected EList<EndDeviceAsset> endDeviceAssets;
/**
* The cached value of the '{@link #getStatus() <em>Status</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getStatus()
* @generated
* @ordered
*/
protected Status status;
/**
* The default value of the '{@link #getKind() <em>Kind</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getKind()
* @generated
* @ordered
*/
protected static final CustomerKind KIND_EDEFAULT = CustomerKind.RESIDENTIAL;
/**
* The cached value of the '{@link #getKind() <em>Kind</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getKind()
* @generated
* @ordered
*/
protected CustomerKind kind = KIND_EDEFAULT;
/**
* The cached value of the '{@link #getTroubleTickets() <em>Trouble Tickets</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getTroubleTickets()
* @generated
* @ordered
*/
protected EList<TroubleTicket> troubleTickets;
/**
* The cached value of the '{@link #getErpPersons() <em>Erp Persons</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getErpPersons()
* @generated
* @ordered
*/
protected EList<ErpPerson> erpPersons;
/**
* The cached value of the '{@link #getOutageNotifications() <em>Outage Notifications</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getOutageNotifications()
* @generated
* @ordered
*/
protected EList<OutageNotification> outageNotifications;
/**
* The default value of the '{@link #getSpecialNeed() <em>Special Need</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSpecialNeed()
* @generated
* @ordered
*/
protected static final String SPECIAL_NEED_EDEFAULT = null;
/**
* The cached value of the '{@link #getSpecialNeed() <em>Special Need</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSpecialNeed()
* @generated
* @ordered
*/
protected String specialNeed = SPECIAL_NEED_EDEFAULT;
/**
* The default value of the '{@link #getPucNumber() <em>Puc Number</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPucNumber()
* @generated
* @ordered
*/
protected static final String PUC_NUMBER_EDEFAULT = null;
/**
* The cached value of the '{@link #getPucNumber() <em>Puc Number</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPucNumber()
* @generated
* @ordered
*/
protected String pucNumber = PUC_NUMBER_EDEFAULT;
/**
* The cached value of the '{@link #getWorks() <em>Works</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getWorks()
* @generated
* @ordered
*/
protected EList<Work> works;
/**
* The cached value of the '{@link #getCustomerAgreements() <em>Customer Agreements</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCustomerAgreements()
* @generated
* @ordered
*/
protected EList<CustomerAgreement> customerAgreements;
/**
* The default value of the '{@link #isVip() <em>Vip</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isVip()
* @generated
* @ordered
*/
protected static final boolean VIP_EDEFAULT = false;
/**
* The cached value of the '{@link #isVip() <em>Vip</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isVip()
* @generated
* @ordered
*/
protected boolean vip = VIP_EDEFAULT;
/**
 * <!-- begin-user-doc -->
 * Creates an empty customer; all features start at their defaults.
 * <!-- end-user-doc -->
 * @generated
 */
protected CustomerImpl() {
super();
}
/**
 * <!-- begin-user-doc -->
 * Returns the static EMF metaclass for Customer.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
return CustomersPackage.Literals.CUSTOMER;
}
/**
 * <!-- begin-user-doc -->
 * Returns the planned-outage reference, resolving it first when it is an
 * unresolved EMF proxy (a RESOLVE notification is fired if resolution
 * replaces the cached value).
 * <!-- end-user-doc -->
 * @generated
 */
public PlannedOutage getPlannedOutage() {
if (plannedOutage != null && plannedOutage.eIsProxy()) {
InternalEObject oldPlannedOutage = (InternalEObject)plannedOutage;
plannedOutage = (PlannedOutage)eResolveProxy(oldPlannedOutage);
if (plannedOutage != oldPlannedOutage) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, CustomersPackage.CUSTOMER__PLANNED_OUTAGE, oldPlannedOutage, plannedOutage));
}
}
return plannedOutage;
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached planned-outage reference without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public PlannedOutage basicGetPlannedOutage() {
return plannedOutage;
}
/**
 * <!-- begin-user-doc -->
 * Sets the planned-outage reference without maintaining the inverse side,
 * queuing a SET notification on the given chain.
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetPlannedOutage(PlannedOutage newPlannedOutage, NotificationChain msgs) {
PlannedOutage oldPlannedOutage = plannedOutage;
plannedOutage = newPlannedOutage;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__PLANNED_OUTAGE, oldPlannedOutage, newPlannedOutage);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * <!-- begin-user-doc -->
 * Sets the planned-outage reference, maintaining the inverse
 * PlannedOutage.customerDatas side, and dispatches the queued notifications.
 * <!-- end-user-doc -->
 * @generated
 */
public void setPlannedOutage(PlannedOutage newPlannedOutage) {
if (newPlannedOutage != plannedOutage) {
NotificationChain msgs = null;
if (plannedOutage != null)
msgs = ((InternalEObject)plannedOutage).eInverseRemove(this, InfOperationsPackage.PLANNED_OUTAGE__CUSTOMER_DATAS, PlannedOutage.class, msgs);
if (newPlannedOutage != null)
msgs = ((InternalEObject)newPlannedOutage).eInverseAdd(this, InfOperationsPackage.PLANNED_OUTAGE__CUSTOMER_DATAS, PlannedOutage.class, msgs);
msgs = basicSetPlannedOutage(newPlannedOutage, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__PLANNED_OUTAGE, newPlannedOutage, newPlannedOutage));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates and returns the inverse-resolving list of end device assets
 * (inverse feature: EndDeviceAsset.customer).
 * <!-- end-user-doc -->
 * @generated
 */
public EList<EndDeviceAsset> getEndDeviceAssets() {
if (endDeviceAssets == null) {
endDeviceAssets = new EObjectWithInverseResolvingEList<EndDeviceAsset>(EndDeviceAsset.class, this, CustomersPackage.CUSTOMER__END_DEVICE_ASSETS, MeteringPackage.END_DEVICE_ASSET__CUSTOMER);
}
return endDeviceAssets;
}
/**
 * <!-- begin-user-doc -->
 * Returns the status reference, resolving it first when it is an unresolved
 * EMF proxy (firing a RESOLVE notification on replacement).
 * <!-- end-user-doc -->
 * @generated
 */
public Status getStatus() {
if (status != null && status.eIsProxy()) {
InternalEObject oldStatus = (InternalEObject)status;
status = (Status)eResolveProxy(oldStatus);
if (status != oldStatus) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, CustomersPackage.CUSTOMER__STATUS, oldStatus, status));
}
}
return status;
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached status reference without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public Status basicGetStatus() {
return status;
}
/**
 * <!-- begin-user-doc -->
 * Sets the status reference (no inverse to maintain) and fires a SET
 * notification when required.
 * <!-- end-user-doc -->
 * @generated
 */
public void setStatus(Status newStatus) {
Status oldStatus = status;
status = newStatus;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__STATUS, oldStatus, status));
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached customer kind attribute.
 * <!-- end-user-doc -->
 * @generated
 */
public CustomerKind getKind() {
return kind;
}
/**
 * <!-- begin-user-doc -->
 * Sets the kind attribute; a null argument falls back to the default
 * (RESIDENTIAL). Fires a SET notification when required.
 * <!-- end-user-doc -->
 * @generated
 */
public void setKind(CustomerKind newKind) {
CustomerKind oldKind = kind;
kind = newKind == null ? KIND_EDEFAULT : newKind;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__KIND, oldKind, kind));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates and returns the inverse-resolving list of trouble tickets
 * (inverse feature: TroubleTicket.customerData).
 * <!-- end-user-doc -->
 * @generated
 */
public EList<TroubleTicket> getTroubleTickets() {
if (troubleTickets == null) {
troubleTickets = new EObjectWithInverseResolvingEList<TroubleTicket>(TroubleTicket.class, this, CustomersPackage.CUSTOMER__TROUBLE_TICKETS, InfOperationsPackage.TROUBLE_TICKET__CUSTOMER_DATA);
}
return troubleTickets;
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates and returns the inverse-resolving list of ERP persons
 * (inverse feature: ErpPerson.customerData).
 * <!-- end-user-doc -->
 * @generated
 */
public EList<ErpPerson> getErpPersons() {
if (erpPersons == null) {
erpPersons = new EObjectWithInverseResolvingEList<ErpPerson>(ErpPerson.class, this, CustomersPackage.CUSTOMER__ERP_PERSONS, InfERPSupportPackage.ERP_PERSON__CUSTOMER_DATA);
}
return erpPersons;
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates and returns the many-to-many inverse-resolving list of
 * outage notifications (inverse feature: OutageNotification.customerDatas).
 * <!-- end-user-doc -->
 * @generated
 */
public EList<OutageNotification> getOutageNotifications() {
if (outageNotifications == null) {
outageNotifications = new EObjectWithInverseResolvingEList.ManyInverse<OutageNotification>(OutageNotification.class, this, CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS, InfOperationsPackage.OUTAGE_NOTIFICATION__CUSTOMER_DATAS);
}
return outageNotifications;
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached special-need attribute (may be null).
 * <!-- end-user-doc -->
 * @generated
 */
public String getSpecialNeed() {
return specialNeed;
}
/**
 * <!-- begin-user-doc -->
 * Sets the special-need attribute and fires a SET notification when required.
 * <!-- end-user-doc -->
 * @generated
 */
public void setSpecialNeed(String newSpecialNeed) {
String oldSpecialNeed = specialNeed;
specialNeed = newSpecialNeed;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__SPECIAL_NEED, oldSpecialNeed, specialNeed));
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached PUC number attribute (may be null).
 * <!-- end-user-doc -->
 * @generated
 */
public String getPucNumber() {
return pucNumber;
}
/**
 * <!-- begin-user-doc -->
 * Sets the PUC number attribute and fires a SET notification when required.
 * <!-- end-user-doc -->
 * @generated
 */
public void setPucNumber(String newPucNumber) {
String oldPucNumber = pucNumber;
pucNumber = newPucNumber;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__PUC_NUMBER, oldPucNumber, pucNumber));
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates and returns the many-to-many inverse-resolving list of works
 * (inverse feature: Work.customers).
 * <!-- end-user-doc -->
 * @generated
 */
public EList<Work> getWorks() {
if (works == null) {
works = new EObjectWithInverseResolvingEList.ManyInverse<Work>(Work.class, this, CustomersPackage.CUSTOMER__WORKS, WorkPackage.WORK__CUSTOMERS);
}
return works;
}
/**
 * <!-- begin-user-doc -->
 * Lazily creates and returns the inverse-resolving list of customer
 * agreements (inverse feature: CustomerAgreement.customer).
 * <!-- end-user-doc -->
 * @generated
 */
public EList<CustomerAgreement> getCustomerAgreements() {
if (customerAgreements == null) {
customerAgreements = new EObjectWithInverseResolvingEList<CustomerAgreement>(CustomerAgreement.class, this, CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS, CustomersPackage.CUSTOMER_AGREEMENT__CUSTOMER);
}
return customerAgreements;
}
/**
 * <!-- begin-user-doc -->
 * Returns the cached VIP flag.
 * <!-- end-user-doc -->
 * @generated
 */
public boolean isVip() {
return vip;
}
/**
 * <!-- begin-user-doc -->
 * Sets the VIP flag and fires a SET notification when required.
 * <!-- end-user-doc -->
 * @generated
 */
public void setVip(boolean newVip) {
boolean oldVip = vip;
vip = newVip;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, CustomersPackage.CUSTOMER__VIP, oldVip, vip));
}
/**
 * <!-- begin-user-doc -->
 * Reflective inverse-add: wires the given object into the feature identified
 * by featureID. For the single-valued plannedOutage reference any previous
 * value is detached from its inverse first.
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case CustomersPackage.CUSTOMER__PLANNED_OUTAGE:
if (plannedOutage != null)
msgs = ((InternalEObject)plannedOutage).eInverseRemove(this, InfOperationsPackage.PLANNED_OUTAGE__CUSTOMER_DATAS, PlannedOutage.class, msgs);
return basicSetPlannedOutage((PlannedOutage)otherEnd, msgs);
case CustomersPackage.CUSTOMER__END_DEVICE_ASSETS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getEndDeviceAssets()).basicAdd(otherEnd, msgs);
case CustomersPackage.CUSTOMER__TROUBLE_TICKETS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getTroubleTickets()).basicAdd(otherEnd, msgs);
case CustomersPackage.CUSTOMER__ERP_PERSONS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getErpPersons()).basicAdd(otherEnd, msgs);
case CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getOutageNotifications()).basicAdd(otherEnd, msgs);
case CustomersPackage.CUSTOMER__WORKS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getWorks()).basicAdd(otherEnd, msgs);
case CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getCustomerAgreements()).basicAdd(otherEnd, msgs);
}
return super.eInverseAdd(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * Reflective inverse-remove: detaches the given object from the feature
 * identified by featureID.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case CustomersPackage.CUSTOMER__PLANNED_OUTAGE:
return basicSetPlannedOutage(null, msgs);
case CustomersPackage.CUSTOMER__END_DEVICE_ASSETS:
return ((InternalEList<?>)getEndDeviceAssets()).basicRemove(otherEnd, msgs);
case CustomersPackage.CUSTOMER__TROUBLE_TICKETS:
return ((InternalEList<?>)getTroubleTickets()).basicRemove(otherEnd, msgs);
case CustomersPackage.CUSTOMER__ERP_PERSONS:
return ((InternalEList<?>)getErpPersons()).basicRemove(otherEnd, msgs);
case CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS:
return ((InternalEList<?>)getOutageNotifications()).basicRemove(otherEnd, msgs);
case CustomersPackage.CUSTOMER__WORKS:
return ((InternalEList<?>)getWorks()).basicRemove(otherEnd, msgs);
case CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS:
return ((InternalEList<?>)getCustomerAgreements()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * Reflective getter: returns the value of the feature identified by
 * featureID, optionally without resolving proxies for the single references.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case CustomersPackage.CUSTOMER__PLANNED_OUTAGE:
if (resolve) return getPlannedOutage();
return basicGetPlannedOutage();
case CustomersPackage.CUSTOMER__END_DEVICE_ASSETS:
return getEndDeviceAssets();
case CustomersPackage.CUSTOMER__STATUS:
if (resolve) return getStatus();
return basicGetStatus();
case CustomersPackage.CUSTOMER__KIND:
return getKind();
case CustomersPackage.CUSTOMER__TROUBLE_TICKETS:
return getTroubleTickets();
case CustomersPackage.CUSTOMER__ERP_PERSONS:
return getErpPersons();
case CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS:
return getOutageNotifications();
case CustomersPackage.CUSTOMER__SPECIAL_NEED:
return getSpecialNeed();
case CustomersPackage.CUSTOMER__PUC_NUMBER:
return getPucNumber();
case CustomersPackage.CUSTOMER__WORKS:
return getWorks();
case CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS:
return getCustomerAgreements();
case CustomersPackage.CUSTOMER__VIP:
return isVip();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective setter: assigns newValue to the feature identified by featureID;
 * list features are cleared and repopulated from the supplied collection.
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case CustomersPackage.CUSTOMER__PLANNED_OUTAGE:
setPlannedOutage((PlannedOutage)newValue);
return;
case CustomersPackage.CUSTOMER__END_DEVICE_ASSETS:
getEndDeviceAssets().clear();
getEndDeviceAssets().addAll((Collection<? extends EndDeviceAsset>)newValue);
return;
case CustomersPackage.CUSTOMER__STATUS:
setStatus((Status)newValue);
return;
case CustomersPackage.CUSTOMER__KIND:
setKind((CustomerKind)newValue);
return;
case CustomersPackage.CUSTOMER__TROUBLE_TICKETS:
getTroubleTickets().clear();
getTroubleTickets().addAll((Collection<? extends TroubleTicket>)newValue);
return;
case CustomersPackage.CUSTOMER__ERP_PERSONS:
getErpPersons().clear();
getErpPersons().addAll((Collection<? extends ErpPerson>)newValue);
return;
case CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS:
getOutageNotifications().clear();
getOutageNotifications().addAll((Collection<? extends OutageNotification>)newValue);
return;
case CustomersPackage.CUSTOMER__SPECIAL_NEED:
setSpecialNeed((String)newValue);
return;
case CustomersPackage.CUSTOMER__PUC_NUMBER:
setPucNumber((String)newValue);
return;
case CustomersPackage.CUSTOMER__WORKS:
getWorks().clear();
getWorks().addAll((Collection<? extends Work>)newValue);
return;
case CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS:
getCustomerAgreements().clear();
getCustomerAgreements().addAll((Collection<? extends CustomerAgreement>)newValue);
return;
case CustomersPackage.CUSTOMER__VIP:
setVip((Boolean)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* Reflective unsetter: restores the Customer feature identified by
* {@code featureID} to its default value (null reference, *_EDEFAULT
* constant, or an emptied list for many-valued features).
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case CustomersPackage.CUSTOMER__PLANNED_OUTAGE:
setPlannedOutage((PlannedOutage)null);
return;
case CustomersPackage.CUSTOMER__END_DEVICE_ASSETS:
getEndDeviceAssets().clear();
return;
case CustomersPackage.CUSTOMER__STATUS:
setStatus((Status)null);
return;
case CustomersPackage.CUSTOMER__KIND:
setKind(KIND_EDEFAULT);
return;
case CustomersPackage.CUSTOMER__TROUBLE_TICKETS:
getTroubleTickets().clear();
return;
case CustomersPackage.CUSTOMER__ERP_PERSONS:
getErpPersons().clear();
return;
case CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS:
getOutageNotifications().clear();
return;
case CustomersPackage.CUSTOMER__SPECIAL_NEED:
setSpecialNeed(SPECIAL_NEED_EDEFAULT);
return;
case CustomersPackage.CUSTOMER__PUC_NUMBER:
setPucNumber(PUC_NUMBER_EDEFAULT);
return;
case CustomersPackage.CUSTOMER__WORKS:
getWorks().clear();
return;
case CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS:
getCustomerAgreements().clear();
return;
case CustomersPackage.CUSTOMER__VIP:
setVip(VIP_EDEFAULT);
return;
}
// Feature IDs not declared on Customer are handled by the superclass.
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* Reflective "is set" check: reports whether the Customer feature identified
* by {@code featureID} currently differs from its default value (non-null
* reference, non-empty list, or attribute != its *_EDEFAULT constant).
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case CustomersPackage.CUSTOMER__PLANNED_OUTAGE:
return plannedOutage != null;
case CustomersPackage.CUSTOMER__END_DEVICE_ASSETS:
return endDeviceAssets != null && !endDeviceAssets.isEmpty();
case CustomersPackage.CUSTOMER__STATUS:
return status != null;
case CustomersPackage.CUSTOMER__KIND:
return kind != KIND_EDEFAULT;
case CustomersPackage.CUSTOMER__TROUBLE_TICKETS:
return troubleTickets != null && !troubleTickets.isEmpty();
case CustomersPackage.CUSTOMER__ERP_PERSONS:
return erpPersons != null && !erpPersons.isEmpty();
case CustomersPackage.CUSTOMER__OUTAGE_NOTIFICATIONS:
return outageNotifications != null && !outageNotifications.isEmpty();
case CustomersPackage.CUSTOMER__SPECIAL_NEED:
return SPECIAL_NEED_EDEFAULT == null ? specialNeed != null : !SPECIAL_NEED_EDEFAULT.equals(specialNeed);
case CustomersPackage.CUSTOMER__PUC_NUMBER:
return PUC_NUMBER_EDEFAULT == null ? pucNumber != null : !PUC_NUMBER_EDEFAULT.equals(pucNumber);
case CustomersPackage.CUSTOMER__WORKS:
return works != null && !works.isEmpty();
case CustomersPackage.CUSTOMER__CUSTOMER_AGREEMENTS:
return customerAgreements != null && !customerAgreements.isEmpty();
case CustomersPackage.CUSTOMER__VIP:
return vip != VIP_EDEFAULT;
}
// Feature IDs not declared on Customer are handled by the superclass.
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* Returns a debug string listing only the simple attributes
* (kind, specialNeed, pucNumber, vip); reference features are omitted.
* Proxies delegate to the superclass representation.
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (kind: ");
result.append(kind);
result.append(", specialNeed: ");
result.append(specialNeed);
result.append(", pucNumber: ");
result.append(pucNumber);
result.append(", vip: ");
result.append(vip);
result.append(')');
return result.toString();
}
} //CustomerImpl
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.kotlin.nj2k;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.jetbrains.kotlin.test.TestRoot;
import org.junit.runner.RunWith;
/**
* This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}.
* DO NOT MODIFY MANUALLY.
*/
@SuppressWarnings("all")
@TestRoot("j2k/new/tests")
@TestDataPath("$CONTENT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/partialConverter")
public abstract class PartialConverterTestGenerated extends AbstractPartialConverterTest {
// One generated test method per .java file under testData/partialConverter/field.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/partialConverter/field")
public static class Field extends AbstractPartialConverterTest {
// Delegates to doTest with the test-data file path resolved by KotlinTestUtils.
private void runTest(String testDataFilePath) throws Exception {
KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
}
@TestMetadata("classChildExtendsBase.java")
public void testClassChildExtendsBase() throws Exception {
runTest("testData/partialConverter/field/classChildExtendsBase.java");
}
@TestMetadata("conversion.java")
public void testConversion() throws Exception {
runTest("testData/partialConverter/field/conversion.java");
}
@TestMetadata("internalField.java")
public void testInternalField() throws Exception {
runTest("testData/partialConverter/field/internalField.java");
}
@TestMetadata("needInitializer.java")
public void testNeedInitializer() throws Exception {
runTest("testData/partialConverter/field/needInitializer.java");
}
@TestMetadata("privateField.java")
public void testPrivateField() throws Exception {
runTest("testData/partialConverter/field/privateField.java");
}
@TestMetadata("protectedField.java")
public void testProtectedField() throws Exception {
runTest("testData/partialConverter/field/protectedField.java");
}
@TestMetadata("publicField.java")
public void testPublicField() throws Exception {
runTest("testData/partialConverter/field/publicField.java");
}
@TestMetadata("specifyType.java")
public void testSpecifyType() throws Exception {
runTest("testData/partialConverter/field/specifyType.java");
}
@TestMetadata("valOrVar.java")
public void testValOrVar() throws Exception {
runTest("testData/partialConverter/field/valOrVar.java");
}
@TestMetadata("valWithInit.java")
public void testValWithInit() throws Exception {
runTest("testData/partialConverter/field/valWithInit.java");
}
@TestMetadata("varWithInit.java")
public void testVarWithInit() throws Exception {
runTest("testData/partialConverter/field/varWithInit.java");
}
@TestMetadata("varWithoutInit.java")
public void testVarWithoutInit() throws Exception {
runTest("testData/partialConverter/field/varWithoutInit.java");
}
@TestMetadata("volatileTransientAndStrictFp.java")
public void testVolatileTransientAndStrictFp() throws Exception {
runTest("testData/partialConverter/field/volatileTransientAndStrictFp.java");
}
}
// One generated test method per .java file under testData/partialConverter/function.
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/partialConverter/function")
public static class Function extends AbstractPartialConverterTest {
// Delegates to doTest with the test-data file path resolved by KotlinTestUtils.
private void runTest(String testDataFilePath) throws Exception {
KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
}
@TestMetadata("extendsBaseWhichExtendsObject.java")
public void testExtendsBaseWhichExtendsObject() throws Exception {
runTest("testData/partialConverter/function/extendsBaseWhichExtendsObject.java");
}
@TestMetadata("externalFunctionalInterface.java")
public void testExternalFunctionalInterface() throws Exception {
runTest("testData/partialConverter/function/externalFunctionalInterface.java");
}
@TestMetadata("externalKtFunctionalInterface.java")
public void testExternalKtFunctionalInterface() throws Exception {
runTest("testData/partialConverter/function/externalKtFunctionalInterface.java");
}
@TestMetadata("final.java")
public void testFinal() throws Exception {
runTest("testData/partialConverter/function/final.java");
}
@TestMetadata("functionInFinalClass.java")
public void testFunctionInFinalClass() throws Exception {
runTest("testData/partialConverter/function/functionInFinalClass.java");
}
@TestMetadata("internal.java")
public void testInternal() throws Exception {
runTest("testData/partialConverter/function/internal.java");
}
@TestMetadata("internalFunctionalInterface.java")
public void testInternalFunctionalInterface() throws Exception {
runTest("testData/partialConverter/function/internalFunctionalInterface.java");
}
@TestMetadata("java8Lambdas.java")
public void testJava8Lambdas() throws Exception {
runTest("testData/partialConverter/function/java8Lambdas.java");
}
@TestMetadata("java8MRKFunctionExpectedType.java")
public void testJava8MRKFunctionExpectedType() throws Exception {
runTest("testData/partialConverter/function/java8MRKFunctionExpectedType.java");
}
@TestMetadata("java8MRSamConstructor.java")
public void testJava8MRSamConstructor() throws Exception {
runTest("testData/partialConverter/function/java8MRSamConstructor.java");
}
@TestMetadata("lineBreaksBetweenParameters.java")
public void testLineBreaksBetweenParameters() throws Exception {
runTest("testData/partialConverter/function/lineBreaksBetweenParameters.java");
}
@TestMetadata("main.java")
public void testMain() throws Exception {
runTest("testData/partialConverter/function/main.java");
}
@TestMetadata("main2.java")
public void testMain2() throws Exception {
runTest("testData/partialConverter/function/main2.java");
}
@TestMetadata("mainAndNullabilitySetting.java")
public void testMainAndNullabilitySetting() throws Exception {
runTest("testData/partialConverter/function/mainAndNullabilitySetting.java");
}
@TestMetadata("mainVararg.java")
public void testMainVararg() throws Exception {
runTest("testData/partialConverter/function/mainVararg.java");
}
@TestMetadata("methodClassType.java")
public void testMethodClassType() throws Exception {
runTest("testData/partialConverter/function/methodClassType.java");
}
@TestMetadata("methodWithReturnStatement.java")
public void testMethodWithReturnStatement() throws Exception {
runTest("testData/partialConverter/function/methodWithReturnStatement.java");
}
@TestMetadata("nativeMethods.java")
public void testNativeMethods() throws Exception {
runTest("testData/partialConverter/function/nativeMethods.java");
}
@TestMetadata("open.java")
public void testOpen() throws Exception {
runTest("testData/partialConverter/function/open.java");
}
@TestMetadata("override.java")
public void testOverride() throws Exception {
runTest("testData/partialConverter/function/override.java");
}
@TestMetadata("overrideAndOpen.java")
public void testOverrideAndOpen() throws Exception {
runTest("testData/partialConverter/function/overrideAndOpen.java");
}
@TestMetadata("overrideObject.java")
public void testOverrideObject() throws Exception {
runTest("testData/partialConverter/function/overrideObject.java");
}
@TestMetadata("overrideObject2.java")
public void testOverrideObject2() throws Exception {
runTest("testData/partialConverter/function/overrideObject2.java");
}
@TestMetadata("overrideObject3.java")
public void testOverrideObject3() throws Exception {
runTest("testData/partialConverter/function/overrideObject3.java");
}
@TestMetadata("overrideWithHigherVisibility.java")
public void testOverrideWithHigherVisibility() throws Exception {
runTest("testData/partialConverter/function/overrideWithHigherVisibility.java");
}
@TestMetadata("ownGenericParam.java")
public void testOwnGenericParam() throws Exception {
runTest("testData/partialConverter/function/ownGenericParam.java");
}
@TestMetadata("ownSeveralGenericParams.java")
public void testOwnSeveralGenericParams() throws Exception {
runTest("testData/partialConverter/function/ownSeveralGenericParams.java");
}
@TestMetadata("parameterModification.java")
public void testParameterModification() throws Exception {
runTest("testData/partialConverter/function/parameterModification.java");
}
@TestMetadata("private.java")
public void testPrivate() throws Exception {
runTest("testData/partialConverter/function/private.java");
}
@TestMetadata("protected.java")
public void testProtected() throws Exception {
runTest("testData/partialConverter/function/protected.java");
}
@TestMetadata("public.java")
public void testPublic() throws Exception {
runTest("testData/partialConverter/function/public.java");
}
@TestMetadata("referenceToConstructor.java")
public void testReferenceToConstructor() throws Exception {
runTest("testData/partialConverter/function/referenceToConstructor.java");
}
@TestMetadata("synchronizedMethod.java")
public void testSynchronizedMethod() throws Exception {
runTest("testData/partialConverter/function/synchronizedMethod.java");
}
@TestMetadata("varVararg.java")
public void testVarVararg() throws Exception {
runTest("testData/partialConverter/function/varVararg.java");
}
}
}
| |
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.machinereassignment.solver.score;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.optaplanner.core.api.score.buildin.hardsoftlong.HardSoftLongScore;
import org.optaplanner.core.api.score.constraint.ConstraintMatchTotal;
import org.optaplanner.core.impl.score.director.incremental.AbstractIncrementalScoreCalculator;
import org.optaplanner.core.impl.score.director.incremental.ConstraintMatchAwareIncrementalScoreCalculator;
import org.optaplanner.examples.machinereassignment.domain.MachineReassignment;
import org.optaplanner.examples.machinereassignment.domain.MrBalancePenalty;
import org.optaplanner.examples.machinereassignment.domain.MrGlobalPenaltyInfo;
import org.optaplanner.examples.machinereassignment.domain.MrLocation;
import org.optaplanner.examples.machinereassignment.domain.MrMachine;
import org.optaplanner.examples.machinereassignment.domain.MrMachineCapacity;
import org.optaplanner.examples.machinereassignment.domain.MrNeighborhood;
import org.optaplanner.examples.machinereassignment.domain.MrProcessAssignment;
import org.optaplanner.examples.machinereassignment.domain.MrResource;
import org.optaplanner.examples.machinereassignment.domain.MrService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MachineReassignmentIncrementalScoreCalculator
extends AbstractIncrementalScoreCalculator<MachineReassignment>
implements ConstraintMatchAwareIncrementalScoreCalculator<MachineReassignment> {
protected static final String CONSTRAINT_PACKAGE = "org.optaplanner.examples.machinereassignment.solver";
protected final transient Logger logger = LoggerFactory.getLogger(getClass());
private MachineReassignment machineReassignment;
private MrGlobalPenaltyInfo globalPenaltyInfo;
private Map<MrService, MrServiceScorePart> serviceScorePartMap;
private Map<Integer, Integer> movedProcessCountToServiceCount;
private int serviceMoveCost;
private Map<MrMachine, MrMachineScorePart> machineScorePartMap;
private long hardScore;
private long softScore;
/**
* Rebuilds the full incremental state from scratch for a new working solution:
* zeroes the scores, creates one score part per service and per machine,
* initializes the moved-process histogram, accounts original (transient)
* process usage, and finally inserts every current process assignment.
*/
@Override
public void resetWorkingSolution(MachineReassignment machineReassignment) {
this.machineReassignment = machineReassignment;
hardScore = 0L;
softScore = 0L;
globalPenaltyInfo = machineReassignment.getGlobalPenaltyInfo();
List<MrService> serviceList = machineReassignment.getServiceList();
serviceScorePartMap = new HashMap<>(serviceList.size());
for (MrService service : serviceList) {
serviceScorePartMap.put(service, new MrServiceScorePart(service));
}
// Histogram: movedProcessCount -> number of services with that count.
// Initially every service has 0 moved processes.
movedProcessCountToServiceCount = new HashMap<>(serviceList.size());
movedProcessCountToServiceCount.put(0, serviceList.size());
serviceMoveCost = 0;
List<MrMachine> machineList = machineReassignment.getMachineList();
machineScorePartMap = new HashMap<>(machineList.size());
for (MrMachine machine : machineList) {
machineScorePartMap.put(machine, new MrMachineScorePart(machine));
}
// Account transient usage on each process's original machine first,
// so that the insert pass below sees correct capacity baselines.
for (MrProcessAssignment processAssignment : machineReassignment.getProcessAssignmentList()) {
MrMachine originalMachine = processAssignment.getOriginalMachine();
if (originalMachine != null) {
machineScorePartMap.get(originalMachine).initOriginalProcessAssignment(processAssignment);
}
}
for (MrProcessAssignment processAssignment : machineReassignment.getProcessAssignmentList()) {
insert(processAssignment);
}
}
/**
* No bookkeeping is needed before an entity is added; the insert happens in
* {@code afterEntityAdded(Object)} once the entity is part of the solution.
*/
@Override
public void beforeEntityAdded(Object entity) {
// Do nothing
}
/**
* Accounts a newly added process assignment into the incremental score.
*/
@Override
public void afterEntityAdded(Object entity) {
// TODO the maps should probably be adjusted
insert((MrProcessAssignment) entity);
}
/**
* Retracts the assignment's current contribution before its planning
* variable changes; the matching re-insert happens in afterVariableChanged.
*/
@Override
public void beforeVariableChanged(Object entity, String variableName) {
retract((MrProcessAssignment) entity);
}
/**
* Re-inserts the assignment's contribution after its planning variable
* changed (paired with the retract in beforeVariableChanged).
*/
@Override
public void afterVariableChanged(Object entity, String variableName) {
insert((MrProcessAssignment) entity);
}
/**
* Retracts the assignment's contribution while it is still in the solution.
*/
@Override
public void beforeEntityRemoved(Object entity) {
retract((MrProcessAssignment) entity);
}
/**
* Nothing left to do after removal; the retract already happened in
* {@code beforeEntityRemoved(Object)}.
*/
@Override
public void afterEntityRemoved(Object entity) {
// Do nothing
// TODO the maps should probably be adjusted
}
/**
 * Accounts a process assignment into the incremental score by registering it
 * on its service's score part and on its machine's score part.
 * An unassigned process (machine == null) contributes nothing.
 */
private void insert(MrProcessAssignment processAssignment) {
    MrMachine assignedMachine = processAssignment.getMachine();
    if (assignedMachine == null) {
        return;
    }
    serviceScorePartMap.get(processAssignment.getService()).addProcessAssignment(processAssignment);
    machineScorePartMap.get(assignedMachine).addProcessAssignment(processAssignment);
}
/**
 * Removes a process assignment's contribution from the incremental score,
 * undoing the corresponding {@link #insert(MrProcessAssignment)}.
 * An unassigned process (machine == null) has nothing to retract.
 */
private void retract(MrProcessAssignment processAssignment) {
    MrMachine assignedMachine = processAssignment.getMachine();
    if (assignedMachine == null) {
        return;
    }
    serviceScorePartMap.get(processAssignment.getService()).removeProcessAssignment(processAssignment);
    machineScorePartMap.get(assignedMachine).removeProcessAssignment(processAssignment);
}
/**
* Returns the score maintained incrementally by the insert/retract calls.
*/
@Override
public HardSoftLongScore calculateScore() {
return HardSoftLongScore.valueOf(hardScore, softScore);
}
/**
* Incremental score bookkeeping for a single service: tracks in how many
* locations and neighborhoods its processes run (spread and dependency
* constraints) and how many of its processes are moved (service move cost).
* Reads and writes the enclosing calculator's hardScore/softScore and
* movedProcessCountToServiceCount fields directly.
*/
private class MrServiceScorePart {
private final MrService service;
// location -> number of this service's processes in that location.
// Locations with zero processes have no entry.
private Map<MrLocation, Integer> locationBag;
// neighborhood -> number of this service's processes in that neighborhood.
// Pre-filled with 0 for every neighborhood in the constructor.
private Map<MrNeighborhood, Integer> neighborhoodBag;
private int movedProcessCount;
private MrServiceScorePart(MrService service) {
this.service = service;
locationBag = new HashMap<>(machineReassignment.getLocationList().size());
// With 0 locations covered, the full locationSpread shortfall is penalized.
hardScore -= service.getLocationSpread();
List<MrNeighborhood> neighborhoodList = machineReassignment.getNeighborhoodList();
neighborhoodBag = new HashMap<>(neighborhoodList.size());
for (MrNeighborhood neighborhood : neighborhoodList) {
neighborhoodBag.put(neighborhood, 0);
}
movedProcessCount = 0;
}
private void addProcessAssignment(MrProcessAssignment processAssignment) {
// Spread constraints
MrLocation location = processAssignment.getLocation();
Integer locationProcessCount = locationBag.get(location);
if (locationProcessCount == null) {
// First process in this location: the covered-location count grows by 1,
// so the spread shortfall penalty is retracted and re-applied.
if (service.getLocationSpread() > locationBag.size()) {
hardScore += (service.getLocationSpread() - locationBag.size());
}
locationBag.put(location, 1);
if (service.getLocationSpread() > locationBag.size()) {
hardScore -= (service.getLocationSpread() - locationBag.size());
}
} else {
locationBag.put(location, locationProcessCount + 1);
}
// Dependency constraints
MrNeighborhood neighborhood = processAssignment.getNeighborhood();
int neighborhoodProcessCount = neighborhoodBag.get(neighborhood) + 1;
neighborhoodBag.put(neighborhood, neighborhoodProcessCount);
// The new process violates each to-dependency that has no process in
// this neighborhood.
for (MrService toDependencyService : service.getToDependencyServiceList()) {
int toDependencyNeighborhoodProcessCount = serviceScorePartMap.get(toDependencyService)
.neighborhoodBag.get(neighborhood);
if (toDependencyNeighborhoodProcessCount == 0) {
hardScore--;
}
}
// First process of this service in the neighborhood: from-dependency
// processes there are no longer violating, so their penalties are undone.
if (neighborhoodProcessCount == 1) {
for (MrService fromDependencyService : service.getFromDependencyServiceList()) {
int fromDependencyNeighborhoodProcessCount = serviceScorePartMap.get(fromDependencyService)
.neighborhoodBag.get(neighborhood);
hardScore += fromDependencyNeighborhoodProcessCount;
}
}
// Service move cost
if (processAssignment.isMoved()) {
int oldServiceCount = movedProcessCountToServiceCount.get(movedProcessCount);
movedProcessCountToServiceCount.put(movedProcessCount, oldServiceCount - 1);
// serviceMoveCost tracks the maximum movedProcessCount over all services.
if (serviceMoveCost == movedProcessCount) {
serviceMoveCost++;
softScore -= globalPenaltyInfo.getServiceMoveCostWeight();
}
movedProcessCount++;
Integer newServiceCount = movedProcessCountToServiceCount.get(movedProcessCount);
if (newServiceCount == null) {
newServiceCount = 0;
}
movedProcessCountToServiceCount.put(movedProcessCount, newServiceCount + 1);
}
}
private void removeProcessAssignment(MrProcessAssignment processAssignment) {
// Spread constraints (exact inverse of addProcessAssignment)
MrLocation location = processAssignment.getLocation();
int locationProcessCount = locationBag.get(location);
if (locationProcessCount == 1) {
if (service.getLocationSpread() > locationBag.size()) {
hardScore += (service.getLocationSpread() - locationBag.size());
}
locationBag.remove(location);
if (service.getLocationSpread() > locationBag.size()) {
hardScore -= (service.getLocationSpread() - locationBag.size());
}
} else {
locationBag.put(location, locationProcessCount - 1);
}
// Dependency constraints
MrNeighborhood neighborhood = processAssignment.getNeighborhood();
int neighborhoodProcessCount = neighborhoodBag.get(neighborhood) - 1;
neighborhoodBag.put(neighborhood, neighborhoodProcessCount);
for (MrService toDependencyService : service.getToDependencyServiceList()) {
int toDependencyNeighborhoodProcessCount = serviceScorePartMap.get(toDependencyService)
.neighborhoodBag.get(neighborhood);
if (toDependencyNeighborhoodProcessCount == 0) {
hardScore++;
}
}
// Last process of this service left the neighborhood: from-dependency
// processes there are violating again.
if (neighborhoodProcessCount == 0) {
for (MrService fromDependencyService : service.getFromDependencyServiceList()) {
int fromDependencyNeighborhoodProcessCount = serviceScorePartMap.get(fromDependencyService)
.neighborhoodBag.get(neighborhood);
hardScore -= fromDependencyNeighborhoodProcessCount;
}
}
// Service move cost
if (processAssignment.isMoved()) {
int oldServiceCount = movedProcessCountToServiceCount.get(movedProcessCount);
// Hack: This will linger a few entries with key 0 in movedProcessCountToServiceCount
movedProcessCountToServiceCount.put(movedProcessCount, oldServiceCount - 1);
if (oldServiceCount == 1 && serviceMoveCost == movedProcessCount) {
serviceMoveCost--;
softScore += globalPenaltyInfo.getServiceMoveCostWeight();
}
movedProcessCount--;
int newServiceCount = movedProcessCountToServiceCount.get(movedProcessCount);
movedProcessCountToServiceCount.put(movedProcessCount, newServiceCount + 1);
}
}
}
/**
 * Incremental score bookkeeping for a single machine: delegates per-resource
 * capacity accounting to {@link MrMachineCapacityScorePart}s, counts this
 * machine's processes per service (service conflict constraint) and applies
 * this machine's balance penalty contribution. Reads and writes the enclosing
 * calculator's hardScore/softScore fields directly.
 */
private class MrMachineScorePart {

    private final MrMachine machine;
    private final List<MrMachineCapacityScorePart> machineCapacityScorePartList;
    // service -> number of that service's processes on this machine.
    // Services with zero processes on this machine have no entry at all.
    private Map<MrService, Integer> serviceBag;

    public MrMachineScorePart(MrMachine machine) {
        this.machine = machine;
        List<MrMachineCapacity> machineCapacityList = machine.getMachineCapacityList();
        machineCapacityScorePartList = new ArrayList<>(machineCapacityList.size());
        for (MrMachineCapacity machineCapacity : machineCapacityList) {
            machineCapacityScorePartList.add(new MrMachineCapacityScorePart(machineCapacity));
        }
        serviceBag = new HashMap<>(10);
        // Apply the balance penalty contribution of the (initially empty) machine.
        doBalancePenaltyCosts();
    }

    /** Accounts usage of a process that originally ran on this machine (transient resources). */
    public void initOriginalProcessAssignment(MrProcessAssignment processAssignment) {
        for (MrMachineCapacityScorePart machineCapacityScorePart : machineCapacityScorePartList) {
            machineCapacityScorePart.initOriginalProcessAssignment(processAssignment);
        }
    }

    private void addProcessAssignment(MrProcessAssignment processAssignment) {
        // Balance cost: retract this machine's contribution before capacities change.
        undoBalancePenaltyCosts();
        for (MrMachineCapacityScorePart machineCapacityScorePart : machineCapacityScorePartList) {
            machineCapacityScorePart.addProcessAssignment(processAssignment);
        }
        // Service conflict: every process of a service beyond the first one on
        // the same machine costs 1 hard point.
        MrService service = processAssignment.getService();
        Integer serviceProcessCountInteger = serviceBag.get(service);
        int serviceProcessCount = serviceProcessCountInteger == null ? 0 : serviceProcessCountInteger;
        if (serviceProcessCount > 1) {
            hardScore += (serviceProcessCount - 1);
        }
        serviceProcessCount++;
        if (serviceProcessCount > 1) {
            hardScore -= (serviceProcessCount - 1);
        }
        // After an insert the count is always >= 1, so store it directly.
        serviceBag.put(service, serviceProcessCount);
        // Balance cost: re-apply with the updated capacities.
        doBalancePenaltyCosts();
        // Move costs
        if (processAssignment.isMoved()) {
            // Process move cost
            softScore -= processAssignment.getProcessMoveCost() * globalPenaltyInfo.getProcessMoveCostWeight();
            // Machine move cost
            softScore -= processAssignment.getMachineMoveCost() * globalPenaltyInfo.getMachineMoveCostWeight();
        }
    }

    private void removeProcessAssignment(MrProcessAssignment processAssignment) {
        undoBalancePenaltyCosts();
        for (MrMachineCapacityScorePart machineCapacityScorePart : machineCapacityScorePartList) {
            machineCapacityScorePart.removeProcessAssignment(processAssignment);
        }
        // Service conflict (exact mirror of addProcessAssignment)
        MrService service = processAssignment.getService();
        Integer serviceProcessCountInteger = serviceBag.get(service);
        int serviceProcessCount = serviceProcessCountInteger == null ? 0 : serviceProcessCountInteger;
        if (serviceProcessCount > 1) {
            hardScore += (serviceProcessCount - 1);
        }
        serviceProcessCount--;
        if (serviceProcessCount > 1) {
            hardScore -= (serviceProcessCount - 1);
        }
        if (serviceProcessCount == 0) {
            // Fix: remove the entry instead of storing a null value. The previous
            // put(service, null) accumulated stale null-valued entries in the map;
            // get() returns null in both cases, so behavior is otherwise unchanged,
            // but iteration over serviceBag no longer encounters dead entries.
            serviceBag.remove(service);
        } else {
            serviceBag.put(service, serviceProcessCount);
        }
        doBalancePenaltyCosts();
        // Move costs
        if (processAssignment.isMoved()) {
            // Process move cost
            softScore += processAssignment.getProcessMoveCost() * globalPenaltyInfo.getProcessMoveCostWeight();
            // Machine move cost
            softScore += processAssignment.getMachineMoveCost() * globalPenaltyInfo.getMachineMoveCostWeight();
        }
    }

    /** Applies this machine's balance penalty cost for every configured balance penalty. */
    private void doBalancePenaltyCosts() {
        for (MrBalancePenalty balancePenalty : machineReassignment.getBalancePenaltyList()) {
            long originAvailable = machineCapacityScorePartList.get(balancePenalty.getOriginResource().getIndex())
                    .getBalanceAvailable();
            long targetAvailable = machineCapacityScorePartList.get(balancePenalty.getTargetResource().getIndex())
                    .getBalanceAvailable();
            if (originAvailable > 0L) {
                long minimumTargetAvailable = originAvailable * balancePenalty.getMultiplicand();
                // targetAvailable might be negative, but that's ok (and even avoids score traps)
                if (targetAvailable < minimumTargetAvailable) {
                    softScore -= (minimumTargetAvailable - targetAvailable) * balancePenalty.getWeight();
                }
            }
        }
    }

    /** Exact inverse of {@link #doBalancePenaltyCosts()}; must bracket any capacity change. */
    private void undoBalancePenaltyCosts() {
        for (MrBalancePenalty balancePenalty : machineReassignment.getBalancePenaltyList()) {
            long originAvailable = machineCapacityScorePartList.get(balancePenalty.getOriginResource().getIndex())
                    .getBalanceAvailable();
            long targetAvailable = machineCapacityScorePartList.get(balancePenalty.getTargetResource().getIndex())
                    .getBalanceAvailable();
            if (originAvailable > 0L) {
                long minimumTargetAvailable = originAvailable * balancePenalty.getMultiplicand();
                // targetAvailable might be negative, but that's ok (and even avoids score traps)
                if (targetAvailable < minimumTargetAvailable) {
                    softScore += (minimumTargetAvailable - targetAvailable) * balancePenalty.getWeight();
                }
            }
        }
    }
}
/**
* Per-(machine, resource) capacity bookkeeping. Maintains three running
* "available" amounts and folds only their negative portions into the score:
* maximumAvailable (hard capacity constraint), safetyAvailable (soft load
* cost, weighted by the resource's load cost weight) and balanceAvailable
* (like maximumAvailable but ignoring transient usage; read by the balance
* penalty calculation).
*/
private class MrMachineCapacityScorePart {
private final MrMachineCapacity machineCapacity;
private long maximumAvailable;
private long safetyAvailable;
private long balanceAvailable; // == maximumAvailable without transient
private MrMachineCapacityScorePart(MrMachineCapacity machineCapacity) {
this.machineCapacity = machineCapacity;
maximumAvailable = machineCapacity.getMaximumCapacity();
safetyAvailable = machineCapacity.getSafetyCapacity();
balanceAvailable = machineCapacity.getMaximumCapacity();
}
// Transiently consumed resources stay occupied on the original machine
// even after the process moves away, so their usage is accounted up front.
private void initOriginalProcessAssignment(MrProcessAssignment processAssignment) {
if (machineCapacity.isTransientlyConsumed()) {
// Capacity constraints + Transient usage constraints
long processUsage = processAssignment.getProcess().getProcessRequirement(machineCapacity.getResource())
.getUsage();
// Retract the old overuse penalty, update, re-apply the new one.
hardScore -= Math.min(maximumAvailable, 0);
maximumAvailable -= processUsage;
hardScore += Math.min(maximumAvailable, 0);
}
}
private void addProcessAssignment(MrProcessAssignment processAssignment) {
MrResource resource = machineCapacity.getResource();
long processUsage = processAssignment.getUsage(resource);
// A transiently consumed resource was already charged via
// initOriginalProcessAssignment unless the process actually moved.
if (!machineCapacity.isTransientlyConsumed() || processAssignment.isMoved()) {
// Capacity constraints + Transient usage constraints
hardScore -= Math.min(maximumAvailable, 0);
maximumAvailable -= processUsage;
hardScore += Math.min(maximumAvailable, 0);
}
// Load cost
softScore -= Math.min(safetyAvailable, 0) * resource.getLoadCostWeight();
safetyAvailable -= processUsage;
softScore += Math.min(safetyAvailable, 0) * resource.getLoadCostWeight();
balanceAvailable -= processUsage;
}
private void removeProcessAssignment(MrProcessAssignment processAssignment) {
MrResource resource = machineCapacity.getResource();
long processUsage = processAssignment.getUsage(resource);
if (!machineCapacity.isTransientlyConsumed() || processAssignment.isMoved()) {
// Capacity constraints + Transient usage constraints
hardScore -= Math.min(maximumAvailable, 0);
maximumAvailable += processUsage;
hardScore += Math.min(maximumAvailable, 0);
}
// Load cost
softScore -= Math.min(safetyAvailable, 0) * resource.getLoadCostWeight();
safetyAvailable += processUsage;
softScore += Math.min(safetyAvailable, 0) * resource.getLoadCostWeight();
balanceAvailable += processUsage;
}
public long getBalanceAvailable() {
return balanceAvailable;
}
}
/**
* Constraint-match-aware variant: delegates to the plain reset; constraint
* match support is always maintained regardless of the flag.
*/
@Override
public void resetWorkingSolution(MachineReassignment workingSolution, boolean constraintMatchEnabled) {
resetWorkingSolution(workingSolution);
// ignore constraintMatchEnabled, it is always presumed enabled
}
/**
 * Builds one {@link ConstraintMatchTotal} per constraint type and fills each
 * with a match for every broken constraint occurrence, mirroring the
 * incremental score calculation so the sum of all matches equals the
 * working score.
 *
 * @return the totals for all nine constraint types (never null)
 */
@Override
public Collection<ConstraintMatchTotal> getConstraintMatchTotals() {
    ConstraintMatchTotal maximumCapacityMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "maximumCapacity", HardSoftLongScore.ZERO);
    ConstraintMatchTotal serviceConflictMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "serviceConflict", HardSoftLongScore.ZERO);
    ConstraintMatchTotal serviceLocationSpreadMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "serviceLocationSpread", HardSoftLongScore.ZERO);
    ConstraintMatchTotal serviceDependencyMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "serviceDependency", HardSoftLongScore.ZERO);
    ConstraintMatchTotal loadCostMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "loadCost", HardSoftLongScore.ZERO);
    ConstraintMatchTotal balanceCostMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "balanceCost", HardSoftLongScore.ZERO);
    ConstraintMatchTotal processMoveCostMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "processMoveCost", HardSoftLongScore.ZERO);
    ConstraintMatchTotal serviceMoveCostMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "serviceMoveCost", HardSoftLongScore.ZERO);
    ConstraintMatchTotal machineMoveCostMatchTotal = new ConstraintMatchTotal(
            CONSTRAINT_PACKAGE, "machineMoveCost", HardSoftLongScore.ZERO);
    // Hard penalty when a service is spread over fewer locations than required.
    for (MrServiceScorePart serviceScorePart : serviceScorePartMap.values()) {
        MrService service = serviceScorePart.service;
        if (service.getLocationSpread() > serviceScorePart.locationBag.size()) {
            serviceLocationSpreadMatchTotal.addConstraintMatch(
                    Arrays.asList(service),
                    HardSoftLongScore.valueOf(
                            - (service.getLocationSpread() - serviceScorePart.locationBag.size()), 0));
        }
    }
    for (MrMachineScorePart machineScorePart : machineScorePartMap.values()) {
        for (MrMachineCapacityScorePart machineCapacityScorePart : machineScorePart.machineCapacityScorePartList) {
            // Hard penalty for exceeding a machine's maximum capacity.
            if (machineCapacityScorePart.maximumAvailable < 0L) {
                maximumCapacityMatchTotal.addConstraintMatch(
                        Arrays.asList(machineCapacityScorePart.machineCapacity),
                        HardSoftLongScore.valueOf(machineCapacityScorePart.maximumAvailable, 0));
            }
            // Soft penalty for dipping below a machine's safety capacity.
            if (machineCapacityScorePart.safetyAvailable < 0L) {
                loadCostMatchTotal.addConstraintMatch(
                        Arrays.asList(machineCapacityScorePart.machineCapacity),
                        HardSoftLongScore.valueOf(0, machineCapacityScorePart.safetyAvailable
                                * machineCapacityScorePart.machineCapacity.getResource().getLoadCostWeight()));
            }
        }
        // Soft penalty when a machine's target resource availability falls below
        // the multiplicand of its origin resource availability.
        for (MrBalancePenalty balancePenalty : machineReassignment.getBalancePenaltyList()) {
            long originAvailable = machineScorePart.machineCapacityScorePartList
                    .get(balancePenalty.getOriginResource().getIndex()).getBalanceAvailable();
            long targetAvailable = machineScorePart.machineCapacityScorePartList
                    .get(balancePenalty.getTargetResource().getIndex()).getBalanceAvailable();
            if (originAvailable > 0L) {
                long minimumTargetAvailable = originAvailable * balancePenalty.getMultiplicand();
                // targetAvailable might be negative, but that's ok (and even avoids score traps)
                if (targetAvailable < minimumTargetAvailable) {
                    balanceCostMatchTotal.addConstraintMatch(
                            Arrays.asList(machineScorePart.machine, balancePenalty),
                            HardSoftLongScore.valueOf(0,
                                    - (minimumTargetAvailable - targetAvailable) * balancePenalty.getWeight()));
                }
            }
        }
        // Hard penalty of (n - 1) for every service with n processes on one machine.
        for (Map.Entry<MrService, Integer> entry : machineScorePart.serviceBag.entrySet()) {
            Integer serviceProcessCount = entry.getValue();
            // Defensive: treat an explicitly-stored null count as zero.
            if (serviceProcessCount == null) {
                serviceProcessCount = 0;
            }
            serviceConflictMatchTotal.addConstraintMatch(
                    Arrays.asList(entry.getKey()),
                    HardSoftLongScore.valueOf(- (serviceProcessCount - 1), 0));
        }
    }
    for (MrProcessAssignment processAssignment : machineReassignment.getProcessAssignmentList()) {
        // Hard penalty for every dependency service absent from the neighborhood.
        // NOTE(review): the unboxed get() assumes neighborhoodBag is prepopulated
        // for every neighborhood -- a missing key would NPE; verify upstream.
        for (MrService toDependencyService : processAssignment.getService().getToDependencyServiceList()) {
            int toDependencyNeighborhoodProcessCount = serviceScorePartMap.get(toDependencyService)
                    .neighborhoodBag.get(processAssignment.getNeighborhood());
            if (toDependencyNeighborhoodProcessCount == 0) {
                serviceDependencyMatchTotal.addConstraintMatch(
                        Arrays.asList(processAssignment, toDependencyService),
                        HardSoftLongScore.valueOf(- 1, 0));
            }
        }
        // Soft penalties for moving a process off its original machine.
        if (processAssignment.isMoved()) {
            processMoveCostMatchTotal.addConstraintMatch(
                    Arrays.asList(processAssignment),
                    HardSoftLongScore.valueOf(0,
                            - (processAssignment.getProcessMoveCost() * globalPenaltyInfo.getProcessMoveCostWeight())));
            machineMoveCostMatchTotal.addConstraintMatch(
                    Arrays.asList(processAssignment),
                    HardSoftLongScore.valueOf(0,
                            - (processAssignment.getMachineMoveCost() * globalPenaltyInfo.getMachineMoveCostWeight())));
        }
    }
    // One match per unit of serviceMoveCost, each justified by its index.
    for (int i = 0; i < serviceMoveCost; i++) {
        serviceMoveCostMatchTotal.addConstraintMatch(
                Arrays.asList(i),
                HardSoftLongScore.valueOf(0, - globalPenaltyInfo.getServiceMoveCostWeight()));
    }
    // Presize for the 9 totals added below (the original capacity of 4 forced
    // a pointless internal array grow).
    List<ConstraintMatchTotal> constraintMatchTotalList = new ArrayList<>(9);
    constraintMatchTotalList.add(maximumCapacityMatchTotal);
    constraintMatchTotalList.add(serviceConflictMatchTotal);
    constraintMatchTotalList.add(serviceLocationSpreadMatchTotal);
    constraintMatchTotalList.add(serviceDependencyMatchTotal);
    constraintMatchTotalList.add(loadCostMatchTotal);
    constraintMatchTotalList.add(balanceCostMatchTotal);
    constraintMatchTotalList.add(processMoveCostMatchTotal);
    constraintMatchTotalList.add(serviceMoveCostMatchTotal);
    constraintMatchTotalList.add(machineMoveCostMatchTotal);
    return constraintMatchTotalList;
}
}
| |
/*
* BoilerPipe.java
*
* Copyright (c) 2010, The University of Sheffield.
*
* This file is part of GATE (see http://gate.ac.uk/), and is free software,
* licenced under the GNU Library General Public License, Version 3, June 2007
* (in the distribution as file licence.html, and also available at
* http://gate.ac.uk/gate/licence.html).
*
* Mark A. Greenwood, 22/10/2010
*/
package gate.creole.boilerpipe;
import static gate.GateConstants.ORIGINAL_MARKUPS_ANNOT_SET_NAME;
import gate.Annotation;
import gate.AnnotationSet;
import gate.Factory;
import gate.FeatureMap;
import gate.Utils;
import gate.creole.AbstractLanguageAnalyser;
import gate.creole.ExecutionException;
import gate.creole.ExecutionInterruptedException;
import gate.creole.metadata.CreoleParameter;
import gate.creole.metadata.CreoleResource;
import gate.creole.metadata.Optional;
import gate.creole.metadata.RunTime;
import gate.util.InvalidOffsetException;
import gate.util.OffsetComparator;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import de.l3s.boilerpipe.BoilerpipeProcessingException;
import de.l3s.boilerpipe.document.TextBlock;
import de.l3s.boilerpipe.document.TextDocument;
/**
* A GATE PR which uses the <a
* href="http://code.google.com/p/boilerpipe/">boilerpipe</a> library to
* determine which sections of a document are useful content and which are
* simply boilerplate.
*
* @see <a href="http://gate.ac.uk/userguide/sec:misc-creole:boilerpipe">The GATE
* User Guide</a>
* @author Mark A. Greenwood
*/
@CreoleResource(name = "Boilerpipe Content Detection", icon = "content_detection.png", comment = "Uses boilerpipe to determine which sections of a document are interesting content and which are just boilerplate", helpURL = "http://gate.ac.uk/userguide/sec:misc-creole:boilerpipe")
public class BoilerPipe extends AbstractLanguageAnalyser {
private static final long serialVersionUID = 5086217897382197476L;
// transient because Logger is not serializable
// NOTE(review): no visible code reinitialises this after deserialization --
// a deserialized instance would have a null logger; verify elsewhere in file.
private transient Logger logger = Logger.getLogger(this.getClass().getName());
/**
 * A regular expression for finding section breaks: one or more consecutive
 * line terminators in any convention (\n, \r\n, \n\r or \r)
 */
private static final Pattern BLOCK_SEPARATOR = Pattern
.compile("(\n\r|\r\n|\n|\r){1,}");
/**
 * The maximum length of a single line of text before wrapping takes place
 */
private static final int MAX_LINE_LENGTH = 80;
/**
 * A counter so we can keep track of which tokens we have looked at already.
 * This is for efficiency reasons as we never need to look at old tokens again
 * but removing them from the list would be time consuming.
 * Reset to 0 at the start of each execute() run.
 */
private int tokenIndex = 0;
/**
 * A counter so we can keep track of which anchor tags we have looked at
 * already. This is for efficiency reasons as we never need to look at old
 * anchors again but removing them from the list would be time consuming.
 * Reset to 0 at the start of each execute() run.
 */
private int anchorIndex = 0;
/** The boilerpipe extraction strategy used to classify text blocks. */
private Extractor extractor = Extractor.DEFAULT;
@RunTime
@CreoleParameter(comment = "The type of extractor to use to find the content", defaultValue = "DEFAULT")
public void setExtractor(Extractor extractor) {
this.extractor = extractor;
}
public Extractor getExtractor() {
return extractor;
}
/**
 * Controls whether documents whose mime type IS or IS NOT in {@link #mimeTypes}
 * get the full block-classification treatment.
 * NOTE(review): the accessors are named set/getAllContent, which fixes the
 * CREOLE parameter name; renaming them would break saved applications.
 */
private Behaviour behaviour = Behaviour.NOT_LISTED;
@RunTime
@CreoleParameter(comment = "Determines how the list of mime types is interpretted", defaultValue = "NOT_LISTED")
public void setAllContent(Behaviour behaviour) {
this.behaviour = behaviour;
}
public Behaviour getAllContent() {
return behaviour;
}
/** Mime types checked against the document's "MimeType" feature. */
private Set<String> mimeTypes;
@RunTime
@CreoleParameter(comment = "A list of mime types that determines which documents are fully processed", defaultValue = "text/html")
public void setMimeTypes(Set<String> mimeTypes) {
this.mimeTypes = mimeTypes;
}
public Set<String> getMimeTypes() {
return mimeTypes;
}
/** Annotation type created over spans classified as real content. */
private String contentAnnotationName;
@RunTime
@Optional
@CreoleParameter(comment = "The name of the annotations to create over real document content", defaultValue = "Content")
public void setContentAnnotationName(String contentAnnotationName) {
this.contentAnnotationName = contentAnnotationName;
}
public String getContentAnnotationName() {
return contentAnnotationName;
}
/** Annotation type created over spans classified as boilerplate. */
private String boilerplateAnnotationName;
@RunTime
@Optional
@CreoleParameter(comment = "The name of the annotations to create over boilerplate sections", defaultValue = "Boilerplate")
public void setBoilerplateAnnotationName(String boilerplateAnnotationName) {
this.boilerplateAnnotationName = boilerplateAnnotationName;
}
public String getBoilerplateAnnotationName() {
return boilerplateAnnotationName;
}
/** When true, content spans are annotated (default true). */
private boolean annotateContent = true;
@RunTime
@CreoleParameter(comment = "If true then annotations spanning content will be created", defaultValue = "true")
public void setAnnotateContent(Boolean annotateContent) {
this.annotateContent = annotateContent;
}
public Boolean getAnnotateContent() {
return annotateContent;
}
/** When true, boilerplate spans are annotated (default false). */
private boolean annotateBoilerplate = false;
@RunTime
@CreoleParameter(comment = "If true then annotations spanning boilerplate will be created", defaultValue = "false")
public void setAnnotateBoilerplate(Boolean annotateBoilerplate) {
this.annotateBoilerplate = annotateBoilerplate;
}
public Boolean getAnnotateBoilerplate() {
return annotateBoilerplate;
}
/** When true, internal block statistics are exposed as annotation features. */
private boolean debug = false;
@RunTime
@CreoleParameter(comment = "In debug mode internal variables will be exposed as annotation features", defaultValue = "false")
public void setDebug(Boolean debug) {
this.debug = debug;
}
public Boolean getDebug() {
return debug;
}
/** When true, a document with no Token annotations aborts with an exception. */
private boolean failOnMissingInputAnnotations = true;
@RunTime
@CreoleParameter(comment = "Throw an exception when there are none of the required input annotations", defaultValue = "true")
public void setFailOnMissingInputAnnotations(Boolean fail) {
failOnMissingInputAnnotations = fail;
}
public Boolean getFailOnMissingInputAnnotations() {
return failOnMissingInputAnnotations;
}
/** When true, body/title/a elements in Original markups guide detection. */
private boolean useHintsFromOriginalMarkups = true;
@RunTime
@CreoleParameter(comment = "Use annotations from the Original markups as hints for finding content", defaultValue = "true")
public void setUseHintsFromOriginalMarkups(Boolean useHints) {
useHintsFromOriginalMarkups = useHints;
}
public Boolean getUseHintsFromOriginalMarkups() {
return useHintsFromOriginalMarkups;
}
/** Annotation set results are written to (null/empty = default set). */
private String outputASName;
@Optional
@RunTime
@CreoleParameter(comment = "The name of the output annotation set.")
public void setOutputASName(String outputASName) {
this.outputASName = outputASName;
}
public String getOutputASName() {
return outputASName;
}
/** Annotation set Token annotations are read from (null/empty = default set). */
private String inputASName;
@Optional
@RunTime
@CreoleParameter(comment = "The name of the input annotation set.")
public void setInputASName(String inputASName) {
this.inputASName = inputASName;
}
public String getInputASName() {
return inputASName;
}
/**
 * Runs boilerpipe content detection over the current document. Depending on
 * the configured behaviour and mime-type list the document is either
 * annotated as a single block of content, or split into text blocks which
 * are classified as content/boilerplate and annotated accordingly.
 *
 * @throws ExecutionException if there is no document, the PR is
 *           mis-configured, or the boilerpipe library fails
 */
@Override
public void execute() throws ExecutionException {
  // if there is no document to process then stop right now. NOTE: this check
  // must run before anything touches the document -- the original code
  // called document.getName() first, which threw a NullPointerException
  // instead of the intended ExecutionException
  if(document == null)
    throw new ExecutionException("No document to process!");
  // assume we haven't been interrupted yet
  interrupted = false;
  // fire some progress notifications
  long startTime = System.currentTimeMillis();
  fireStatusChanged("Performing content detection in " + document.getName());
  fireProgressChanged(0);
  // if neither type of annotation are required then quit
  if(!annotateContent && !annotateBoilerplate) return;
  // if we are supposed to be annotating content then check that the
  // annotation name has been set
  if(annotateContent
          && (contentAnnotationName == null || contentAnnotationName.trim()
                  .equals("")))
    throw new ExecutionException(
            "You must set the name of the content annotations!");
  // if we are supposed to be annotating boilerplate then check that the
  // annotation name has been set
  if(annotateBoilerplate
          && (boilerplateAnnotationName == null || boilerplateAnnotationName
                  .trim().equals("")))
    throw new ExecutionException(
            "You must set the name of the boilerplate annotations!");
  // make sure an extractor has been specified
  if(extractor == null)
    throw new ExecutionException("An extractor must be specified!");
  try {
    // is the mime type of the document one specified in the mime type list
    boolean listed =
            (mimeTypes == null ? false : mimeTypes.contains(document
                    .getFeatures().get("MimeType")));
    // find the annotation set we are supposed to be adding things to
    AnnotationSet outputAS = document.getAnnotations(outputASName);
    if((listed && behaviour.equals(Behaviour.LISTED))
            || (!listed && behaviour.equals(Behaviour.NOT_LISTED))) {
      // if the PR has been configured in such a way that the mime type of the
      // document means that we should assume its entire contents is content
      // and we are annotating content then add a single content annotation
      annotateDocument(outputAS);
    } else {
      // we actually have to process the document so lets start doing some
      // work!
      // assume we start from the beginning of the document...
      int startIndex = 0;
      // and from the first token...
      tokenIndex = 0;
      // and from the first anchor...
      anchorIndex = 0;
      // and that the ID of the first text section will be 0
      int offsetBlocks = 0;
      // get the textual content of the document as we will be using this to
      // determine the blocks of text we need to classify
      String docContent = document.getContent().toString();
      if(useHintsFromOriginalMarkups) {
        try {
          // see if there is a "body" annotation in the original markups
          Annotation body =
                  document.getAnnotations(ORIGINAL_MARKUPS_ANNOT_SET_NAME)
                          .get("body").iterator().next();
          // and if there is then we only want to consider text inside from
          // where it starts
          startIndex = body.getStartNode().getOffset().intValue();
        } catch(Exception e) {
          // if we get an exception here just assume there is no body and
          // continue on regardless
        }
      }
      // get all the tokens from the input annotation set
      List<Annotation> tokens = new ArrayList<Annotation>();
      tokens.addAll(document.getAnnotations(inputASName).get(
              TOKEN_ANNOTATION_TYPE));
      if(tokens.size() == 0) {
        // if there are no tokens then either fail or print a warning
        if(failOnMissingInputAnnotations) {
          throw new ExecutionException(
                  "Either "
                          + document.getName()
                          + " does not have any contents or \n you need to run the tokenizer first");
        } else {
          Utils.logOnce(
                  logger,
                  Level.INFO,
                  "Content Detection: either a document does not have any text or you need to run the tokenizer first - see debug log for details.");
          logger.debug("No input annotations in document "
                  + document.getName());
          return;
        }
      }
      // sort the tokens to ensure they are in the same order as in the
      // document
      Collections.sort(tokens, new OffsetComparator());
      // get all the anchors (<a></a>) tags from the original markups set
      List<Annotation> anchors = new ArrayList<Annotation>();
      if(useHintsFromOriginalMarkups) {
        anchors.addAll(document.getAnnotations(
                ORIGINAL_MARKUPS_ANNOT_SET_NAME).get("a"));
        // sort the anchors so they appear in the same order as in the
        // document
        Collections.sort(anchors, new OffsetComparator());
      }
      // for ease of use we are going to build two lists both holding
      // information about the set of text blocks. One will get processed,
      // which involves merging and deleting blocks, the other will remain
      // untouched. The information on relevance will then be mapped back to
      // the untouched list to ensure that all the text can be annotated as
      // content or boilerplate properly when we have finished
      List<PositionedTextBlock> origBlocks =
              new ArrayList<PositionedTextBlock>();
      List<TextBlock> blocks = new ArrayList<TextBlock>();
      // let's start by assuming that the name of the document is its title
      String title = document.getName();
      if(useHintsFromOriginalMarkups) {
        try {
          // if there is an actual "title" element in the original markups
          // then use the text it spans as the title instead
          Annotation t =
                  document.getAnnotations(ORIGINAL_MARKUPS_ANNOT_SET_NAME)
                          .get("title").iterator().next();
          title = Utils.stringFor(document, t);
        } catch(Exception e) {
          // if we get an exception here just assume there is no title and
          // just continue to use the document name
        }
      }
      // get a matcher over the doc content so we can find the different
      // blocks
      Matcher m = BLOCK_SEPARATOR.matcher(docContent);
      while(m.find()) {
        // if we have been asked to stop then do so
        if(isInterrupted()) { throw new ExecutionInterruptedException(
                "The execution of the \""
                        + getName()
                        + "\" Boilerpipe Content Detection has been abruptly interrupted!"); }
        // for each separator we find...
        // try and create block that spans everything from the last block to
        // this separator
        PositionedTextBlock tb =
                createTextBlock(docContent, tokens, anchors, startIndex,
                        m.start(), offsetBlocks);
        if(tb != null) {
          // if we created a block then...
          // increment the ID ready for next time
          ++offsetBlocks;
          // remember the end of the separator for use as the start of the
          // next block
          startIndex = m.end();
          // store the block in both lists that we are building
          blocks.add(tb);
          origBlocks.add((PositionedTextBlock)tb.clone());
        }
        // assume that half the time is processing the tokens. Multiply
        // before dividing: the original (tokenIndex / tokens.size()) * 50
        // was integer division and always reported 0 until the very end
        fireProgressChanged(tokenIndex * 50 / tokens.size());
      }
      // try and create a block from the last separator to the end of the
      // document
      PositionedTextBlock tb =
              createTextBlock(docContent, tokens, anchors, startIndex,
                      docContent.length(), offsetBlocks);
      if(tb != null) {
        // if we created a block then store it
        blocks.add(tb);
        origBlocks.add((PositionedTextBlock)tb.clone());
      }
      // create a document object that we can pass to the boilerpipe library
      TextDocument td = new TextDocument(title, blocks);
      if(extractor.getInstance().process(td)) {
        // if boilerpipe successfully processed the document then...
        // map back from the merged blocks to the original list
        for(TextBlock block : blocks) {
          // if we have been asked to stop then do so
          if(isInterrupted()) { throw new ExecutionInterruptedException(
                  "The execution of the \""
                          + getName()
                          + "\" Boilerpipe Content Detection has been abruptly interrupted!"); }
          for(int i = block.getOffsetBlocksStart(); i <= block
                  .getOffsetBlocksEnd(); ++i) {
            origBlocks.get(i).setIsContent(block.isContent());
          }
        }
        // now go through the original list and merge successive blocks of the
        // same type
        PositionedTextBlock previous = null;
        for(PositionedTextBlock block : origBlocks) {
          // if we have been asked to stop then do so
          if(isInterrupted()) { throw new ExecutionInterruptedException(
                  "The execution of the \""
                          + getName()
                          + "\" Boilerpipe Content Detection has been abruptly interrupted!"); }
          // if this block is 75% of the document title then assume it's
          // content no matter what boilerpipe says
          block.setIsContent(block.isContent()
                  || title.indexOf(block.getText()) != -1
                  && ((float)block.getText().length() / (float)title.length()) > 0.75);
          if(previous == null) {
            // if we are on the first block just store it and move on
            previous = block;
          } else if(previous.isContent() == block.isContent()) {
            // if this block is of the same type as the last then merge the
            // two blocks
            previous.mergeNext(block);
          } else {
            // we have just changed block types so annotate the last block
            addAnnotation(previous, outputAS);
            // and now store the new one and move on
            previous = block;
          }
          // assume adding annotations is 50% of the work. Again multiply
          // before dividing to avoid the original always-zero integer
          // division
          fireProgressChanged(50 + (block.getOffsetBlocksStart() * 50
                  / origBlocks.size()));
        }
        // if there is still an unprocessed block then annotate it
        if(previous != null) {
          addAnnotation(previous, outputAS);
        }
      } else {
        // boilerpipe usually only returns false if the document is really
        // short and it doesn't try and process, at which point we should
        // probably assume the whole document is content
        annotateDocument(outputAS);
      }
    }
  } catch(InvalidOffsetException ioe) {
    // we should never see this exception so if we do convert it to an
    // execution exception and make it someone else's problem!
    throw new ExecutionException(ioe);
  } catch(BoilerpipeProcessingException bpe) {
    // I've no idea why this might happen so just make it someone else's
    // problem if it does!
    throw new ExecutionException(bpe);
  } finally {
    // let anyone who cares know that we have now finished
    fireProcessFinished();
    fireStatusChanged("Content detected in \""
            + document.getName()
            + "\" in "
            + NumberFormat.getInstance().format(
                    (double)(System.currentTimeMillis() - startTime) / 1000)
            + " seconds!");
  }
}
/**
 * Adds an annotation to the document to represent the supplied text block.
 * Nothing is added unless the PR is configured to annotate the block's type
 * (content or boilerplate).
 *
 * @param block
 *          the block that should become an annotation
 * @param annotationSet
 *          the annotation set to add the annotation to
 * @throws InvalidOffsetException
 *           if the block falls outside of the document
 */
private void addAnnotation(PositionedTextBlock block,
AnnotationSet annotationSet) throws InvalidOffsetException {
// only do something if the PR is configured to annotate this type of block
if((annotateContent && block.isContent())
|| (annotateBoilerplate && !block.isContent())) {
// create a new feature map to hold any features
FeatureMap params = Factory.newFeatureMap();
if(debug) {
// if we are in debug mode dump everything we know about this block into
// the feature map (block range, word/line counts, link density)
params.put("content", block.isContent());
params.put("start", block.getOffsetBlocksStart());
params.put("end", block.getOffsetBlocksEnd());
params.put("nwiwl", block.getNumWordsInWrappedLines());
params.put("nwl", block.getNumWrappedLines());
params.put("ld", block.getLinkDensity());
}
// now actually create and add the annotation to the annotation set; the
// annotation type is chosen by whether the block was classified as content
annotationSet.add((long)block.getStartOffset(), (long)block
.getEndOffset(), block.isContent()
? contentAnnotationName
: boilerplateAnnotationName, params);
}
}
/**
 * Annotates the whole document (or, when hints are enabled and a "body"
 * element exists in the original markups, just the body span) as a single
 * content annotation. Used when the mime type says the document should not
 * be block-classified, or when boilerpipe declines to process it.
 *
 * @param annotationSet the annotation set to add the annotation to
 * @throws InvalidOffsetException if the computed span is invalid
 */
private void annotateDocument(AnnotationSet annotationSet)
throws InvalidOffsetException {
if(annotateContent) {
FeatureMap params = Factory.newFeatureMap();
if(debug) params.put("content", Boolean.TRUE);
// default to spanning the entire document content
long start = 0;
long end = document.getContent().size();
if(useHintsFromOriginalMarkups) {
try {
// see if there is a "body" annotation in the original markups
AnnotationSet body =
document.getAnnotations(ORIGINAL_MARKUPS_ANNOT_SET_NAME).get(
"body");
if(body.size() > 0) {
// use the body annotation rather than the whole content
start = body.firstNode().getOffset();
end = body.lastNode().getOffset();
}
} catch(Exception e) {
// if we get an exception here just assume there is no body and
// continue on regardless
}
}
annotationSet.add(start, end, contentAnnotationName, params);
}
}
/**
 * Creates a PositionedTextBlock for boilerpipe from a section of text and the
 * annotations that overlap with it.
 *
 * NOTE: this method reads and advances the instance counters tokenIndex and
 * anchorIndex, so blocks MUST be requested in document order within a single
 * execute() run.
 *
 * @param docContent
 *          the String content of the document
 * @param tokens
 *          all the Token annotations in the document (sorted into order)
 * @param anchors
 *          all the a annotations from the original markups set (sorted into
 *          order)
 * @param start
 *          the start offset of this block
 * @param end
 *          the end offset of this block
 * @param offset
 *          the index (block ID) to assign to the created block
 * @return a newly created PositionedTextBlock or null if we didn't create one
 */
private PositionedTextBlock createTextBlock(String docContent,
List<Annotation> tokens, List<Annotation> anchors, int start,
int end, int offset) {
// This is basically a re-write of
// de.l3s.boilerpipe.sax.boilerpipeHTMLContentHandler
// using the information already available in the document through GATE so
// that a) we don't have to re-parse the documents b) can support formats
// other than HTML and c) can map the blocks directly back to the GATE
// document so that we can add annotations in the correct place. I've even
// reimplemented what I think are bugs in the original (tokens that trigger
// a wrapped line appear to be counted twice)
// a zero-length (or inverted) span cannot contain any text, so no block
if(end - start <= 0) return null;
// start at -1 so the separating space before the first word isn't counted
int currentLineLength = -1;
// the number of words in the block
int numWords = 0;
// the number of words that fall within a link
int numLinkedWords = 0;
// the number of lines in the block (80 chars per line)
int numWrappedLines = 0;
// number of tokens (i.e. words + punctuation etc.)
int numTokens = 0;
// number of words on the current line
int numWordsCurrentLine = 0;
while(tokenIndex < tokens.size()) {
// while there are still tokens to process
// get the current token
Annotation token = tokens.get(tokenIndex);
// if we have moved onto a token after the block then stop looking
if(token.getStartNode().getOffset() > end) break;
// update the token counter ready for next time around the loop
++tokenIndex;
if(token.getEndNode().getOffset() < start) {
// if we are before the beginning of the block then skip on to the next
// token
continue;
}
// add one to the number of tokens in this block
numTokens++;
// get the type of the token
String tokenKind =
(String)token.getFeatures().get(TOKEN_KIND_FEATURE_NAME);
if("word".equals(tokenKind)) {
// if this token is a word then...
// increase the number of words in the block
numWords++;
// add one to the number of words in the current line
numWordsCurrentLine++;
// get the length of the token
int tokenLength = Utils.length(token);
// add the length of the token to the length of the line
// (+1 accounts for the space between words)
currentLineLength += tokenLength + 1;
if(currentLineLength > MAX_LINE_LENGTH) {
// if the line is now longer than the max...
// add one to the number of lines in the block
numWrappedLines++;
// set the length of the new line to the length of the token
currentLineLength = tokenLength;
// and there is now just one word on the current line
numWordsCurrentLine = 1;
}
// skip over anchors before the token starts
Annotation a = null;
while(anchorIndex < anchors.size()) {
a = anchors.get(anchorIndex);
if(a.getEndNode().getOffset() > token.getStartNode().getOffset())
break;
++anchorIndex;
}
// check to see if the token overlaps with the anchor we have found
if(a != null
&& a.getStartNode().getOffset() <= token.getEndNode()
.getOffset()) {
// if we are in an anchor then add one more to the list of linked
// words
numLinkedWords++;
}
}
}
// if there are no tokens under this block then don't create a block
if(numTokens == 0) { return null; }
// work out how many words are in the wrapped lines: if nothing wrapped the
// whole block counts as a single (wrapped) line of numWords words,
// otherwise exclude the trailing partial line
int numWordsInWrappedLines;
if(numWrappedLines == 0) {
numWordsInWrappedLines = numWords;
numWrappedLines = 1;
} else {
numWordsInWrappedLines = numWords - numWordsCurrentLine;
}
// now create and return the block ready for it to be passed to the
// boilerpipe library
return new PositionedTextBlock(docContent.substring(start, end).trim(),
new BitSet(), numWords, numLinkedWords, numWordsInWrappedLines,
numWrappedLines, offset, start, end);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.client.api.impl;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.ContainerUpdateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ContainerUpdateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy.ContainerManagementProtocolProxyData;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.ipc.RPCUtil;
/**
* <p>
* This class implements {@link NMClient}. All the APIs are blocking.
* </p>
*
* <p>
* By default, this client stops all the running containers that are started by
* it when it stops. It can be disabled via
* {@link #cleanupRunningContainersOnStop}, in which case containers will
* continue to run even after this client is stopped and till the application
* runs at which point ResourceManager will forcefully kill them.
* </p>
*
* <p>
* Note that the blocking APIs ensure the RPC calls to <code>NodeManager</code>
* are executed immediately, and the responses are received before these APIs
* return. However, when {@link #startContainer} or {@link #stopContainer}
* returns, <code>NodeManager</code> may still need some time to either start
* or stop the container because of its asynchronous implementation. Therefore,
* {@link #getContainerStatus} is likely to return a transit container status
* if it is executed immediately after {@link #startContainer} or
* {@link #stopContainer}.
* </p>
*/
@Private
@Unstable
public class NMClientImpl extends NMClient {
private static final Log LOG = LogFactory.getLog(NMClientImpl.class);
// The logically coherent operations on startedContainers is synchronized to
// ensure they are atomic
// Containers started (and not yet forgotten) by this client, keyed by id.
protected ConcurrentMap<ContainerId, StartedContainer> startedContainers =
new ConcurrentHashMap<ContainerId, StartedContainer>();
//enabled by default
// When true (the default), serviceStop() stops all containers started by
// this client; toggled via cleanupRunningContainersOnStop().
private final AtomicBoolean cleanupRunningContainers = new AtomicBoolean(true);
// Proxy cache for talking to individual NodeManagers; created in serviceInit().
private ContainerManagementProtocolProxy cmProxy;
/** Creates a client whose service name is this class's name. */
public NMClientImpl() {
super(NMClientImpl.class.getName());
}
/** Creates a client with the given service name. */
public NMClientImpl(String name) {
super(name);
}
@Override
protected void serviceStop() throws Exception {
// Usually, started-containers are stopped when this client stops. Unless
// the flag cleanupRunningContainers is set to false.
if (getCleanupRunningContainers().get()) {
cleanupRunningContainers();
}
cmProxy.stopAllProxies();
super.serviceStop();
}
protected synchronized void cleanupRunningContainers() {
for (StartedContainer startedContainer : startedContainers.values()) {
try {
stopContainer(startedContainer.getContainerId(),
startedContainer.getNodeId());
} catch (YarnException e) {
LOG.error("Failed to stop Container " +
startedContainer.getContainerId() +
"when stopping NMClientImpl");
} catch (IOException e) {
LOG.error("Failed to stop Container " +
startedContainer.getContainerId() +
"when stopping NMClientImpl");
}
}
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
super.serviceInit(conf);
if (getNMTokenCache() == null) {
throw new IllegalStateException("NMTokenCache has not been set");
}
cmProxy = new ContainerManagementProtocolProxy(conf, getNMTokenCache());
}
@Override
public void cleanupRunningContainersOnStop(boolean enabled) {
getCleanupRunningContainers().set(enabled);
}
protected static class StartedContainer {
private ContainerId containerId;
private NodeId nodeId;
private ContainerState state;
public StartedContainer(ContainerId containerId, NodeId nodeId) {
this.containerId = containerId;
this.nodeId = nodeId;
state = ContainerState.NEW;
}
public ContainerId getContainerId() {
return containerId;
}
public NodeId getNodeId() {
return nodeId;
}
}
private void addStartingContainer(StartedContainer startedContainer)
throws YarnException {
if (startedContainers.putIfAbsent(startedContainer.containerId,
startedContainer) != null) {
throw RPCUtil.getRemoteException("Container "
+ startedContainer.containerId.toString() + " is already started");
}
}
@Override
public Map<String, ByteBuffer> startContainer(
Container container, ContainerLaunchContext containerLaunchContext)
throws YarnException, IOException {
// Do synchronization on StartedContainer to prevent race condition
// between startContainer and stopContainer only when startContainer is
// in progress for a given container.
StartedContainer startingContainer =
new StartedContainer(container.getId(), container.getNodeId());
synchronized (startingContainer) {
addStartingContainer(startingContainer);
Map<String, ByteBuffer> allServiceResponse;
ContainerManagementProtocolProxyData proxy = null;
try {
proxy =
cmProxy.getProxy(container.getNodeId().toString(),
container.getId());
StartContainerRequest scRequest =
StartContainerRequest.newInstance(containerLaunchContext,
container.getContainerToken());
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
StartContainersResponse response =
proxy
.getContainerManagementProtocol().startContainers(allRequests);
if (response.getFailedRequests() != null
&& response.getFailedRequests().containsKey(container.getId())) {
Throwable t =
response.getFailedRequests().get(container.getId()).deSerialize();
parseAndThrowException(t);
}
allServiceResponse = response.getAllServicesMetaData();
startingContainer.state = ContainerState.RUNNING;
} catch (YarnException | IOException e) {
startingContainer.state = ContainerState.COMPLETE;
// Remove the started container if it failed to start
startedContainers.remove(startingContainer.containerId);
throw e;
} catch (Throwable t) {
startingContainer.state = ContainerState.COMPLETE;
startedContainers.remove(startingContainer.containerId);
throw RPCUtil.getRemoteException(t);
} finally {
if (proxy != null) {
cmProxy.mayBeCloseProxy(proxy);
}
}
return allServiceResponse;
}
}
@Override
public void increaseContainerResource(Container container)
throws YarnException, IOException {
ContainerManagementProtocolProxyData proxy = null;
try {
proxy = cmProxy.getProxy(
container.getNodeId().toString(), container.getId());
List<Token> increaseTokens = new ArrayList<>();
increaseTokens.add(container.getContainerToken());
ContainerUpdateRequest request =
ContainerUpdateRequest.newInstance(increaseTokens);
ContainerUpdateResponse response =
proxy.getContainerManagementProtocol().updateContainer(request);
if (response.getFailedRequests() != null
&& response.getFailedRequests().containsKey(container.getId())) {
Throwable t = response.getFailedRequests().get(container.getId())
.deSerialize();
parseAndThrowException(t);
}
} finally {
if (proxy != null) {
cmProxy.mayBeCloseProxy(proxy);
}
}
}
@Override
public void stopContainer(ContainerId containerId, NodeId nodeId)
throws YarnException, IOException {
StartedContainer startedContainer = startedContainers.get(containerId);
// Only allow one request of stopping the container to move forward
// When entering the block, check whether the precursor has already stopped
// the container
if (startedContainer != null) {
synchronized (startedContainer) {
if (startedContainer.state != ContainerState.RUNNING) {
return;
}
stopContainerInternal(containerId, nodeId);
// Only after successful
startedContainer.state = ContainerState.COMPLETE;
startedContainers.remove(startedContainer.containerId);
}
} else {
stopContainerInternal(containerId, nodeId);
}
}
@Override
public ContainerStatus getContainerStatus(ContainerId containerId,
NodeId nodeId) throws YarnException, IOException {
ContainerManagementProtocolProxyData proxy = null;
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(containerId);
try {
proxy = cmProxy.getProxy(nodeId.toString(), containerId);
GetContainerStatusesResponse response =
proxy.getContainerManagementProtocol().getContainerStatuses(
GetContainerStatusesRequest.newInstance(containerIds));
if (response.getFailedRequests() != null
&& response.getFailedRequests().containsKey(containerId)) {
Throwable t =
response.getFailedRequests().get(containerId).deSerialize();
parseAndThrowException(t);
}
ContainerStatus containerStatus = response.getContainerStatuses().get(0);
return containerStatus;
} finally {
if (proxy != null) {
cmProxy.mayBeCloseProxy(proxy);
}
}
}
@Override
public void reInitializeContainer(ContainerId containerId,
ContainerLaunchContext containerLaunchContex, boolean autoCommit)
throws YarnException, IOException {
ContainerManagementProtocolProxyData proxy = null;
StartedContainer container = startedContainers.get(containerId);
if (container != null) {
synchronized (container) {
proxy = cmProxy.getProxy(container.getNodeId().toString(), containerId);
try {
proxy.getContainerManagementProtocol().reInitializeContainer(
ReInitializeContainerRequest.newInstance(
containerId, containerLaunchContex, autoCommit));
} finally {
if (proxy != null) {
cmProxy.mayBeCloseProxy(proxy);
}
}
}
} else {
throw new YarnException("Unknown container [" + containerId + "]");
}
}
@Override
public void restartContainer(ContainerId containerId)
throws YarnException, IOException {
restartCommitOrRollbackContainer(containerId, UpgradeOp.RESTART);
}
@Override
public void rollbackLastReInitialization(ContainerId containerId)
throws YarnException, IOException {
restartCommitOrRollbackContainer(containerId, UpgradeOp.ROLLBACK);
}
@Override
public void commitLastReInitialization(ContainerId containerId)
throws YarnException, IOException {
restartCommitOrRollbackContainer(containerId, UpgradeOp.COMMIT);
}
private void restartCommitOrRollbackContainer(ContainerId containerId,
UpgradeOp upgradeOp) throws YarnException, IOException {
ContainerManagementProtocolProxyData proxy = null;
StartedContainer container = startedContainers.get(containerId);
if (container != null) {
synchronized (container) {
proxy = cmProxy.getProxy(container.getNodeId().toString(), containerId);
ContainerManagementProtocol cmp =
proxy.getContainerManagementProtocol();
try {
switch (upgradeOp) {
case RESTART:
cmp.restartContainer(containerId);
break;
case COMMIT:
cmp.commitLastReInitialization(containerId);
break;
case ROLLBACK:
cmp.rollbackLastReInitialization(containerId);
break;
default:
// Should not happen..
break;
}
} finally {
if (proxy != null) {
cmProxy.mayBeCloseProxy(proxy);
}
}
}
} else {
throw new YarnException("Unknown container [" + containerId + "]");
}
}
private void stopContainerInternal(ContainerId containerId, NodeId nodeId)
throws IOException, YarnException {
ContainerManagementProtocolProxyData proxy = null;
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(containerId);
try {
proxy = cmProxy.getProxy(nodeId.toString(), containerId);
StopContainersResponse response =
proxy.getContainerManagementProtocol().stopContainers(
StopContainersRequest.newInstance(containerIds));
if (response.getFailedRequests() != null
&& response.getFailedRequests().containsKey(containerId)) {
Throwable t = response.getFailedRequests().get(containerId)
.deSerialize();
parseAndThrowException(t);
}
} finally {
if (proxy != null) {
cmProxy.mayBeCloseProxy(proxy);
}
}
}
public AtomicBoolean getCleanupRunningContainers() {
return cleanupRunningContainers;
}
private void parseAndThrowException(Throwable t) throws YarnException,
IOException {
if (t instanceof YarnException) {
throw (YarnException) t;
} else if (t instanceof InvalidToken) {
throw (InvalidToken) t;
} else {
throw (IOException) t;
}
}
@Override
public NodeId getNodeIdOfStartedContainer(ContainerId containerId) {
StartedContainer container = startedContainers.get(containerId);
if (container != null) {
return container.getNodeId();
}
return null;
}
}
| |
// Copyright (c) 2003 Compaq Corporation. All rights reserved.
// Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved.
// Last modified on Mon 30 Apr 2007 at 15:30:03 PST by lamport
// modified on Fri Feb 16 14:26:21 PST 2001 by yuanyu
package tlc2.util;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.function.Function;
import tla2sany.semantic.SymbolNode;
import tlc2.value.impl.StringValue;
import tlc2.value.impl.Value;
import util.UniqueString;
// Context is used two times:
// 1) To determine the level boundedness of the expression appearing in the spec
// (see Tool#getLevelBounds*). This is done once as part of parsing.
// 2) To store the variable/name binding at each scope during state exploration.
// The hierarchy of levels/scopes is represented as a chain of context instances
// (link-list or associative list). The Empty context is the outer most scope
// (first level).
// 1) is not relevant performance-wise, but 2) has - provided non-trivial level -
// a significant performance impact because of excessive lookups. What makes
// optimizations difficult, is the fact that the context chain is recreated each
// time a state is generated, as part of the next state relation. An optimization
// thus has to ensure that it doesn't trade fast - ideally constant - lookups for
// an equally expensive creation complexity.
//
// The contrived spec at the bottom exhibits this problem. Increasing the level,
// the number of lookups go through the roof.
public final class Context implements Iterator<Context> {
  /**
   * One link of an immutable associative list of <name, value> bindings.
   * Each node holds a single binding plus a reference to the remainder of
   * the chain, which always terminates at {@link #Empty}. A node whose name
   * is null is either the Empty terminator itself or a branch marker
   * produced by {@link #branch(Context)}.
   */
  private final SymbolNode name;
  private final Object value;
  private final Context next;

  /** The outermost scope; terminator of every context chain. */
  public final static Context Empty = new Context(null, null, null);

  /** Shared branch marker that sits directly on top of Empty. */
  private final static Context BaseBranch = new Context(null, null, Empty);

  private Context(SymbolNode name, Object value, final Context next) {
    this.name = name;
    this.value = value;
    this.next = next;
  }

  // This method is only called within the context of the ENABLED (temporal)
  // operator. The resulting marker node receives special treatment in
  // lookup(SymbolNode, boolean) when cutoff is true.
  public static Context branch(Context base) {
    // Reuse the shared marker when branching off Empty to avoid allocating
    // a fresh (Branch -> Empty) pair each time.
    return (base == Empty) ? BaseBranch : new Context(null, null, base);
  }

  /** Returns a new context extending this one with the binding <name, value>. */
  public final Context cons(SymbolNode name, Object value) {
    return new Context(name, value, this);
  }

  /**
   * Returns the value bound to var, or null if this chain contains no such
   * binding. Names are compared by identity.
   */
  public final Object lookup(SymbolNode var) {
    for (Context ctx = this; ctx != Empty; ctx = ctx.next) {
      if (ctx.name == var) {
        return ctx.value;
      }
    }
    return null;
  }

  /** Returns the value of the first binding whose name satisfies f, or null. */
  public final Object lookup(final Function<SymbolNode, Boolean> f) {
    for (Context ctx = this; ctx != Empty; ctx = ctx.next) {
      if (f.apply(ctx.name)) {
        return ctx.value;
      }
    }
    return null;
  }

  /** Returns the first name in the chain satisfying f, or null. */
  public final SymbolNode lookupName(final Function<SymbolNode, Boolean> f) {
    for (Context ctx = this; ctx != Empty; ctx = ctx.next) {
      if (f.apply(ctx.name)) {
        return ctx.name;
      }
    }
    return null;
  }

  /**
   * @param var
   *            The SymbolNode to look up (compared by identity).
   * @param cutoff
   *            Iff true, the search stops at a branching Context; otherwise
   *            the complete chain is followed.
   * @return the value bound to var, or null if no binding was found before
   *         the chain ended (or, with cutoff, before a branch marker).
   */
  public final Object lookup(final SymbolNode var, final boolean cutoff) {
    for (Context ctx = this; ctx != Empty; ctx = ctx.next) {
      if (ctx.name != null) {
        // Regular binding: identity comparison, as in lookup(SymbolNode).
        if (ctx.name == var) {
          return ctx.value;
        }
      } else if (cutoff) {
        // Reached a branching context (see branch(..) above).
        assert ctx.value == null;
        return null;
      }
    }
    return null;
  }

  // Wraps a bound value as a Value; non-Value bindings become StringValues.
  private static Value asValue(final Object val) {
    return (val instanceof Value) ? (Value) val : new StringValue(val.toString());
  }

  /**
   * Returns all bindings of this chain as a map, skipping branch markers.
   * When a name is bound more than once, the binding closest to Empty (the
   * oldest one) is the one that remains in the map.
   */
  public final Map<UniqueString, Value> toMap() {
    final Map<UniqueString, Value> bindings = new HashMap<>();
    for (Context ctx = this; ctx != Empty; ctx = ctx.next) {
      if (ctx.name != null) {
        bindings.put(ctx.name.getName(), asValue(ctx.value));
      }
    }
    return bindings;
  }

  /**
   * Appends all "name->value" pairs to sb. Pairs within a run of consecutive
   * bindings are separated by ", "; a pair that directly follows a branch
   * marker starts a new run and is appended without a separator.
   */
  public final StringBuffer toString(StringBuffer sb) {
    boolean inRun = false;
    for (Context ctx = this; ctx != Empty; ctx = ctx.next) {
      if (ctx.name == null) {
        // Branch marker: the next binding begins a fresh run.
        inRun = false;
        continue;
      }
      if (inRun) {
        sb.append(", ");
      }
      sb.append(ctx.name.getName());
      sb.append("->");
      sb.append(ctx.value);
      inRun = true;
    }
    return sb;
  }

  public final String toString() {
    final StringBuffer buf = new StringBuffer("[");
    this.toString(buf);
    return buf.append("]").toString();
  }

  @Override
  public boolean hasNext() {
    // Only the Empty terminator has a null successor.
    return this.next != null;
  }

  @Override
  public Context next() {
    return this.next;
  }

  public final SymbolNode getName() {
    return name;
  }

  public final Object getValue() {
    return value;
  }

  public final boolean isEmpty() {
    return this == Empty;
  }

  /**
   * Returns the number of nodes in this chain (branch markers included).
   * Must not be invoked on the Empty context itself.
   */
  public final int depth() {
    int count = 1;
    for (Context ctx = next(); ctx.hasNext(); ctx = ctx.next()) {
      count++;
    }
    return count;
  }

  /** Returns a structural copy of this chain; Empty is shared, never copied. */
  public Context deepCopy() {
    return (this == Empty)
        ? this
        : new Context(this.name, this.value, this.next.deepCopy());
  }
}
/*
----------------------------- MODULE Scoping -----------------------------
EXTENDS Naturals
CONSTANT Limit
VARIABLE var
A(a) == TRUE
B(b) == A(b)
C(c) == B(c)
D(d) == C(d)
E(e) == D(e)
F(f) == E(f)
G(g) == F(g)
H(h) == G(h)
I(i) == H(i)
J(j) == I(j)
K(k) == J(k)
L(l) == K(l)
M(m) == L(m)
N(n) == M(n)
O(o) == N(o)
P(p) == O(p)
Q(q) == P(q)
R(r) == Q(r)
S(s) == R(s)
T(t) == S(t)
U(u) == T(u)
V(v) == U(v)
W(w) == V(w)
X(x) == W(x)
Y(y) == X(y)
Z(z) == U(z)
Next == /\ var' = var + 1
/\ var < 1
/\ Z(1)
Spec == var=0 /\ [][Next]_<<var>>
=============================================================================
*/
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.catalog.server.channel;
//JDK imports
import org.apache.oodt.cas.catalog.exception.CatalogServiceException;
import org.apache.oodt.cas.catalog.metadata.TransactionalMetadata;
import org.apache.oodt.cas.catalog.page.CatalogReceipt;
import org.apache.oodt.cas.catalog.page.Page;
import org.apache.oodt.cas.catalog.page.PageInfo;
import org.apache.oodt.cas.catalog.page.QueryPager;
import org.apache.oodt.cas.catalog.page.TransactionReceipt;
import org.apache.oodt.cas.catalog.query.QueryExpression;
import org.apache.oodt.cas.catalog.struct.Dictionary;
import org.apache.oodt.cas.catalog.struct.Index;
import org.apache.oodt.cas.catalog.struct.TransactionId;
import org.apache.oodt.cas.catalog.system.Catalog;
import org.apache.oodt.cas.catalog.system.CatalogService;
import org.apache.oodt.cas.catalog.util.PluginURL;
import org.apache.oodt.cas.catalog.util.Serializer;
import org.apache.oodt.cas.metadata.Metadata;
import java.net.URL;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
//OODT imports
/**
* @author bfoster
* @version $Revision$
*
* <p>
* An Abstract Communication Channel Server Interface that automatically handles throw exceptions
* <p>
*/
public abstract class AbstractCommunicationChannelServer implements CommunicationChannelServer {
private static Logger LOG = Logger.getLogger(AbstractCommunicationChannelServer.class.getName());
protected CatalogService catalogService;
protected int port;
protected Serializer serializer;
public AbstractCommunicationChannelServer() {
this.serializer = new Serializer();
}
public void setCatalogService(CatalogService catalogService) {
this.catalogService = catalogService;
}
public void setPort(int port) {
this.port = port;
}
public int getPort() {
return this.port;
}
public void shutdown() throws CatalogServiceException {
try {
this.catalogService.shutdown();
this.catalogService = null;
System.gc(); // used to speed up shutdown process (gives java a boost-start at cleaning up everything so server will die)
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed to shutdown server : " + e.getMessage(), e);
throw e;
}
}
public boolean isRestrictQueryPermissions() throws CatalogServiceException {
try {
return this.catalogService.isRestrictQueryPermissions();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while checking server query permissions : " + e.getMessage(), e);
throw e;
}
}
public boolean isRestrictIngestPermissions() throws CatalogServiceException {
try {
return this.catalogService.isRestrictIngestPermissions();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while checking server ingest permissions : " + e.getMessage(), e);
throw e;
}
}
public void addCatalog(Catalog catalog) throws CatalogServiceException {
try {
this.catalogService.addCatalog(catalog);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while adding catalog '" + catalog + "' to server : " + e.getMessage(), e);
throw e;
}
}
public void replaceCatalog(Catalog catalog) throws CatalogServiceException {
try {
this.catalogService.replaceCatalog(catalog);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while replacing catalog '" + catalog + "' to server : " + e.getMessage(), e);
throw e;
}
}
public void addCatalog(String catalogId, Index index) throws CatalogServiceException {
try {
this.catalogService.addCatalog(catalogId, index);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while adding catalog '" + catalogId + "' with index '" + index + "' to server : " + e.getMessage(), e);
throw e;
}
}
public void addCatalog(String catalogId, Index index, List<Dictionary> dictionaries) throws CatalogServiceException {
try {
this.catalogService.addCatalog(catalogId, index, dictionaries);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while adding catalog '" + catalogId + "' with index '" + index + "' and dictionaries '" + dictionaries + "' to server : " + e.getMessage(), e);
throw e;
}
}
public void addCatalog(String catalogId, Index index, List<Dictionary> dictionaries, boolean restrictQueryPermission, boolean restrictIngestPermission) throws CatalogServiceException {
try {
this.catalogService.addCatalog(catalogId, index, dictionaries, restrictQueryPermission, restrictIngestPermission);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while adding catalog '" + catalogId + "' with index '" + index + "' and dictionaries '" + dictionaries + "' and restrictQueryPermission '" + restrictQueryPermission + "' and restrictIngestPermission '" + restrictIngestPermission + "' to server : " + e.getMessage(), e);
throw e;
}
}
public void addDictionary(String catalogId, Dictionary dictionary) throws CatalogServiceException {
try {
this.catalogService.addDictionary(catalogId, dictionary);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while adding dictionary '" + dictionary + "' to catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
public void replaceDictionaries(String catalogId, List<Dictionary> dictionaries) throws CatalogServiceException {
try {
this.catalogService.replaceDictionaries(catalogId, dictionaries);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while replacing dictionaries '" + dictionaries + "' in catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
public void replaceIndex(String catalogId, Index index) throws CatalogServiceException {
try {
this.catalogService.replaceIndex(catalogId, index);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while replacing index '" + index + "' in catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
public void modifyIngestPermission(String catalogId, boolean restrictIngestPermission) throws CatalogServiceException {
try {
this.catalogService.modifyIngestPermission(catalogId, restrictIngestPermission);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while changing ingest permissions for catalog '" + catalogId + "' to '" + restrictIngestPermission + "' : " + e.getMessage(), e);
throw e;
}
}
public void modifyQueryPermission(String catalogId, boolean restrictQueryPermission) throws CatalogServiceException {
try {
this.catalogService.modifyQueryPermission(catalogId, restrictQueryPermission);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while changing query permissions for catalog '" + catalogId + "' to '" + restrictQueryPermission + "' : " + e.getMessage(), e);
throw e;
}
}
public void removeCatalog(String catalogId) throws CatalogServiceException {
try {
this.catalogService.removeCatalog(catalogId);
} catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while removing catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
public List<PluginURL> getPluginUrls() throws CatalogServiceException {
try {
return this.catalogService.getPluginUrls();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting plugin URLs : " + e.getMessage(), e);
throw e;
}
}
public void addPluginUrls(List<PluginURL> pluginURLs) throws CatalogServiceException {
try {
this.catalogService.addPluginUrls(pluginURLs);
this.serializer.refreshClassLoader();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while adding plugin URLs '" + pluginURLs + "' : " + e.getMessage(), e);
throw e;
}
}
public URL getPluginStorageDir() throws CatalogServiceException {
try {
return this.catalogService.getPluginStorageDir();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting plugin storage directory : " + e.getMessage(), e);
throw e;
}
}
public Set<String> getCurrentCatalogIds() throws CatalogServiceException {
try {
return this.catalogService.getCurrentCatalogIds();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting current catalog ids : " + e.getMessage(), e);
throw e;
}
}
public TransactionReceipt ingest(Metadata metadata) throws CatalogServiceException {
try {
return this.catalogService.ingest(metadata);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while performing ingest : " + e.getMessage(), e);
throw e;
}
}
public void delete(Metadata metadata) throws CatalogServiceException {
try {
this.catalogService.delete(metadata);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while performing deletion : " + e.getMessage(), e);
throw e;
}
}
public List<String> getProperty(String key) throws CatalogServiceException {
try {
return this.catalogService.getProperty(key);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting property '" + key + "' : " + e.getMessage(), e);
throw e;
}
}
public Properties getCalalogProperties() throws CatalogServiceException {
try {
return this.catalogService.getCalalogProperties();
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting catalog properties : " + e.getMessage(), e);
throw e;
}
}
public Properties getCalalogProperties(String catalogId) throws CatalogServiceException {
try {
return this.catalogService.getCalalogProperties(catalogId);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting catalog properties for catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
public Page getNextPage(Page page) throws CatalogServiceException {
try {
return this.catalogService.getNextPage(page);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting next page : " + e.getMessage(), e);
throw e;
}
}
public Page getPage(PageInfo pageInfo, QueryExpression queryExpression) throws CatalogServiceException {
try {
return this.catalogService.getPage(pageInfo, queryExpression);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting next page [pageInfo='" + pageInfo + "',query='" + queryExpression + "'] : " + e.getMessage(), e);
throw e;
}
}
public Page getPage(PageInfo pageInfo, QueryExpression queryExpression, Set<String> catalogIds) throws CatalogServiceException {
try {
return this.catalogService.getPage(pageInfo, queryExpression, catalogIds);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting next page [pageInfo='" + pageInfo + "',query='" + queryExpression + "',catalogIds='" + catalogIds + "'] : " + e.getMessage(), e);
throw e;
}
}
public List<TransactionalMetadata> getMetadata(Page page) throws CatalogServiceException {
try {
return this.catalogService.getMetadata(page);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting metadata for page : " + e.getMessage(), e);
throw e;
}
}
public QueryPager query(QueryExpression queryExpression) throws CatalogServiceException {
try {
return this.catalogService.query(queryExpression);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while performing query '" + queryExpression + "' : " + e.getMessage(), e);
throw e;
}
}
public QueryPager query(QueryExpression queryExpression, Set<String> catalogIds) throws CatalogServiceException {
try {
return this.catalogService.query(queryExpression, catalogIds);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while performing query '" + queryExpression + "' to catalogs '" + catalogIds + "' : " + e.getMessage(), e);
throw e;
}
}
public List<TransactionalMetadata> getNextPage(QueryPager queryPager) throws CatalogServiceException {
try {
return this.catalogService.getNextPage(queryPager);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while get next page from query pager : " + e.getMessage(), e);
throw e;
}
}
public List<TransactionalMetadata> getAllPages(QueryPager queryPager) throws CatalogServiceException {
try {
return this.catalogService.getAllPages(queryPager);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while get all pages from query pager : " + e.getMessage(), e);
throw e;
}
}
public List<TransactionalMetadata> getMetadataFromTransactionIdStrings(List<String> catalogServiceTransactionIdStrings) throws CatalogServiceException {
try {
return this.catalogService.getMetadataFromTransactionIdStrings(catalogServiceTransactionIdStrings);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting metadata for catalog service transaction ids '" + catalogServiceTransactionIdStrings + "' : " + e.getMessage(), e);
throw e;
}
}
public List<TransactionalMetadata> getMetadataFromTransactionIds(List<TransactionId<?>> catalogServiceTransactionIds) throws CatalogServiceException {
try {
return this.catalogService.getMetadataFromTransactionIds(catalogServiceTransactionIds);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting metadata for catalog service transaction ids '" + catalogServiceTransactionIds + "' : " + e.getMessage(), e);
throw e;
}
}
public List<TransactionId<?>> getCatalogServiceTransactionIds(List<TransactionId<?>> catalogTransactionIds, String catalogId) throws CatalogServiceException {
try {
return this.catalogService.getCatalogServiceTransactionIds(catalogTransactionIds, catalogId);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting catalog service transaction ids for catalog transaction ids '" + catalogTransactionIds + "' from catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
public TransactionId<?> getCatalogServiceTransactionId(TransactionId<?> catalogTransactionId, String catalogId) throws CatalogServiceException {
try {
return this.catalogService.getCatalogServiceTransactionId(catalogTransactionId, catalogId);
}catch (CatalogServiceException e) {
LOG.log(Level.SEVERE, "Failed while getting catalog service transaction id for catalog transaction id '" + catalogTransactionId + "' from catalog '" + catalogId + "' : " + e.getMessage(), e);
throw e;
}
}
/**
 * Resolves the catalog service transaction id for the given catalog receipt
 * by delegating to the wrapped catalog service.
 *
 * @param catalogReceipt the receipt whose transaction id is wanted
 * @param generateNew whether the delegate may generate a new transaction id
 * @return the resolved catalog service transaction id
 * @throws CatalogServiceException if the delegate call fails; the failure is
 *         logged at SEVERE (with cause) and rethrown unchanged
 */
public TransactionId<?> getCatalogServiceTransactionId(CatalogReceipt catalogReceipt, boolean generateNew) throws CatalogServiceException {
    try {
        return this.catalogService.getCatalogServiceTransactionId(catalogReceipt, generateNew);
    } catch (CatalogServiceException e) {
        // Fixed log message: this method resolves a transaction id, not
        // metadata (the old text said "getting metadata for ...").
        LOG.log(Level.SEVERE, "Failed while getting catalog service transaction id for catalog receipt '" + catalogReceipt + "' with generate new equal '" + generateNew + "' : " + e.getMessage(), e);
        throw e;
    }
}
}
| |
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//httpclient/src/test/org/apache/commons/httpclient/cookie/TestCookieNetscapeDraft.java,v 1.2 2004/04/24 23:28:04 olegk Exp $
* $Revision: 327789 $
* $Date: 2005-10-23 09:17:00 -0400 (Sun, 23 Oct 2005) $
* ====================================================================
*
* Copyright 1999-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.httpclient.cookie;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.commons.httpclient.Cookie;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.NameValuePair;
/**
* Test cases for Netscape cookie draft
*
* @author <a href="mailto:oleg@ural.ru">Oleg Kalnichevski</a>
*
* @version $Revision: 327789 $
*/
public class TestCookieNetscapeDraft extends TestCookieBase {

    // ------------------------------------------------------------ Constructor
    public TestCookieNetscapeDraft(String name) {
        super(name);
    }

    // ------------------------------------------------------- TestCase Methods
    public static Test suite() {
        return new TestSuite(TestCookieNetscapeDraft.class);
    }

    /** parseAttribute must reject a null attribute. */
    public void testParseAttributeInvalidAttrib() throws Exception {
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookiespec.parseAttribute(null, null);
            fail("IllegalArgumentException must have been thrown");
        } catch (IllegalArgumentException expected) {
            // expected
        }
    }

    /** parseAttribute must reject a null cookie. */
    public void testParseAttributeInvalidCookie() throws Exception {
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookiespec.parseAttribute(new NameValuePair("name", "value"), null);
            fail("IllegalArgumentException must have been thrown");
        } catch (IllegalArgumentException expected) {
            // expected
        }
    }

    /** An "expires" attribute with a null value is malformed. */
    public void testParseAttributeInvalidCookieExpires() throws Exception {
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie cookie = new Cookie();
        try {
            cookiespec.parseAttribute(new NameValuePair("expires", null), cookie);
            fail("MalformedCookieException must have been thrown");
        } catch (MalformedCookieException expected) {
            // expected
        }
    }

    /** Parsing with a null host must be rejected. */
    public void testParseWithNullHost() throws Exception {
        Header header = new Header("Set-Cookie",
            "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            // Return value intentionally discarded: the call must throw.
            cookieParse(cookiespec, null, 80, "/", false, header);
            fail("IllegalArgumentException should have been thrown");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** Parsing with a blank host must be rejected. */
    public void testParseWithBlankHost() throws Exception {
        Header header = new Header("Set-Cookie",
            "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookieParse(cookiespec, " ", 80, "/", false, header);
            fail("IllegalArgumentException should have been thrown");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** Parsing with a null path must be rejected. */
    public void testParseWithNullPath() throws Exception {
        Header header = new Header("Set-Cookie",
            "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookieParse(cookiespec, "127.0.0.1", 80, null, false, header);
            fail("IllegalArgumentException should have been thrown");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** A blank request path defaults the cookie path to "/". */
    public void testParseWithBlankPath() throws Exception {
        Header header = new Header("Set-Cookie",
            "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie[] parsed = cookieParse(cookiespec, "127.0.0.1", 80, " ", false, header);
        assertNotNull(parsed);
        assertEquals(1, parsed.length);
        assertEquals("/", parsed[0].getPath());
    }

    /** Parsing with a negative port must be rejected. */
    public void testParseWithNegativePort() throws Exception {
        Header header = new Header("Set-Cookie",
            "cookie-name=cookie-value; domain=127.0.0.1; path=/; secure");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookieParse(cookiespec, "127.0.0.1", -80, null, false, header);
            fail("IllegalArgumentException should have been thrown");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** Parsing a null header string must be rejected. */
    public void testParseWithInvalidHeader1() throws Exception {
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookiespec.parse("127.0.0.1", 80, "/foo", false, (String) null);
            fail("IllegalArgumentException should have been thrown.");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** An absolute Path attribute is taken verbatim. */
    public void testParseAbsPath() throws Exception {
        Header header = new Header("Set-Cookie", "name1=value1;Path=/path/");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie[] parsed = cookieParse(cookiespec, "host", 80, "/path/", true, header);
        assertEquals("Found 1 cookies.", 1, parsed.length);
        assertEquals("Name", "name1", parsed[0].getName());
        assertEquals("Value", "value1", parsed[0].getValue());
        assertEquals("Domain", "host", parsed[0].getDomain());
        assertEquals("Path", "/path/", parsed[0].getPath());
    }

    /** A root Path attribute is taken verbatim. */
    public void testParseAbsPath2() throws Exception {
        Header header = new Header("Set-Cookie", "name1=value1;Path=/");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie[] parsed = cookieParse(cookiespec, "host", 80, "/", true, header);
        assertEquals("Found 1 cookies.", 1, parsed.length);
        assertEquals("Name", "name1", parsed[0].getName());
        assertEquals("Value", "value1", parsed[0].getValue());
        assertEquals("Domain", "host", parsed[0].getDomain());
        assertEquals("Path", "/", parsed[0].getPath());
    }

    /** A relative Path attribute is taken verbatim under the Netscape draft. */
    public void testParseRelativePath() throws Exception {
        Header header = new Header("Set-Cookie", "name1=value1;Path=whatever");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie[] parsed = cookieParse(cookiespec, "host", 80, "whatever", true, header);
        assertEquals("Found 1 cookies.", 1, parsed.length);
        assertEquals("Name", "name1", parsed[0].getName());
        assertEquals("Value", "value1", parsed[0].getValue());
        assertEquals("Domain", "host", parsed[0].getDomain());
        assertEquals("Path", "whatever", parsed[0].getPath());
    }

    /** A bare top-level domain (".com") is not a valid Netscape domain. */
    public void testParseWithIllegalNetscapeDomain1() throws Exception {
        Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=.com");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookieParse(cookiespec, "a.com", 80, "/", false, header);
            fail("HttpException exception should have been thrown");
        } catch (HttpException e) {
            // expected
        }
    }

    /** Domain attribute must match the originating host per the draft. */
    public void testParseWithWrongNetscapeDomain2() throws Exception {
        Header header = new Header("Set-Cookie", "cookie-name=cookie-value; domain=.y.z");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        try {
            cookieParse(cookiespec, "x.y.z", 80, "/", false, header);
            fail("HttpException exception should have been thrown");
        } catch (HttpException e) {
            // expected
        }
    }

    /**
     * Tests Netscape specific cookie formatting: only "name=value" is sent.
     */
    public void testNetscapeCookieFormatting() throws Exception {
        Header header = new Header(
            "Set-Cookie", "name=value; path=/; domain=.mydomain.com");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie[] cookies = cookiespec.parse("myhost.mydomain.com", 80, "/", false, header);
        cookiespec.validate("myhost.mydomain.com", 80, "/", false, cookies[0]);
        String s = cookiespec.formatCookie(cookies[0]);
        assertEquals("name=value", s);
    }

    /**
     * Tests Netscape specific expires attribute parsing: the comma-separated
     * form is accepted, the space-separated form is rejected.
     */
    public void testNetscapeCookieExpireAttribute() throws Exception {
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Header header = new Header("Set-Cookie",
            "name=value; path=/; domain=.mydomain.com; expires=Thu, 01-Jan-2070 00:00:10 GMT; comment=no_comment");
        Cookie[] cookies = cookiespec.parse("myhost.mydomain.com", 80, "/", false, header);
        cookiespec.validate("myhost.mydomain.com", 80, "/", false, cookies[0]);
        header = new Header("Set-Cookie",
            "name=value; path=/; domain=.mydomain.com; expires=Thu 01-Jan-2070 00:00:10 GMT; comment=no_comment");
        try {
            cookies = cookiespec.parse("myhost.mydomain.com", 80, "/", false, header);
            cookiespec.validate("myhost.mydomain.com", 80, "/", false, cookies[0]);
            fail("MalformedCookieException must have been thrown");
        } catch (MalformedCookieException expected) {
            // expected
        }
    }

    /**
     * Tests Netscape specific expires attribute without a time zone.
     */
    public void testNetscapeCookieExpireAttributeNoTimeZone() throws Exception {
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Header header = new Header("Set-Cookie",
            "name=value; expires=Thu, 01-Jan-2006 00:00:00 ");
        try {
            cookiespec.parse("myhost.mydomain.com", 80, "/", false, header);
            fail("MalformedCookieException should have been thrown");
        } catch (MalformedCookieException ex) {
            // expected
        }
    }

    /**
     * Tests that cookie values with an embedded comma are not split.
     */
    public void testCookieWithComma() throws Exception {
        Header header = new Header("Set-Cookie", "a=b,c");
        CookieSpec cookiespec = new NetscapeDraftSpec();
        Cookie[] cookies = cookiespec.parse("localhost", 80, "/", false, header);
        assertEquals("number of cookies", 1, cookies.length);
        assertEquals("a", cookies[0].getName());
        assertEquals("b,c", cookies[0].getValue());
    }
}
| |
/*
* Copyright 2011 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.widgets.decoratedgrid.data;
import org.junit.Before;
import org.junit.Test;
import org.kie.workbench.common.widgets.decoratedgrid.client.widget.CellValue;
import org.kie.workbench.common.widgets.decoratedgrid.client.widget.data.Coordinate;
import static junit.framework.Assert.assertEquals;
/**
* Tests for DynamicData
*/
public class DynamicDataTestsWithMergingRowAdditions extends BaseDynamicDataTests {

    @Before
    public void setup() {
        super.setup();
        //Setup data to merge
        //[1][-][3]
        //[1][2][3]
        //[-][2][3]
        data.get( 0 ).get( 0 ).setValue( "1" );
        data.get( 0 ).get( 1 ).setValue( "-" );
        data.get( 0 ).get( 2 ).setValue( "3" );
        data.get( 1 ).get( 0 ).setValue( "1" );
        data.get( 1 ).get( 1 ).setValue( "2" );
        data.get( 1 ).get( 2 ).setValue( "3" );
        data.get( 2 ).get( 0 ).setValue( "-" );
        data.get( 2 ).get( 1 ).setValue( "2" );
        data.get( 2 ).get( 2 ).setValue( "3" );
    }

    // Asserts a single coordinate. Note: JUnit's assertEquals takes
    // (expected, actual); the original tests passed them reversed, which
    // produced misleading failure messages.
    private void assertCoordinate( Coordinate c,
                                   int expectedRow,
                                   int expectedCol ) {
        assertEquals( expectedRow, c.getRow() );
        assertEquals( expectedCol, c.getCol() );
    }

    // Checks every cell's HTML coordinate against an expected
    // [row][col] -> {row, col} table.
    private void assertHtmlCoordinates( int[][][] expected ) {
        for ( int row = 0; row < expected.length; row++ ) {
            for ( int col = 0; col < expected[ row ].length; col++ ) {
                Coordinate c = data.get( row ).get( col ).getHtmlCoordinate();
                assertCoordinate( c,
                                  expected[ row ][ col ][ 0 ],
                                  expected[ row ][ col ][ 1 ] );
            }
        }
    }

    // Checks selected cells' physical coordinates. Each check is
    // {cellRow, cellCol, expectedRow, expectedCol}.
    private void assertPhysicalCoordinates( int[][] checks ) {
        for ( int[] check : checks ) {
            Coordinate c = data.get( check[ 0 ] ).get( check[ 1 ] ).getPhysicalCoordinate();
            assertCoordinate( c,
                              check[ 2 ],
                              check[ 3 ] );
        }
    }

    // Checks every cell's row span against an expected [row][col] table.
    private void assertRowSpans( int[][] expected ) {
        for ( int row = 0; row < expected.length; row++ ) {
            for ( int col = 0; col < expected[ row ].length; col++ ) {
                assertEquals( expected[ row ][ col ],
                              data.get( row ).get( col ).getRowSpan() );
            }
        }
    }

    @Test
    public void testIndexing_DataCoordinates() {
        //[1][-][3] --> [0,0][0,1][0,2]
        //[1][2][3] --> [1,0][1,1][1,2]
        //[-][2][3] --> [2,0][2,1][2,2]
        data.setMerged( true );
        //Data coordinates are always simply (row, col)
        for ( int row = 0; row < 3; row++ ) {
            for ( int col = 0; col < 3; col++ ) {
                assertCoordinate( data.get( row ).get( col ).getCoordinate(),
                                  row,
                                  col );
            }
        }
        data.addRow( 1,
                     makeRow() );
        assertEquals( 4,
                      data.size() );
        //[1][-][3] --> [0,0][0,1][0,2]
        //[-][-][-] --> [1,0][1,1][1,2]
        //[1][2][3] --> [2,0][2,1][2,2]
        //[-][2][3] --> [3,0][3,1][3,2]
        for ( int row = 0; row < 4; row++ ) {
            for ( int col = 0; col < 3; col++ ) {
                assertCoordinate( data.get( row ).get( col ).getCoordinate(),
                                  row,
                                  col );
            }
        }
    }

    @Test
    public void testIndexing_HtmlCoordinates() {
        //[1][-][3] --> [0,0][0,1][0,2]
        //[1][2][3] --> [0,0][1,0][0,2]
        //[-][2][3] --> [2,0][1,0][0,2]
        data.setMerged( true );
        assertHtmlCoordinates( new int[][][]{
                { { 0, 0 }, { 0, 1 }, { 0, 2 } },
                { { 0, 0 }, { 1, 0 }, { 0, 2 } },
                { { 2, 0 }, { 1, 0 }, { 0, 2 } }
        } );
        data.addRow( 1,
                     makeRow() );
        assertEquals( 4,
                      data.size() );
        //[1][-][3] --> [0,0][0,1][0,2]
        //[-][-][-] --> [1,0][1,1][1,2]
        //[1][2][3] --> [2,0][2,1][2,2]
        //[-][2][3] --> [3,0][2,1][2,2]
        assertHtmlCoordinates( new int[][][]{
                { { 0, 0 }, { 0, 1 }, { 0, 2 } },
                { { 1, 0 }, { 1, 1 }, { 1, 2 } },
                { { 2, 0 }, { 2, 1 }, { 2, 2 } },
                { { 3, 0 }, { 2, 1 }, { 2, 2 } }
        } );
    }

    @Test
    public void testIndexing_PhysicalCoordinates() {
        //[1][-][3] --> [0,0][0,1][0,2] --> [0,0][0,1][0,2]
        //[1][2][3] --> [0,0][1,0][0,2] --> [1,1][-,-][-,-]
        //[-][2][3] --> [2,0][1,0][0,2] --> [2,0][-,-][-,-]
        data.setMerged( true );
        assertPhysicalCoordinates( new int[][]{
                { 0, 0, 0, 0 },
                { 0, 1, 0, 1 },
                { 0, 2, 0, 2 },
                { 1, 0, 1, 1 },
                { 2, 0, 2, 0 }
        } );
        data.addRow( 1,
                     makeRow() );
        assertEquals( 4,
                      data.size() );
        //[1][-][3] --> [0,0][0,1][0,2] --> [0,0][0,1][0,2]
        //[-][-][-] --> [1,0][1,1][1,2] --> [1,0][1,1][1,2]
        //[1][2][3] --> [2,0][2,1][2,2] --> [2,0][2,1][2,2]
        //[-][2][3] --> [3,0][2,1][2,2] --> [3,0][-,-][-,-]
        assertPhysicalCoordinates( new int[][]{
                { 0, 0, 0, 0 },
                { 0, 1, 0, 1 },
                { 0, 2, 0, 2 },
                { 1, 0, 1, 0 },
                { 1, 1, 1, 1 },
                { 1, 2, 1, 2 },
                { 2, 0, 2, 0 },
                { 2, 1, 2, 1 },
                { 2, 2, 2, 2 },
                { 3, 0, 3, 0 }
        } );
    }

    @Test
    public void testIndexing_RowSpans() {
        //[1][-][3] --> [2][1][3]
        //[1][2][3] --> [0][2][0]
        //[-][2][3] --> [1][0][0]
        data.setMerged( true );
        assertRowSpans( new int[][]{
                { 2, 1, 3 },
                { 0, 2, 0 },
                { 1, 0, 0 }
        } );
        data.addRow( 1,
                     makeRow() );
        assertEquals( 4,
                      data.size() );
        //[1][-][3] --> [1][1][1]
        //[-][-][-] --> [1][1][1]
        //[1][2][3] --> [1][2][2]
        //[-][2][3] --> [1][0][0]
        assertRowSpans( new int[][]{
                { 1, 1, 1 },
                { 1, 1, 1 },
                { 1, 2, 2 },
                { 1, 0, 0 }
        } );
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.NavigableSet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
import org.apache.hadoop.hbase.util.Bytes;
/**
* This class is used for the tracking and enforcement of columns and numbers
* of versions during the course of a Get or Scan operation, when explicit
* column qualifiers have been asked for in the query.
*
* With a little magic (see {@link ScanQueryMatcher}), we can use this matcher
* for both scans and gets. The main difference is 'next' and 'done' collapse
* for the scan case (since we see all columns in order), and we only reset
* between rows.
*
* <p>
* This class is utilized by {@link ScanQueryMatcher} mainly through two methods:
* <ul><li>{@link #checkColumn} is called when a Put satisfies all other
* conditions of the query.
* <ul><li>{@link #getNextRowOrNextColumn} is called whenever ScanQueryMatcher
* believes that the current column should be skipped (by timestamp, filter etc.)
* <p>
* These two methods returns a
* {@link org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode}
* to define what action should be taken.
* <p>
* This class is NOT thread-safe as queries are never multi-threaded
*/
@InterfaceAudience.Private
public class ExplicitColumnTracker implements ColumnTracker {
// maximum number of versions to return per column
private final int maxVersions;
// minimum number of versions to keep even if expired
private final int minVersions;
// hint for the tracker about how many KVs we will attempt to search via next()
// before we schedule a (re)seek operation
private final int lookAhead;
/**
* Contains the list of columns that the ExplicitColumnTracker is tracking.
* Each ColumnCount instance also tracks how many versions of the requested
* column have been returned.
*/
private final ColumnCount[] columns;
// position of the current column of interest within 'columns'
private int index;
// current column of interest (columns[index]), or null once exhausted
private ColumnCount column;
/** Keeps track of the latest timestamp included for current column.
* Used to eliminate duplicates. */
private long latestTSOfCurrentColumn;
// oldest (smallest) timestamp still of interest, derived from TTL
private long oldestStamp;
// consecutive SKIPs issued for the current column; compared to lookAhead
// to decide between SKIP and SEEK_NEXT_COL
private int skipCount;
/**
* Default constructor.
* @param columns columns specified by user in query
* @param minVersions minimum number of versions to keep
* @param maxVersions maximum versions to return per column
* @param oldestUnexpiredTS the oldest timestamp we are interested in,
* based on TTL
* @param lookAhead number of KeyValues to look ahead via next before
* (re)seeking
*/
public ExplicitColumnTracker(NavigableSet<byte[]> columns, int minVersions,
int maxVersions, long oldestUnexpiredTS, int lookAhead) {
this.maxVersions = maxVersions;
this.minVersions = minVersions;
this.lookAhead = lookAhead;
this.oldestStamp = oldestUnexpiredTS;
this.columns = new ColumnCount[columns.size()];
int i=0;
for(byte [] column : columns) {
this.columns[i++] = new ColumnCount(column);
}
reset();
}
/**
* Done when there are no more columns to match against.
*/
public boolean done() {
return this.index >= columns.length;
}
/** Returns the current column of interest, or null once exhausted. */
public ColumnCount getColumnHint() {
return this.column;
}
/**
* {@inheritDoc}
*/
@Override
public ScanQueryMatcher.MatchCode checkColumn(byte [] bytes, int offset,
int length, byte type) {
// delete markers should never be passed to an
// *Explicit*ColumnTracker
assert !KeyValue.isDelete(type);
do {
// No more columns left, we are done with this query
if(done()) {
return ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW; // done_row
}
// No more columns to match against, done with storefile
if(this.column == null) {
return ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW; // done_row
}
// Compare specific column to current column
int ret = Bytes.compareTo(column.getBuffer(), column.getOffset(),
column.getLength(), bytes, offset, length);
// Column Matches. Return include code. The caller would call checkVersions
// to limit the number of versions.
if(ret == 0) {
return ScanQueryMatcher.MatchCode.INCLUDE;
}
resetTS();
if (ret > 0) {
// The current KV is smaller than the column the ExplicitColumnTracker
// is interested in, so seek to that column of interest.
// SKIP up to lookAhead times before issuing a (re)seek, since a few
// next() calls can be cheaper than a seek.
return this.skipCount++ < this.lookAhead ? ScanQueryMatcher.MatchCode.SKIP
: ScanQueryMatcher.MatchCode.SEEK_NEXT_COL;
}
// The current KV is bigger than the column the ExplicitColumnTracker
// is interested in. That means there is no more data for the column
// of interest. Advance the ExplicitColumnTracker state to next
// column of interest, and check again.
if (ret <= -1) {
++this.index;
this.skipCount = 0;
if (done()) {
// No more to match, do not include, done with this row.
return ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW; // done_row
}
// Loop around and compare against the new column of interest.
this.column = this.columns[this.index];
}
} while(true);
}
/**
* Counts a version of the current column and decides whether to include it
* and whether to advance to the next column or row. A timestamp equal to
* the previously included one is treated as a duplicate and skipped.
*/
@Override
public ScanQueryMatcher.MatchCode checkVersions(byte[] bytes, int offset, int length,
long timestamp, byte type, boolean ignoreCount) throws IOException {
assert !KeyValue.isDelete(type);
if (ignoreCount) return ScanQueryMatcher.MatchCode.INCLUDE;
// Check if it is a duplicate timestamp
if (sameAsPreviousTS(timestamp)) {
// If duplicate, skip this Key
return ScanQueryMatcher.MatchCode.SKIP;
}
int count = this.column.increment();
// Done once maxVersions reached, or minVersions satisfied and expired
if (count >= maxVersions || (count >= minVersions && isExpired(timestamp))) {
// Done with versions for this column
++this.index;
this.skipCount = 0;
resetTS();
if (done()) {
// We have served all the requested columns.
this.column = null;
return ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW;
}
// We are done with current column; advance to next column
// of interest.
this.column = this.columns[this.index];
return ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL;
}
setTS(timestamp);
return ScanQueryMatcher.MatchCode.INCLUDE;
}
// Called between every row. Rewinds to the first column of interest and
// clears all per-row counters.
public void reset() {
this.index = 0;
this.skipCount = 0;
this.column = this.columns[this.index];
for(ColumnCount col : this.columns) {
col.setCount(0);
}
resetTS();
}
private void resetTS() {
latestTSOfCurrentColumn = HConstants.LATEST_TIMESTAMP;
}
private void setTS(long timestamp) {
latestTSOfCurrentColumn = timestamp;
}
private boolean sameAsPreviousTS(long timestamp) {
return timestamp == latestTSOfCurrentColumn;
}
private boolean isExpired(long timestamp) {
return timestamp < oldestStamp;
}
/**
* This method is used to inform the column tracker that we are done with
* this column. We may get this information from external filters or
* timestamp range and we then need to indicate this information to
* tracker. It is required only in case of ExplicitColumnTracker.
* @param bytes
* @param offset
* @param length
*/
public void doneWithColumn(byte [] bytes, int offset, int length) {
while (this.column != null) {
int compare = Bytes.compareTo(column.getBuffer(), column.getOffset(),
column.getLength(), bytes, offset, length);
resetTS();
if (compare <= 0) {
++this.index;
this.skipCount = 0;
if (done()) {
// Will not hit any more columns in this storefile
this.column = null;
} else {
this.column = this.columns[this.index];
}
// compare < 0: the finished column sorts before the new current
// column, so keep advancing until we reach or pass it.
if (compare <= -1)
continue;
}
return;
}
}
/**
* Marks the given column done (see {@link #doneWithColumn}) and returns
* SEEK_NEXT_COL if more columns of interest remain, else SEEK_NEXT_ROW.
*/
public MatchCode getNextRowOrNextColumn(byte[] bytes, int offset,
int qualLength) {
doneWithColumn(bytes, offset,qualLength);
if (getColumnHint() == null) {
return MatchCode.SEEK_NEXT_ROW;
} else {
return MatchCode.SEEK_NEXT_COL;
}
}
/**
* True when no minimum version count is required and the timestamp is
* older than the oldest timestamp of interest.
*/
public boolean isDone(long timestamp) {
return minVersions <= 0 && isExpired(timestamp);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kr.co.bitnine.octopus;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
import java.sql.SQLException;
abstract class AbstractDatabaseMetaData implements DatabaseMetaData {
// All methods below are unimplemented stubs: each throws a SQLException
// wrapping an UnsupportedOperationException, letting callers distinguish
// "not implemented" from a genuine database error via getCause().
@Override
public boolean allProceduresAreCallable() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean allTablesAreSelectable() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getURL() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getUserName() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean isReadOnly() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean nullsAreSortedHigh() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean nullsAreSortedLow() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean nullsAreSortedAtStart() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean nullsAreSortedAtEnd() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getDatabaseProductName() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getDatabaseProductVersion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getDriverName() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getDriverVersion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
// getDriverMajorVersion()/getDriverMinorVersion() are declared without a
// throws clause (see their signatures here), so they cannot signal
// "not implemented" via SQLException; 0 is returned as a placeholder.
@Override
public int getDriverMajorVersion() {
return 0;
}
@Override
public int getDriverMinorVersion() {
return 0;
}
@Override
public boolean usesLocalFiles() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean usesLocalFilePerTable() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsMixedCaseIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean storesUpperCaseIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean storesLowerCaseIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean storesMixedCaseIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean storesUpperCaseQuotedIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean storesLowerCaseQuotedIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean storesMixedCaseQuotedIdentifiers() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getIdentifierQuoteString() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getSQLKeywords() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getNumericFunctions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getStringFunctions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getSystemFunctions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getTimeDateFunctions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getSearchStringEscape() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getExtraNameCharacters() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsAlterTableWithAddColumn() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsAlterTableWithDropColumn() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsColumnAliasing() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean nullPlusNonNullIsNull() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsConvert() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsConvert(int i, int i1) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsTableCorrelationNames() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsDifferentTableCorrelationNames() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsExpressionsInOrderBy() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsOrderByUnrelated() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsGroupBy() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsGroupByUnrelated() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsGroupByBeyondSelect() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsLikeEscapeClause() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsMultipleResultSets() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsMultipleTransactions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsNonNullableColumns() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsMinimumSQLGrammar() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCoreSQLGrammar() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsExtendedSQLGrammar() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsANSI92EntryLevelSQL() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsANSI92IntermediateSQL() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsANSI92FullSQL() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsIntegrityEnhancementFacility() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsOuterJoins() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsFullOuterJoins() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsLimitedOuterJoins() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getSchemaTerm() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getProcedureTerm() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getCatalogTerm() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean isCatalogAtStart() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public String getCatalogSeparator() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSchemasInDataManipulation() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSchemasInProcedureCalls() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSchemasInTableDefinitions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSchemasInIndexDefinitions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCatalogsInDataManipulation() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCatalogsInProcedureCalls() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCatalogsInTableDefinitions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCatalogsInIndexDefinitions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsPositionedDelete() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsPositionedUpdate() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSelectForUpdate() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsStoredProcedures() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSubqueriesInComparisons() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSubqueriesInExists() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSubqueriesInIns() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSubqueriesInQuantifieds() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsCorrelatedSubqueries() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsUnion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsUnionAll() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsOpenCursorsAcrossCommit() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsOpenCursorsAcrossRollback() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsOpenStatementsAcrossCommit() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsOpenStatementsAcrossRollback() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxBinaryLiteralLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxCharLiteralLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxColumnNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxColumnsInGroupBy() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxColumnsInIndex() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxColumnsInOrderBy() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxColumnsInSelect() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxColumnsInTable() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxConnections() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxCursorNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxIndexLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxSchemaNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxProcedureNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxCatalogNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxRowSize() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean doesMaxRowSizeIncludeBlobs() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxStatementLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxStatements() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxTableNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxTablesInSelect() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getMaxUserNameLength() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getDefaultTransactionIsolation() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsTransactions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsTransactionIsolationLevel(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsDataManipulationTransactionsOnly() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean dataDefinitionCausesTransactionCommit() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean dataDefinitionIgnoredInTransactions() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getProcedures(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getProcedureColumns(String s, String s1, String s2, String s3) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getTables(String s, String s1, String s2, String[] strings) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getSchemas() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getSchemas(String s, String s1) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getCatalogs() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getTableTypes() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getColumns(String s, String s1, String s2, String s3) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getColumnPrivileges(String s, String s1, String s2, String s3) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getTablePrivileges(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getBestRowIdentifier(String s, String s1, String s2, int i, boolean b) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getVersionColumns(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getPrimaryKeys(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getImportedKeys(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getExportedKeys(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getCrossReference(String s, String s1, String s2, String s3, String s4, String s5) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getTypeInfo() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getIndexInfo(String s, String s1, String s2, boolean b, boolean b1) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsResultSetType(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsResultSetConcurrency(int i, int i1) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean ownUpdatesAreVisible(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean ownDeletesAreVisible(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean ownInsertsAreVisible(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean othersUpdatesAreVisible(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean othersDeletesAreVisible(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean othersInsertsAreVisible(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean updatesAreDetected(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean deletesAreDetected(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean insertsAreDetected(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsBatchUpdates() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getUDTs(String s, String s1, String s2, int[] ints) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public Connection getConnection() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsSavepoints() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsNamedParameters() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsMultipleOpenResults() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsGetGeneratedKeys() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getSuperTypes(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getSuperTables(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getAttributes(String s, String s1, String s2, String s3) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsResultSetHoldability(int i) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getResultSetHoldability() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getDatabaseMajorVersion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getDatabaseMinorVersion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getJDBCMajorVersion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getJDBCMinorVersion() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public int getSQLStateType() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean locatorsUpdateCopy() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsStatementPooling() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public RowIdLifetime getRowIdLifetime() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean autoCommitFailureClosesAllResultSets() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getClientInfoProperties() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getFunctions(String s, String s1, String s2) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getFunctionColumns(String s, String s1, String s2, String s3) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public ResultSet getPseudoColumns(String s, String s1, String s2, String s3) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean generatedKeyAlwaysReturned() throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public <T> T unwrap(Class<T> aClass) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
@Override
public boolean isWrapperFor(Class<?> aClass) throws SQLException {
throw new SQLException("not implemented", new UnsupportedOperationException());
}
}
| |
package cz.metacentrum.perun.core.impl.modules.attributes;
import cz.metacentrum.perun.core.api.Attribute;
import cz.metacentrum.perun.core.api.AttributeDefinition;
import cz.metacentrum.perun.core.api.AttributesManager;
import cz.metacentrum.perun.core.api.Group;
import cz.metacentrum.perun.core.api.Resource;
import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ConsistencyErrorException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException;
import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException;
import cz.metacentrum.perun.core.impl.PerunSessionImpl;
import cz.metacentrum.perun.core.implApi.modules.attributes.ResourceAttributesModuleAbstract;
import cz.metacentrum.perun.core.implApi.modules.attributes.ResourceAttributesModuleImplApi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Resource unixGID-namespace attribute.
*
* @author Michal Stava stavamichal@gmail.com
*/
public class urn_perun_resource_attribute_def_def_unixGID_namespace extends ResourceAttributesModuleAbstract implements ResourceAttributesModuleImplApi {
// Module logger.
private final static Logger log = LoggerFactory.getLogger(urn_perun_resource_attribute_def_def_unixGID_namespace.class);
// Group attribute definition holding the unix GID of a group; the concrete namespace is appended after the trailing ':'.
private static final String A_G_unixGID_namespace = AttributesManager.NS_GROUP_ATTR_DEF + ":unixGID-namespace";
// Resource attribute definition holding the unix group name of a resource (per namespace).
private static final String A_R_unixGroupName_namespace = AttributesManager.NS_RESOURCE_ATTR_DEF + ":unixGroupName-namespace";
// Group attribute definition holding the unix group name of a group (per namespace).
private static final String A_G_unixGroupName_namespace = AttributesManager.NS_GROUP_ATTR_DEF + ":unixGroupName-namespace";
// Entityless attribute keyed by namespace; its map value records used GIDs, with "D"+gid entries marking depleted GIDs.
private static final String A_E_usedGids = AttributesManager.NS_ENTITYLESS_ATTR_DEF + ":usedGids";
/**
 * Proposes a unixGID-namespace value for the given resource.
 *
 * <p>If the resource already has unixGroupName attributes (in any namespace),
 * every group and resource sharing one of those names is collected and, when
 * all of them agree on a single GID in this GID namespace, that common GID is
 * reused. Otherwise a free GID is requested from the namespace; if none is
 * available the attribute is returned unfilled (with a warning logged).</p>
 *
 * @param sess perun session
 * @param resource resource to fill the GID for
 * @param attributeDefinition definition of the unixGID-namespace attribute
 * @return attribute with the proposed GID, or with a null value when no GID
 *         could be determined
 * @throws InternalErrorException on internal error
 * @throws WrongAttributeAssignmentException when a consulted attribute is not
 *         of the expected entity type
 */
@Override
public Attribute fillAttribute(PerunSessionImpl sess, Resource resource, AttributeDefinition attributeDefinition) throws InternalErrorException, WrongAttributeAssignmentException {
    Attribute attribute = new Attribute(attributeDefinition);
    String gidNamespace = attribute.getFriendlyNameParameter();

    // All unixGroupName attributes of this resource, across every namespace.
    List<Attribute> groupNamesOfResource = sess.getPerunBl().getAttributesManagerBl().getAllAttributesStartWithNameWithoutNullValue(sess, resource, A_R_unixGroupName_namespace + ":");

    if (!groupNamesOfResource.isEmpty()) {
        // Collect every group and resource that shares one of those group names in the same namespace.
        List<Group> groupsWithSameGroupNameInSameNamespace = new ArrayList<>();
        List<Resource> resourcesWithSameGroupNameInSameNamespace = new ArrayList<>();
        for (Attribute attr : groupNamesOfResource) {
            Attribute groupNameOfGroup;
            try {
                // Build the group-side counterpart of the resource's unixGroupName attribute.
                groupNameOfGroup = new Attribute(sess.getPerunBl().getAttributesManagerBl().getAttributeDefinition(sess, A_G_unixGroupName_namespace + ":" + attr.getFriendlyNameParameter()));
            } catch (AttributeNotExistsException ex) {
                throw new ConsistencyErrorException("AttributeDefinition for group_def_unixGroupName-namespace:" + attr.getFriendlyNameParameter() + " must exists", ex);
            }
            groupNameOfGroup.setValue(attr.getValue());
            groupsWithSameGroupNameInSameNamespace.addAll(sess.getPerunBl().getGroupsManagerBl().getGroupsByAttribute(sess, groupNameOfGroup));
            resourcesWithSameGroupNameInSameNamespace.addAll(sess.getPerunBl().getResourcesManagerBl().getResourcesByAttribute(sess, attr));
        }
        // If all collected groups and resources already agree on one GID in this namespace, reuse it.
        Integer commonGID = sess.getPerunBl().getModulesUtilsBl().getCommonGIDOfGroupsWithSameNameInSameNamespace(sess, groupsWithSameGroupNameInSameNamespace, gidNamespace, null);
        commonGID = sess.getPerunBl().getModulesUtilsBl().getCommonGIDOfResourcesWithSameNameInSameNamespace(sess, resourcesWithSameGroupNameInSameNamespace, gidNamespace, commonGID);
        if (commonGID != null) {
            attribute.setValue(commonGID);
            return attribute;
        }
    }

    // No common GID exists: try to allocate a fresh one from the namespace.
    try {
        Integer freeGID = sess.getPerunBl().getModulesUtilsBl().getFreeGID(sess, attribute);
        if (freeGID == null) {
            // No free GID available; return the attribute unfilled.
            log.warn("Free unix gid not found for resource:[" + resource + "] in unix group namespace " + gidNamespace);
        } else if (freeGID != 0) {
            // Non-zero means a usable free GID was found.
            // (Was the equivalent `freeGID > 0 || freeGID < 0`.)
            attribute.setValue(freeGID);
        }
        return attribute;
    } catch (AttributeNotExistsException ex) {
        throw new ConsistencyErrorException(ex);
    }
}
/**
 * Semantic check of a resource's unixGID-namespace attribute.
 *
 * Verifies that the GID value is set, lies within the allowed range for its namespace,
 * is not marked as depleted in the entityless usedGids bookkeeping attribute, and does
 * not collide with any group or other resource that carries the same GID in the same
 * namespace but a different unix group name.
 *
 * @param sess perun session
 * @param resource resource the attribute is set on
 * @param attribute the unixGID-namespace attribute being checked
 * @throws WrongAttributeValueException when the GID value is null
 * @throws WrongReferenceAttributeValueException when the GID is depleted or conflicts
 *         with a group/resource holding the same GID under a different group name
 */
@Override
public void checkAttributeSemantics(PerunSessionImpl sess, Resource resource, Attribute attribute) throws InternalErrorException, WrongAttributeValueException, WrongReferenceAttributeValueException, WrongAttributeAssignmentException{
	try{
		// The namespace is encoded as the friendly-name parameter (attribute-def:def:unixGID-namespace:<ns>).
		String gidNamespace = attribute.getFriendlyNameParameter();
		//Special behaviour if gid is null
		Integer attrValue;
		if(attribute.getValue() == null) {
			throw new WrongAttributeValueException(attribute, resource, "Unix GID must be set");
		} else {
			attrValue = (Integer) attribute.getValue();
		}
		//Check if GID is within allowed range
		sess.getPerunBl().getModulesUtilsBl().checkIfGIDIsWithinRange(sess, attribute);
		//check if gid is not already depleted
		Attribute usedGids = sess.getPerunBl().getAttributesManagerBl().getAttribute(sess, gidNamespace, A_E_usedGids);
		//null in value means there is no depleted or used gids
		if(usedGids.getValue() != null) {
			// assumes the entityless attribute value is a Map<String,String> — unchecked cast by design
			Map<String, String> usedGidsValue = (Map<String,String>) usedGids.getValue();
			//Dx, where x is GID means depleted value for GID x
			if(usedGidsValue.containsKey("D" + attrValue.toString())) {
				throw new WrongReferenceAttributeValueException(attribute, usedGids, resource, null, gidNamespace, null, "This GID is already depleted.");
			}
		}
		//Prepare lists for all groups and resources with same GID in the same namespace
		//Prepare attributes for searching through groups and resources
		Attribute resourceGIDAttribute = attribute;
		// Group-side counterpart of this GID attribute, carrying the same value, used for lookup.
		Attribute groupGIDAttribute = new Attribute(sess.getPerunBl().getAttributesManagerBl().getAttributeDefinition(sess, A_G_unixGID_namespace + ":" + gidNamespace));
		groupGIDAttribute.setValue(resourceGIDAttribute.getValue());
		//Fill lists of Groups and Resources by data
		List<Group> allGroupsWithSameGIDInSameNamespace = new ArrayList<>(sess.getPerunBl().getGroupsManagerBl().getGroupsByAttribute(sess, groupGIDAttribute));
		List<Resource> allResourcesWithSameGIDInSameNamespace = new ArrayList<>(sess.getPerunBl().getResourcesManagerBl().getResourcesByAttribute(sess, resourceGIDAttribute));
		//remove this resource (sharing a GID with itself is not a conflict)
		allResourcesWithSameGIDInSameNamespace.remove(resource);
		//Prepare list of GroupName attributes of this resource
		List <Attribute> groupNamesOfResource = sess.getPerunBl().getAttributesManagerBl().getAllAttributesStartWithNameWithoutNullValue(sess, resource, A_R_unixGroupName_namespace + ":");
		//Searching through groups: same GID must not coexist with a different group name in any name-namespace of this resource
		if(!allGroupsWithSameGIDInSameNamespace.isEmpty()) {
			for(Group g: allGroupsWithSameGIDInSameNamespace) {
				for(Attribute a: groupNamesOfResource) {
					//Prepare group version of this group attribute
					Attribute groupGroupName = new Attribute(sess.getPerunBl().getAttributesManagerBl().getAttributeDefinition(sess, A_G_unixGroupName_namespace + ":" + a.getFriendlyNameParameter()));
					groupGroupName.setValue(a.getValue());
					// compare > 0 => the group has the same attribute with a DIFFERENT value
					int compare = sess.getPerunBl().getModulesUtilsBl().haveTheSameAttributeWithTheSameNamespace(sess, g, groupGroupName);
					if(compare > 0) {
						//This is problem, there is the same attribute but have other value
						throw new WrongReferenceAttributeValueException(attribute, a, "There is a group with same GID (namespace: " + gidNamespace + ") and different unix group name (namespace: " + a.getFriendlyNameParameter() + "). " + g + " " + resource);
					}
					//Other possibilities are not problem, less than 0 mean that same attribute not exists, and 0 mean that attribute exists but have same value
				}
			}
		}
		//Searching through resources: identical rule applied to other resources sharing this GID
		if(!allResourcesWithSameGIDInSameNamespace.isEmpty()) {
			for(Resource r: allResourcesWithSameGIDInSameNamespace) {
				for(Attribute a: groupNamesOfResource) {
					int compare = sess.getPerunBl().getModulesUtilsBl().haveTheSameAttributeWithTheSameNamespace(sess, r, a);
					if(compare > 0) {
						//This is problem, there is the same attribute but have other value
						throw new WrongReferenceAttributeValueException(attribute, a, "There is a resource with same GID (namespace: " + gidNamespace + ") and different unix group name (namespace: " + a.getFriendlyNameParameter() + "). " + r + " " + resource);
					}
					//Other possibilities are not problem, less than 0 mean that same attribute not exists, and 0 mean that attribute exists but have same value
				}
			}
		}
	} catch(AttributeNotExistsException ex) {
		// Attribute definitions referenced above are expected to exist; absence is a data-consistency error.
		throw new ConsistencyErrorException(ex);
	}
}
/**
 * Keeps the per-namespace usedGids bookkeeping attribute in sync after this resource's
 * unixGID-namespace attribute changed.
 *
 * The map uses "R&lt;resourceId&gt;" keys for live assignments and "D&lt;gid&gt;" keys to
 * remember GIDs that were released by their last holder (depleted GIDs must not be reused).
 *
 * @param session perun session
 * @param resource resource whose GID attribute changed
 * @param attribute the new attribute value (null value means the GID was removed)
 */
@Override
public void changedAttributeHook(PerunSessionImpl session, Resource resource, Attribute attribute) throws InternalErrorException, WrongReferenceAttributeValueException {
	String namespace = attribute.getFriendlyNameParameter();

	// Fetch the bookkeeping attribute with the "ForUpdate" variant: it locks the row in the
	// attribute-values table, so keep the critical section that follows as short as possible.
	Attribute usedGidsAttr;
	try {
		usedGidsAttr = session.getPerunBl().getAttributesManagerBl().getEntitylessAttributeForUpdate(session, namespace, A_E_usedGids);
	} catch (AttributeNotExistsException ex) {
		throw new ConsistencyErrorException(ex);
	}

	// A null value simply means no GID has been used or depleted yet in this namespace.
	Map<String, String> gidMap = usedGidsAttr.getValue() == null
			? new LinkedHashMap<>()
			: (Map<String, String>) usedGidsAttr.getValue();

	String resourceKey = "R" + resource.getId();
	String previousGid = gidMap.get(resourceKey);

	if (attribute.getValue() == null) {
		// GID removed from this resource: drop its record and, when no other entry still
		// maps to the same GID, remember it as depleted.
		if (previousGid != null) {
			gidMap.remove(resourceKey);
			if (!gidMap.containsValue(previousGid)) {
				gidMap.put("D" + previousGid, previousGid);
			}
		}
	} else {
		// GID assigned or changed: record the new value, then mark the replaced GID as
		// depleted unless some other entry still uses it.
		String assignedGid = ((Integer) attribute.getValue()).toString();
		gidMap.put(resourceKey, assignedGid);
		if (previousGid != null && !gidMap.containsValue(previousGid)) {
			gidMap.put("D" + previousGid, previousGid);
		}
	}

	// Persist the updated map back into the entityless attribute.
	usedGidsAttr.setValue(gidMap);
	try {
		session.getPerunBl().getAttributesManagerBl().setAttribute(session, namespace, usedGidsAttr);
	} catch (WrongAttributeValueException ex) {
		throw new WrongReferenceAttributeValueException(attribute, usedGidsAttr, ex);
	} catch (WrongAttributeAssignmentException ex) {
		throw new InternalErrorException(ex);
	}
}
/**
 * Returns the attributes this module depends on.
 *
 * Deliberately empty: the natural candidates (A_R_unixGroupName_namespace:*,
 * A_G_unixGID_namespace:*, A_G_unixGroupName_namespace:*) are disallowed because of
 * cross-checks between modules, and A_E_usedGids is temporarily disabled for
 * performance reasons.
 *
 * @return mutable, empty list of dependencies
 */
@Override
public List<String> getDependencies() {
	return new ArrayList<>();
}
/*public AttributeDefinition getAttributeDefinition() {
AttributeDefinition attr = new AttributeDefinition();
attr.setNamespace(AttributesManager.NS_RESOURCE_ATTR_DEF);
attr.setFriendlyName("unixGID-namespace");
attr.setType(Integer.class.getName());
attr.setDescription("Unix GID namespace.");
return attr;
}*/
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import javax.cache.Cache;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheEntry;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.QueryIndex;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.cache.query.annotations.QuerySqlField;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
/**
 * Verifies that cache keys consistently surface as {@link BinaryObject}s while several
 * threads concurrently update two caches (ATOMIC and TRANSACTIONAL) and read them back
 * via {@code getEntry}, iterator scans, SQL fields queries and SQL queries.
 */
@SuppressWarnings("unchecked")
public class CacheBinaryKeyConcurrentQueryTest extends GridCommonAbstractTest {
    /** Shared in-memory IP finder so all test nodes join a single discovery ring. */
    private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Number of server nodes started for the test. */
    private static final int NODES = 3;

    /** Number of keys preloaded into each cache. */
    private static final int KEYS = 1000;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);

        // Clear any marshaller inherited from the base configuration so the default one is
        // used — NOTE(review): presumably the binary marshaller, so keys stay BinaryObjects.
        cfg.setMarshaller(null);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        // Start the whole topology once; tests in this class reuse it.
        startGridsMultiThreaded(NODES);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();

        super.afterTestsStopped();
    }

    /**
     * Preloads two caches (one ATOMIC, one TRANSACTIONAL) and runs the concurrent
     * update/query workload against both, failing if any worker thread throws.
     *
     * @throws Exception If failed.
     */
    public void testPutAndQueries() throws Exception {
        Ignite ignite = ignite(0);

        IgniteCache cache1 = ignite.createCache(cacheConfiguration("cache1", ATOMIC));
        IgniteCache cache2 = ignite.createCache(cacheConfiguration("cache2", TRANSACTIONAL));

        insertData(ignite, cache1.getName());
        insertData(ignite, cache2.getName());

        IgniteInternalFuture<?> fut1 = startUpdate(cache1.getName());
        IgniteInternalFuture<?> fut2 = startUpdate(cache2.getName());

        // get() rethrows any assertion error or exception raised inside the worker threads.
        fut1.get();
        fut2.get();
    }

    /**
     * Starts {@code NODES * 2} threads that, for 30 seconds, randomly pick one of five
     * operations (point lookup, iterator scan, SQL fields query, SQL query, ordered SQL
     * query), each asserting that returned keys are {@link BinaryObject}s and writing
     * fresh values back through those keys.
     *
     * @param cacheName Cache name.
     * @return Future.
     */
    private IgniteInternalFuture<?> startUpdate(final String cacheName) {
        final long stopTime = System.currentTimeMillis() + 30_000;

        // Round-robins worker threads across the started nodes.
        final AtomicInteger idx = new AtomicInteger();

        return GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
            @Override public Void call() {
                ThreadLocalRandom rnd = ThreadLocalRandom.current();

                // withKeepBinary(): keys/values come back as BinaryObject, not deserialized.
                IgniteCache cache = ignite(idx.getAndIncrement() % NODES).cache(cacheName).withKeepBinary();

                while (System.currentTimeMillis() < stopTime) {
                    switch (rnd.nextInt(5)) {
                        case 0: {
                            // Point lookup via a plain (non-binary) key.
                            TestKey key = new TestKey(rnd.nextInt(KEYS));

                            CacheEntry e = cache.getEntry(key);

                            assertNotNull(e);
                            assertTrue(e.getKey() instanceof BinaryObject);

                            cache.put(e.getKey(), new TestValue(rnd.nextInt(KEYS)));

                            break;
                        }

                        case 1: {
                            // Iterator scan over (up to) the first 100 entries.
                            Iterator<Cache.Entry> it = cache.iterator();

                            for (int i = 0; i < 100 && it.hasNext(); i++) {
                                Cache.Entry e = it.next();

                                assertTrue(e.getKey() instanceof BinaryObject);

                                cache.put(e.getKey(), new TestValue(rnd.nextInt(KEYS)));
                            }

                            break;
                        }

                        case 2: {
                            // SQL fields query returning the raw _key column as BinaryObject.
                            SqlFieldsQuery qry = new SqlFieldsQuery("select _key " +
                                "from \"" + cache.getName() + "\".TestValue where id=?");

                            qry.setArgs(rnd.nextInt(KEYS));

                            List<List> res = cache.query(qry).getAll();

                            assertEquals(1, res.size());

                            BinaryObject key = (BinaryObject)res.get(0).get(0);

                            cache.put(key, new TestValue(rnd.nextInt(KEYS)));

                            break;
                        }

                        case 3: {
                            // SQL query by the indexed id field; exactly one row per id.
                            SqlQuery qry = new SqlQuery("TestValue", "id=?");

                            qry.setArgs(rnd.nextInt(KEYS));

                            List<Cache.Entry> res = cache.query(qry).getAll();

                            assertEquals(1, res.size());

                            break;
                        }

                        case 4: {
                            // Ordered full query; every returned key must resolve via get().
                            SqlQuery qry = new SqlQuery("TestValue", "order by id");

                            int cnt = 0;

                            for (Cache.Entry e : (Iterable<Cache.Entry>)cache.query(qry)) {
                                assertNotNull(cache.get(e.getKey()));

                                cnt++;
                            }

                            assertTrue(cnt > 0);

                            break;
                        }

                        default:
                            fail();
                    }
                }

                return null;
            }
        }, NODES * 2, "test-thread");
    }

    /**
     * Streams {@code KEYS} (TestKey, TestValue) pairs into the given cache.
     *
     * @param ignite Node.
     * @param cacheName Cache name.
     */
    private void insertData(Ignite ignite, String cacheName) {
        try (IgniteDataStreamer streamer = ignite.dataStreamer(cacheName)) {
            for (int i = 0; i < KEYS; i++)
                streamer.addData(new TestKey(i), new TestValue(i));
        }
    }

    /**
     * Builds a partitioned, fully synchronous cache configuration with one backup and
     * SQL metadata (indexed columns {@code id} and {@code val}) for TestKey/TestValue.
     *
     * @param name Cache name.
     * @param atomicityMode Cache atomicity mode.
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration(String name, CacheAtomicityMode atomicityMode) {
        CacheConfiguration ccfg = new CacheConfiguration();

        ccfg.setName(name);
        ccfg.setCacheMode(PARTITIONED);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);
        ccfg.setAtomicityMode(atomicityMode);
        ccfg.setBackups(1);

        QueryEntity qryEntity = new QueryEntity();

        qryEntity.setKeyType(TestKey.class.getName());
        qryEntity.setValueType(TestValue.class.getName());
        qryEntity.addQueryField("id", Integer.class.getName(), null);
        qryEntity.addQueryField("val", Integer.class.getName(), null);
        qryEntity.setIndexes(F.asList(new QueryIndex("id"), new QueryIndex("val")));

        ccfg.setQueryEntities(F.asList(qryEntity));

        return ccfg;
    }

    /**
     * Test key with an indexed SQL field.
     */
    static class TestKey {
        /** Key identity; also the indexed SQL column {@code id}. */
        @QuerySqlField(index = true)
        private int id;

        /**
         * @param id ID.
         */
        public TestKey(int id) {
            this.id = id;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            TestKey testKey = (TestKey)o;

            return id == testKey.id;
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return id;
        }
    }

    /**
     * Test value with an indexed SQL field.
     */
    static class TestValue {
        /** Indexed SQL column {@code val}. */
        @QuerySqlField(index = true)
        private int val;

        /**
         * @param val Value.
         */
        public TestValue(int val) {
            this.val = val;
        }
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* InstanceTypeSetItemType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
 * InstanceTypeSetItemType bean class.
 *
 * <p>NOTE(review): this class was auto-generated from WSDL by the Apache Axis2 ADB
 * code generator (1.5.6). Prefer regenerating it from the WSDL over hand-editing;
 * comments below only describe what the generated code does.
 */
public class InstanceTypeSetItemType
        implements org.apache.axis2.databinding.ADBBean{
        /* This type was generated from the piece of schema that had
        name = InstanceTypeSetItemType
        Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
        Namespace Prefix = ns1
        */

        // Returns the canonical "ns1" prefix for the EC2 namespace, otherwise a fresh unique prefix.
        private static java.lang.String generatePrefix(java.lang.String namespace) {
            if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
                return "ns1";
            }
            return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }

        /**
        * field for InstanceType
        */
        protected java.lang.String localInstanceType ;

        /**
        * Auto generated getter method
        * @return java.lang.String
        */
        public java.lang.String getInstanceType(){
            return localInstanceType;
        }

        /**
        * Auto generated setter method
        * @param param InstanceType
        */
        public void setInstanceType(java.lang.String param){
            this.localInstanceType=param;
        }

        /**
        * isReaderMTOMAware
        * @return true if the reader supports MTOM
        */
        public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
            boolean isReaderMTOMAware = false;
            try{
                isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
            }catch(java.lang.IllegalArgumentException e){
                // Reader does not understand the property at all; treat as not MTOM-aware.
                isReaderMTOMAware = false;
            }
            return isReaderMTOMAware;
        }

        /**
        * Wraps this bean in a lazily-serialized OMElement (serialization happens on demand
        * via the anonymous ADBDataSource below).
        * @param parentQName
        * @param factory
        * @return org.apache.axiom.om.OMElement
        */
        public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
            org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        InstanceTypeSetItemType.this.serialize(parentQName,factory,xmlWriter);
                    }
                };
            return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
        }

        // Convenience overload: serialize without writing an xsi:type attribute.
        public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
            serialize(parentQName,factory,xmlWriter,false);
        }

        // Writes this bean as XML: the parent element, optional xsi:type, then the
        // mandatory <instanceType> child element.
        public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
            boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

            java.lang.String prefix = null;
            java.lang.String namespace = null;

            prefix = parentQName.getPrefix();
            namespace = parentQName.getNamespaceURI();

            // Open the parent element, binding a namespace prefix if the writer has none yet.
            if ((namespace != null) && (namespace.trim().length() > 0)) {
                java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
                if (writerPrefix != null) {
                    xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
                } else {
                    if (prefix == null) {
                        prefix = generatePrefix(namespace);
                    }

                    xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                }
            } else {
                xmlWriter.writeStartElement(parentQName.getLocalPart());
            }

            // Optionally emit xsi:type="ns:InstanceTypeSetItemType" for polymorphic payloads.
            if (serializeType){
                java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
                if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                    writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":InstanceTypeSetItemType",
                        xmlWriter);
                } else {
                    writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "InstanceTypeSetItemType",
                        xmlWriter);
                }
            }

            // Write the <instanceType> child in the EC2 namespace.
            namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
            if (! namespace.equals("")) {
                prefix = xmlWriter.getPrefix(namespace);

                if (prefix == null) {
                    prefix = generatePrefix(namespace);

                    xmlWriter.writeStartElement(prefix,"instanceType", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                } else {
                    xmlWriter.writeStartElement(namespace,"instanceType");
                }
            } else {
                xmlWriter.writeStartElement("instanceType");
            }

            if (localInstanceType==null){
                // write the nil attribute
                throw new org.apache.axis2.databinding.ADBException("instanceType cannot be null!!");
            }else{
                xmlWriter.writeCharacters(localInstanceType);
            }

            xmlWriter.writeEndElement();

            xmlWriter.writeEndElement();
        }

        /**
        * Util method to write an attribute with the ns prefix
        */
        private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
            if (xmlWriter.getPrefix(namespace) == null) {
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }

        /**
        * Util method to write an attribute without the ns prefix
        */
        private void writeAttribute(java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
            if (namespace.equals(""))
            {
                xmlWriter.writeAttribute(attName,attValue);
            }
            else
            {
                registerPrefix(xmlWriter, namespace);
                xmlWriter.writeAttribute(namespace,attName,attValue);
            }
        }

        /**
        * Util method to write a QName-valued attribute (value rendered as "prefix:localPart").
        */
        private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
            javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

            java.lang.String attributeNamespace = qname.getNamespaceURI();
            java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
            if (attributePrefix == null) {
                attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
            }
            java.lang.String attributeValue;
            if (attributePrefix.trim().length() > 0) {
                attributeValue = attributePrefix + ":" + qname.getLocalPart();
            } else {
                attributeValue = qname.getLocalPart();
            }

            if (namespace.equals("")) {
                xmlWriter.writeAttribute(attName, attributeValue);
            } else {
                registerPrefix(xmlWriter, namespace);
                xmlWriter.writeAttribute(namespace, attName, attributeValue);
            }
        }

        /**
        * method to handle Qnames
        */
        private void writeQName(javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
            java.lang.String namespaceURI = qname.getNamespaceURI();
            if (namespaceURI != null) {
                java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
                if (prefix == null) {
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }

                if (prefix.trim().length() > 0){
                    xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
                } else {
                    // i.e this is the default namespace
                    xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
                }
            } else {
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        }

        // Writes a space-separated list of QNames as character data.
        private void writeQNames(javax.xml.namespace.QName[] qnames,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

            if (qnames != null) {
                // we have to store this data until last moment since it is not possible to write any
                // namespace data after writing the charactor data
                java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
                java.lang.String namespaceURI = null;
                java.lang.String prefix = null;

                for (int i = 0; i < qnames.length; i++) {
                    if (i > 0) {
                        stringToWrite.append(" ");
                    }
                    namespaceURI = qnames[i].getNamespaceURI();
                    if (namespaceURI != null) {
                        prefix = xmlWriter.getPrefix(namespaceURI);
                        if ((prefix == null) || (prefix.length() == 0)) {
                            prefix = generatePrefix(namespaceURI);
                            xmlWriter.writeNamespace(prefix, namespaceURI);
                            xmlWriter.setPrefix(prefix,namespaceURI);
                        }

                        if (prefix.trim().length() > 0){
                            stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                        } else {
                            stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                        }
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                }
                xmlWriter.writeCharacters(stringToWrite.toString());
            }
        }

        /**
        * Register a namespace prefix
        */
        private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
            java.lang.String prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                // Keep generating until we find a prefix not already bound to another URI.
                while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                    prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
                }
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
            return prefix;
        }

        /**
        * databinding method to get an XML representation of this object
        *
        */
        public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{

            java.util.ArrayList elementList = new java.util.ArrayList();
            java.util.ArrayList attribList = new java.util.ArrayList();

            elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                "instanceType"));

            if (localInstanceType != null){
                elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localInstanceType));
            } else {
                throw new org.apache.axis2.databinding.ADBException("instanceType cannot be null!!");
            }

            return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
        }

        /**
        * Factory class that keeps the parse method
        */
        public static class Factory{

            /**
            * static method to create the object
            * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
            * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
            * Postcondition: If this object is an element, the reader is positioned at its end element
            * If this object is a complex type, the reader is positioned at the end element of its outer element
            */
            public static InstanceTypeSetItemType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
                InstanceTypeSetItemType object =
                    new InstanceTypeSetItemType();

                // The following locals are emitted by the code generator; event/prefix/namespaceuri
                // are not used in this particular generated parser.
                int event;
                java.lang.String nillableValue = null;
                java.lang.String prefix ="";
                java.lang.String namespaceuri ="";
                try {

                    // Advance to the first start/end element.
                    while (!reader.isStartElement() && !reader.isEndElement())
                        reader.next();

                    // If the payload declares a more specific xsi:type, delegate to the
                    // extension mapper so the proper subtype is parsed.
                    if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                        java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                        if (fullTypeName!=null){
                            java.lang.String nsPrefix = null;
                            if (fullTypeName.indexOf(":") > -1){
                                nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                            }
                            nsPrefix = nsPrefix==null?"":nsPrefix;

                            java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);

                            if (!"InstanceTypeSetItemType".equals(type)){
                                //find namespace for the prefix
                                java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                                return (InstanceTypeSetItemType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                            }
                        }
                    }

                    // Note all attributes that were handled. Used to differ normal attributes
                    // from anyAttributes.
                    java.util.Vector handledAttributes = new java.util.Vector();

                    reader.next();

                    while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                    // Expect exactly one <instanceType> child; anything else is invalid.
                    if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","instanceType").equals(reader.getName())){

                        java.lang.String content = reader.getElementText();

                        object.setInstanceType(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));

                        reader.next();

                    } // End of if for expected property start element

                    else{
                        // A start element we are not expecting indicates an invalid parameter was passed
                        throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                    }

                    while (!reader.isStartElement() && !reader.isEndElement())
                        reader.next();

                    if (reader.isStartElement())
                        // A start element we are not expecting indicates a trailing invalid property
                        throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

                } catch (javax.xml.stream.XMLStreamException e) {
                    throw new java.lang.Exception(e);
                }

                return object;
            }

        }//end of factory class

    }
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.server.sample.processor;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Locale;
import org.apache.olingo.commons.api.data.ContextURL;
import org.apache.olingo.commons.api.data.ContextURL.Suffix;
import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.data.EntityCollection;
import org.apache.olingo.commons.api.data.Property;
import org.apache.olingo.commons.api.edm.EdmComplexType;
import org.apache.olingo.commons.api.edm.EdmEntitySet;
import org.apache.olingo.commons.api.edm.EdmPrimitiveType;
import org.apache.olingo.commons.api.edm.EdmProperty;
import org.apache.olingo.commons.api.format.ContentType;
import org.apache.olingo.commons.api.http.HttpHeader;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.OData;
import org.apache.olingo.server.api.ODataApplicationException;
import org.apache.olingo.server.api.ODataLibraryException;
import org.apache.olingo.server.api.ODataRequest;
import org.apache.olingo.server.api.ODataResponse;
import org.apache.olingo.server.api.ServiceMetadata;
import org.apache.olingo.server.api.deserializer.DeserializerException;
import org.apache.olingo.server.api.processor.ComplexProcessor;
import org.apache.olingo.server.api.processor.EntityCollectionProcessor;
import org.apache.olingo.server.api.processor.EntityProcessor;
import org.apache.olingo.server.api.processor.PrimitiveProcessor;
import org.apache.olingo.server.api.processor.PrimitiveValueProcessor;
import org.apache.olingo.server.api.serializer.ComplexSerializerOptions;
import org.apache.olingo.server.api.serializer.EntityCollectionSerializerOptions;
import org.apache.olingo.server.api.serializer.EntitySerializerOptions;
import org.apache.olingo.server.api.serializer.ODataSerializer;
import org.apache.olingo.server.api.serializer.PrimitiveSerializerOptions;
import org.apache.olingo.server.api.serializer.SerializerException;
import org.apache.olingo.server.api.uri.UriInfo;
import org.apache.olingo.server.api.uri.UriInfoResource;
import org.apache.olingo.server.api.uri.UriResource;
import org.apache.olingo.server.api.uri.UriResourceEntitySet;
import org.apache.olingo.server.api.uri.UriResourceProperty;
import org.apache.olingo.server.api.uri.queryoption.ExpandOption;
import org.apache.olingo.server.api.uri.queryoption.SelectOption;
import org.apache.olingo.server.sample.data.DataProvider;
import org.apache.olingo.server.sample.data.DataProvider.DataProviderException;
/**
* This processor will deliver entity collections, single entities as well as properties of an entity.
* This is a very simple example which should give you a rough guideline on how to implement such an processor.
* See the JavaDoc of the server.api interfaces for more information.
*/
public class CarsProcessor implements EntityCollectionProcessor, EntityProcessor,
PrimitiveProcessor, PrimitiveValueProcessor, ComplexProcessor {
private OData odata;
private final DataProvider dataProvider;
private ServiceMetadata edm;
/**
 * Creates the processor on top of the given mock data store.
 *
 * <p>This constructor is application specific and not mandatory for the Olingo library;
 * it is used here to simulate database access.
 *
 * @param dataProvider source of the entity data served by this processor
 */
public CarsProcessor(final DataProvider dataProvider) {
    this.dataProvider = dataProvider;
}
/**
 * Invoked by the Olingo framework before request handling; stores the OData factory
 * (used to create serializers) and the service metadata for later use.
 */
@Override
public void init(OData odata, ServiceMetadata edm) {
    this.odata = odata;
    this.edm = edm;
}
/**
 * Serves a GET on an entity set: resolves the addressed set, reads all of its entities
 * from the mock data store, serializes them in the requested format and fills the
 * response (content, 200 OK, Content-Type header).
 */
@Override
public void readEntityCollection(final ODataRequest request, ODataResponse response, final UriInfo uriInfo,
    final ContentType requestedContentType) throws ODataApplicationException, SerializerException {
    // Which entity set was addressed, and its data from the mock "database".
    final EdmEntitySet targetSet = getEdmEntitySet(uriInfo.asUriInfoResource());
    final EntityCollection data = dataProvider.readAll(targetSet);

    // Serializer for the negotiated format; custom formats are not supported in this sample.
    final ODataSerializer serializer = odata.createSerializer(requestedContentType);

    // Assemble the serializer options from the system query options of the request.
    final ExpandOption expandOption = uriInfo.getExpandOption();
    final SelectOption selectOption = uriInfo.getSelectOption();
    final ContextURL contextUrl = isODataMetadataNone(requestedContentType)
        ? null
        : getContextUrl(targetSet, false, expandOption, selectOption, null);
    final EntityCollectionSerializerOptions options = EntityCollectionSerializerOptions.with()
        .id(request.getRawBaseUri() + "/" + targetSet.getName())
        .contextURL(contextUrl)
        .count(uriInfo.getCountOption())
        .expand(expandOption).select(selectOption)
        .build();

    final InputStream payload =
        serializer.entityCollection(edm, targetSet.getEntityType(), data, options).getContent();

    // Populate response: body, status and content type.
    response.setContent(payload);
    response.setStatusCode(HttpStatusCode.OK.getStatusCode());
    response.setHeader(HttpHeader.CONTENT_TYPE, requestedContentType.toContentTypeString());
}
@Override
public void readEntity(final ODataRequest request, ODataResponse response, final UriInfo uriInfo,
final ContentType requestedContentType) throws ODataApplicationException, SerializerException {
// First we have to figure out which entity set the requested entity is in
final EdmEntitySet edmEntitySet = getEdmEntitySet(uriInfo.asUriInfoResource());
// Next we fetch the requested entity from the database
Entity entity;
try {
entity = readEntityInternal(uriInfo.asUriInfoResource(), edmEntitySet);
} catch (DataProviderException e) {
throw new ODataApplicationException(e.getMessage(), 500, Locale.ENGLISH);
}
if (entity == null) {
// If no entity was found for the given key we throw an exception.
throw new ODataApplicationException("No entity found for this key", HttpStatusCode.NOT_FOUND
.getStatusCode(), Locale.ENGLISH);
} else {
// If an entity was found we proceed by serializing it and sending it to the client.
ODataSerializer serializer = odata.createSerializer(requestedContentType);
final ExpandOption expand = uriInfo.getExpandOption();
final SelectOption select = uriInfo.getSelectOption();
InputStream serializedContent = serializer.entity(edm, edmEntitySet.getEntityType(), entity,
EntitySerializerOptions.with()
.contextURL(isODataMetadataNone(requestedContentType) ? null :
getContextUrl(edmEntitySet, true, expand, select, null))
.expand(expand).select(select)
.build()).getContent();
response.setContent(serializedContent);
response.setStatusCode(HttpStatusCode.OK.getStatusCode());
response.setHeader(HttpHeader.CONTENT_TYPE, requestedContentType.toContentTypeString());
}
}
@Override
public void createEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo,
ContentType requestFormat, ContentType responseFormat)
throws ODataApplicationException, DeserializerException, SerializerException {
throw new ODataApplicationException("Entity create is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void deleteEntity(ODataRequest request, ODataResponse response, UriInfo uriInfo)
throws ODataApplicationException {
throw new ODataApplicationException("Entity delete is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void readPrimitive(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType format)
throws ODataApplicationException, SerializerException {
readProperty(response, uriInfo, format, false);
}
@Override
public void readComplex(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType format)
throws ODataApplicationException, SerializerException {
readProperty(response, uriInfo, format, true);
}
@Override
public void readPrimitiveValue(ODataRequest request, ODataResponse response, UriInfo uriInfo, ContentType format)
throws ODataApplicationException, SerializerException {
// First we have to figure out which entity set the requested entity is in
final EdmEntitySet edmEntitySet = getEdmEntitySet(uriInfo.asUriInfoResource());
// Next we fetch the requested entity from the database
final Entity entity;
try {
entity = readEntityInternal(uriInfo.asUriInfoResource(), edmEntitySet);
} catch (DataProviderException e) {
throw new ODataApplicationException(e.getMessage(), 500, Locale.ENGLISH);
}
if (entity == null) {
// If no entity was found for the given key we throw an exception.
throw new ODataApplicationException("No entity found for this key", HttpStatusCode.NOT_FOUND
.getStatusCode(), Locale.ENGLISH);
} else {
// Next we get the property value from the entity and pass the value to serialization
UriResourceProperty uriProperty = (UriResourceProperty) uriInfo
.getUriResourceParts().get(uriInfo.getUriResourceParts().size() - 1);
EdmProperty edmProperty = uriProperty.getProperty();
Property property = entity.getProperty(edmProperty.getName());
if (property == null) {
throw new ODataApplicationException("No property found", HttpStatusCode.NOT_FOUND
.getStatusCode(), Locale.ENGLISH);
} else {
if (property.getValue() == null) {
response.setStatusCode(HttpStatusCode.NO_CONTENT.getStatusCode());
} else {
String value = String.valueOf(property.getValue());
ByteArrayInputStream serializerContent = new ByteArrayInputStream(
value.getBytes(Charset.forName("UTF-8")));
response.setContent(serializerContent);
response.setStatusCode(HttpStatusCode.OK.getStatusCode());
response.setHeader(HttpHeader.CONTENT_TYPE, ContentType.TEXT_PLAIN.toContentTypeString());
}
}
}
}
private void readProperty(ODataResponse response, UriInfo uriInfo, ContentType contentType,
boolean complex) throws ODataApplicationException, SerializerException {
// To read a property we have to first get the entity out of the entity set
final EdmEntitySet edmEntitySet = getEdmEntitySet(uriInfo.asUriInfoResource());
Entity entity;
try {
entity = readEntityInternal(uriInfo.asUriInfoResource(), edmEntitySet);
} catch (DataProviderException e) {
throw new ODataApplicationException(e.getMessage(),
HttpStatusCode.INTERNAL_SERVER_ERROR.getStatusCode(), Locale.ENGLISH);
}
if (entity == null) {
// If no entity was found for the given key we throw an exception.
throw new ODataApplicationException("No entity found for this key",
HttpStatusCode.NOT_FOUND.getStatusCode(), Locale.ENGLISH);
} else {
// Next we get the property value from the entity and pass the value to serialization
UriResourceProperty uriProperty = (UriResourceProperty) uriInfo
.getUriResourceParts().get(uriInfo.getUriResourceParts().size() - 1);
EdmProperty edmProperty = uriProperty.getProperty();
Property property = entity.getProperty(edmProperty.getName());
if (property == null) {
throw new ODataApplicationException("No property found",
HttpStatusCode.NOT_FOUND.getStatusCode(), Locale.ENGLISH);
} else {
if (property.getValue() == null) {
response.setStatusCode(HttpStatusCode.NO_CONTENT.getStatusCode());
} else {
ODataSerializer serializer = odata.createSerializer(contentType);
final ContextURL contextURL = isODataMetadataNone(contentType) ? null :
getContextUrl(edmEntitySet, true, null, null, edmProperty.getName());
InputStream serializerContent = complex ?
serializer.complex(edm, (EdmComplexType) edmProperty.getType(), property,
ComplexSerializerOptions.with().contextURL(contextURL).build()).getContent() :
serializer.primitive(edm, (EdmPrimitiveType) edmProperty.getType(), property,
PrimitiveSerializerOptions.with()
.contextURL(contextURL)
.scale(edmProperty.getScale())
.nullable(edmProperty.isNullable())
.precision(edmProperty.getPrecision())
.maxLength(edmProperty.getMaxLength())
.unicode(edmProperty.isUnicode()).build()).getContent();
response.setContent(serializerContent);
response.setStatusCode(HttpStatusCode.OK.getStatusCode());
response.setHeader(HttpHeader.CONTENT_TYPE, contentType.toContentTypeString());
}
}
}
}
private Entity readEntityInternal(final UriInfoResource uriInfo, final EdmEntitySet entitySet)
throws DataProvider.DataProviderException {
// This method will extract the key values and pass them to the data provider
final UriResourceEntitySet resourceEntitySet = (UriResourceEntitySet) uriInfo.getUriResourceParts().get(0);
return dataProvider.read(entitySet, resourceEntitySet.getKeyPredicates());
}
private EdmEntitySet getEdmEntitySet(final UriInfoResource uriInfo) throws ODataApplicationException {
final List<UriResource> resourcePaths = uriInfo.getUriResourceParts();
/*
* To get the entity set we have to interpret all URI segments
*/
if (!(resourcePaths.get(0) instanceof UriResourceEntitySet)) {
throw new ODataApplicationException("Invalid resource type for first segment.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
/*
* Here we should interpret the whole URI but in this example we do not support navigation so we throw an exception
*/
final UriResourceEntitySet uriResource = (UriResourceEntitySet) resourcePaths.get(0);
return uriResource.getEntitySet();
}
private ContextURL getContextUrl(final EdmEntitySet entitySet, final boolean isSingleEntity,
final ExpandOption expand, final SelectOption select, final String navOrPropertyPath)
throws SerializerException {
return ContextURL.with().entitySet(entitySet)
.selectList(odata.createUriHelper().buildContextURLSelectList(entitySet.getEntityType(), expand, select))
.suffix(isSingleEntity ? Suffix.ENTITY : null)
.navOrPropertyPath(navOrPropertyPath)
.build();
}
@Override
public void updatePrimitive(final ODataRequest request, final ODataResponse response,
final UriInfo uriInfo, final ContentType requestFormat,
final ContentType responseFormat)
throws ODataApplicationException, DeserializerException, SerializerException {
throw new ODataApplicationException("Primitive property update is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void updatePrimitiveValue(final ODataRequest request, ODataResponse response,
final UriInfo uriInfo, final ContentType requestFormat, final ContentType responseFormat)
throws ODataApplicationException, ODataLibraryException {
throw new ODataApplicationException("Primitive property update is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void deletePrimitive(ODataRequest request, ODataResponse response, UriInfo uriInfo) throws
ODataApplicationException {
throw new ODataApplicationException("Primitive property delete is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void deletePrimitiveValue(final ODataRequest request, ODataResponse response, final UriInfo uriInfo)
throws ODataApplicationException, ODataLibraryException {
throw new ODataApplicationException("Primitive property update is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void updateComplex(final ODataRequest request, final ODataResponse response,
final UriInfo uriInfo, final ContentType requestFormat,
final ContentType responseFormat)
throws ODataApplicationException, DeserializerException, SerializerException {
throw new ODataApplicationException("Complex property update is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void deleteComplex(final ODataRequest request, final ODataResponse response, final UriInfo uriInfo)
throws ODataApplicationException {
throw new ODataApplicationException("Complex property delete is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
@Override
public void updateEntity(final ODataRequest request, final ODataResponse response,
final UriInfo uriInfo, final ContentType requestFormat,
final ContentType responseFormat)
throws ODataApplicationException, DeserializerException, SerializerException {
throw new ODataApplicationException("Entity update is not supported yet.",
HttpStatusCode.NOT_IMPLEMENTED.getStatusCode(), Locale.ENGLISH);
}
public static boolean isODataMetadataNone(final ContentType contentType) {
return contentType.isCompatible(ContentType.APPLICATION_JSON)
&& ContentType.VALUE_ODATA_METADATA_NONE.equalsIgnoreCase(
contentType.getParameter(ContentType.PARAMETER_ODATA_METADATA));
}
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2016 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.test;
import java.io.*;
import java.math.*;
import java.util.*;
import java.security.MessageDigest;
import org.lockss.plugin.*;
import org.lockss.daemon.*;
import org.lockss.util.*;
import org.lockss.rewriter.*;
import org.lockss.extractor.*;
/**
* This is a mock version of <code>CachedUrl</code> used for testing
*
* @author Thomas S. Robertson
* @version 0.0
*/
public class MockCachedUrl implements CachedUrl {
  Logger log = Logger.getLogger(MockCachedUrl.class);

  // Older versions of this URL.  addVersion() numbers them from 1 and stores
  // version N at list index N-1; "this" always serves as the current version
  // (version number 0).
  private ArrayList versions;
  private ArchivalUnit au;
  private String url;
  private InputStream cachedIS;
  private CIProperties cachedProp = new CIProperties();
  private boolean isLeaf = true;
  private boolean doesExist = false;
  private String content = null;
  private long contentSize = -1;
  private Reader reader = null;
  private String cachedFile = null;
  private boolean isResource;
  private int version = 0;
  private LinkRewriterFactory lrf = null;
  private FileMetadataExtractor metadataExtractor = null;
  private boolean released = false;
  private boolean open = false;
  private boolean getUncompressedCalled = false;

  public MockCachedUrl(String url) {
    this.versions = new ArrayList();
    this.url = url;
  }

  public MockCachedUrl(String url, ArchivalUnit au) {
    this(url);
    this.au = au;
  }

  /**
   * Construct a mock cached URL that is backed by a file.
   *
   * @param url
   * @param file The name of the file to load.
   * @param isResource If true, load the file name as a
   * resource. If false, load as a file.
   */
  public MockCachedUrl(String url, String file, boolean isResource) {
    this(url);
    this.isResource = isResource;
    cachedFile = file;
  }

  public ArchivalUnit getArchivalUnit() {
    return au;
  }

  public String getUrl() {
    return url;
  }

  /**
   * Returns the CachedUrl for the given version number.  Version 0 is this
   * object itself; versions added via {@link #addVersion} are numbered from 1.
   *
   * @throws UnsupportedOperationException if no extra versions exist
   */
  public CachedUrl getCuVersion(int version) {
    if (version == 0) {
      return this;
    } else if (versions.isEmpty()) {
      throw new UnsupportedOperationException("No versions.");
    } else {
      // addVersion() stores version N at index N-1, so translate the 1-based
      // version number to a 0-based list index.  (Previously the raw version
      // number was used as the index, which skipped the first stored version
      // and overran the list for the last one.)
      return (CachedUrl)versions.get(version - 1);
    }
  }

  public CachedUrl[] getCuVersions() {
    return getCuVersions(Integer.MAX_VALUE);
  }

  /**
   * Returns up to maxVersions versions, with this object always supplied as
   * element 0 (the current version), followed by the stored versions.
   */
  public CachedUrl[] getCuVersions(int maxVersions) {
    int min = Math.min(maxVersions, versions.size() + 1);
    // Always supply this as the current version
    CachedUrl[] retVal = new CachedUrl[min];
    retVal[0] = this;
    if (min > 1) {
      System.arraycopy((CachedUrl[])versions.toArray(new CachedUrl[min]),
                       0, retVal, 1, min - 1);
    }
    return retVal;
  }

  public void setVersion(int version) {
    this.version = version;
  }

  public int getVersion() {
    return version;
  }

  /**
   * Adds a version with the given content.  The first call simply sets this
   * object's content (and returns this); later calls create and return a new
   * MockCachedUrl numbered versions.size() + 1.
   */
  public MockCachedUrl addVersion(String content) {
    // Special case: If this is the first version, alias for 'addContent'
    if (this.content == null) {
      this.content = content;
      return this;
    } else {
      MockCachedUrl mcu = new MockCachedUrl(url, au);
      mcu.content = content;
      mcu.version = versions.size() + 1;
      versions.add(mcu);
      return mcu;
    }
  }

  /** Opens a Reader over the content (or the configured Reader), marking this CU open. */
  public Reader openForReading() {
    open = true;
    if (content != null) {
      return new StringReader(content);
    }
    if (reader != null) {
      return reader;
    }
    return new StringReader("");
  }

  public LinkRewriterFactory getLinkRewriterFactory() {
    return lrf;
  }

  public void setLinkRewriterFactory(LinkRewriterFactory lrf) {
    this.lrf = lrf;
  }

  public void setOption(String option, String val) {
  }

  public boolean hasContent() {
    return doesExist || content != null;
  }

  public boolean isLeaf() {
    return isLeaf;
  }

  public void setIsLeaf(boolean isLeaf) {
    this.isLeaf = isLeaf;
  }

  public int getType() {
    return CachedUrlSetNode.TYPE_CACHED_URL;
  }

  public void setExists(boolean doesExist) {
    this.doesExist = doesExist;
  }

  // Read interface - used by the proxy.

  /** Wraps the stream so that all bytes read are fed to the hasher. */
  private InputStream newHashedInputStream(InputStream is,
                                           HashedInputStream.Hasher hasher) {
    return new BufferedInputStream(new HashedInputStream(is, hasher));
  }

  public InputStream getUnfilteredInputStream(HashedInputStream.Hasher hasher) {
    log.debug3("MockCachedUrl.getUnfilteredInputStream with " +
               (hasher == null ? "no " : "") + "Hasher");
    open = true;
    if (hasher == null) {
      return getUnfilteredInputStream();
    }
    return newHashedInputStream(getUnfilteredInputStream(), hasher);
  }

  /**
   * Returns an InputStream over the content, preferring (in order) the backing
   * file/resource, the content string, then the explicitly set stream.  Returns
   * null if the backing file cannot be opened or nothing is configured.
   */
  public InputStream getUnfilteredInputStream() {
    log.debug3("MockCachedUrl.getUnfilteredInputStream");
    InputStream res = null;
    try {
      if (cachedFile != null) {
        if (isResource) {
          res = ClassLoader.getSystemClassLoader().
            getResourceAsStream(cachedFile);
        } else {
          res = new FileInputStream(cachedFile);
        }
      }
    } catch (IOException ex) {
      return null;
    }
    if (res == null && content != null) {
      res = new StringInputStream(content);
    }
    if (res == null) {
      res = cachedIS;
    }
    if (res != null) {
      open = true;
    }
    return res;
  }

  public InputStream getUncompressedInputStream() {
    getUncompressedCalled = true;
    return getUnfilteredInputStream();
  }

  public InputStream getUncompressedInputStream(HashedInputStream.Hasher
                                                hasher) {
    getUncompressedCalled = true;
    return getUnfilteredInputStream(hasher);
  }

  /** Test accessor: true iff an uncompressed-stream method has been called. */
  public boolean getUncompressedCalled() {
    return getUncompressedCalled;
  }

  public InputStream openForHashing() {
    return openForHashing(null);
  }

  public void delete() throws UnsupportedOperationException, IOException {
  }

  /**
   * Opens a stream for hashing, applying the AU's hash filter for the content
   * type if one exists; the optional hasher sees the unfiltered bytes.
   */
  public InputStream openForHashing(HashedInputStream.Hasher hasher) {
    String contentType = getContentType();
    InputStream is = null;
    // look for a FilterFactory
    if (au != null) {
      FilterFactory fact = au.getHashFilterFactory(contentType);
      if (fact != null) {
        InputStream unfis = getUnfilteredInputStream();
        if (hasher != null) {
          unfis = newHashedInputStream(unfis, hasher);
        }
        if (log.isDebug3()) {
          log.debug3("Filtering " + contentType +
                     " with " + fact.getClass().getName());
        }
        try {
          return fact.createFilteredInputStream(au, unfis, getEncoding());
        } catch (PluginException e) {
          IOUtil.safeClose(unfis);
          throw new RuntimeException(e);
        }
      }
    }
    return getUnfilteredInputStream();
  }

  /**
   * Returns the content size: an explicitly set size, else the content string
   * length, else the backing file/resource length, else 0.
   */
  public long getContentSize() {
    if (contentSize != -1) {
      return contentSize;
    }
    if (content != null) {
      return content.length();
    }
    if (cachedFile != null) {
      if (isResource) {
        InputStream in =
          ClassLoader.getSystemClassLoader(). getResourceAsStream(cachedFile);
        try {
          // skip() to the end to measure the resource's length
          return in.skip(Long.MAX_VALUE);
        } catch (IOException e) {
          return 100;
        } finally {
          IOUtil.safeClose(in);
        }
      } else {
        return new File(cachedFile).length();
      }
    }
    // content is known to be null here, so this is effectively 0
    return content == null ? 0 : content.length();
  }

  public CIProperties getProperties(){
    open = true;
    return cachedProp;
  }

  public void addProperty(String key, String value) {
    cachedProp.setProperty(key, value);
  }

  public String getContentType(){
    open = true;
    return cachedProp.getProperty(PROPERTY_CONTENT_TYPE);
  }

  public String getEncoding(){
    open = true;
    return Constants.DEFAULT_ENCODING;
  }

  public void storeContent(InputStream input) throws IOException {
    storeContent(input, null);
  }

  public void storeContent(InputStream input, CIProperties headers)
      throws IOException {
    cachedIS = input;
    if (headers != null) {
      cachedProp = headers;
    }
  }

  public FileMetadataExtractor getFileMetadataExtractor(MetadataTarget target) {
    return metadataExtractor;
  }

  // mock specific accessors

  public void setInputStream(InputStream is){
    cachedIS = is;
  }

  public void setContent(String content) {
    this.content = content;
  }

  public String getContent() {
    return content;
  }

  public void setContentSize(long size) {
    this.contentSize = size;
  }

  public void setReader(Reader reader) {
    this.reader = reader;
  }

  public void setProperties(CIProperties prop){
    cachedProp = prop;
  }

  public void setProperty(String key, String val) {
    cachedProp.put(key, val);
  }

  public void setFileMetadataExtractor(FileMetadataExtractor me) {
    metadataExtractor = me;
  }

  public void release() {
    released = true;
    open = false;
  }

  public boolean isOpen() {
    return open;
  }

  public boolean isReleased() {
    return released;
  }

  public CachedUrl getArchiveMemberCu(ArchiveMemberSpec ams) {
    return null;
  }

  @Override
  public boolean isArchiveMember() {
    return false;
  }

  public String toString() {
    StringBuffer sb = new StringBuffer(url.length()+17);
    sb.append("[MockCachedUrl: ");
    sb.append(url);
    sb.append(", v: ");
    sb.append(version);
    sb.append("]");
    return sb.toString();
  }
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the KiWi Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* Contributor(s):
*
*
*/
package kiwi.service.reasoning;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import kiwi.api.triplestore.TripleStore;
import kiwi.service.reasoning.ast.BNode;
import kiwi.service.reasoning.ast.Conjunction;
import kiwi.service.reasoning.ast.Formula;
import kiwi.service.reasoning.ast.Literal;
import kiwi.service.reasoning.ast.Rule;
import kiwi.service.reasoning.ast.Term;
import kiwi.service.reasoning.ast.TriplePattern;
import kiwi.service.reasoning.ast.Uri;
import kiwi.service.reasoning.ast.Variable;
import kiwi.service.reasoning.parser.ParseException;
import kiwi.service.reasoning.parser.SimpleKWRLProgramParser;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.AutoCreate;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.log.Log;
/** Rule compiler takes parsed rules and compiles them into CompiledRules.
*
* It works by translating rules into HQL queries.
*
* @author Jakub Kotowski
*
*/
@Scope(ScopeType.APPLICATION)
@AutoCreate
@Name("ruleCompiler")
public class RuleCompiler {
//Most of the compilation code now assumes that rule bodies are always conjunctions
//temporary solution before implementing more general bodies
public static final String SIMPLE_BODIES="code that needs to be changed to support more than simple bodies refers to this constant.";
private static final String AND = " and ";
private static final String OR = " or ";
private static final String TRIPLE_PREFIX = "t";
@In
private TripleStore tripleStore;
@Logger
private Log log;
/** A helper method to determine all the places at which a variable occurs. The map is then used to construct the select part and the where clause of a query.
*
* For rule "($1 http://foaf/firstName $2), ($1 $3 $2) -> ($1 http://some/property http://some/thing)"
*
* it constructs the following map:
* String ArrayList<String>
* "$1" "t0.subject", "t1.subject"
* "$2" "t0.object", "t2.object"
* "$3" "t2.property"
*
* @param rule A rule returned by the parser.
* @return Returns the map described above.
*/
private static VariablePositions getVariablePositions(Rule rule) {
  Conjunction body = (Conjunction) rule.getBody().getFormula();
  VariablePositions result = new VariablePositions();
  int atomNumber = 1; // atoms are counted from 1, matching constructWhereClause
  for (Formula atom : body.getFormulas()) {
    TriplePattern pattern = (TriplePattern) atom;
    recordIfVariable(result, pattern.getSubject(), atomNumber,
        TriplePattern.TriplePatternPosition.SUBJECT);
    recordIfVariable(result, pattern.getProperty(), atomNumber,
        TriplePattern.TriplePatternPosition.PROPERTY);
    recordIfVariable(result, pattern.getObject(), atomNumber,
        TriplePattern.TriplePatternPosition.OBJECT);
    atomNumber++;
  }
  return result;
}

/** Records the position of {@code term} in {@code vars} iff the term is a variable. */
private static void recordIfVariable(VariablePositions vars, Term term, int atomNumber,
    TriplePattern.TriplePatternPosition where) {
  if (term.isVariable()) {
    vars.addPosition((Variable) term, new VariablePosition(atomNumber, where));
  }
}
//TODO typed literals
/** Compiles rule head.
*
* Assumes that the body has already been compiled: expects rule.variableMap to be already constructed and filled.
*
* We allow b-nodes in subject and object because of instantiated rules.
*
* @param rule The partially compiled rule.
*/
/** Compiles the rule head: resolves every constant term of every head triple
 * pattern to a concrete KiWi node via the triple store, and registers any
 * head-only variables as construction variables on the compiled rule.
 *
 * Assumes the body has already been compiled (the body's variable set must be
 * populated so head-only variables can be detected).
 *
 * @param rule the partially compiled rule to complete in place
 * @throws ReasoningException if the head still contains the inconsistency
 *         symbol, or a term type is not legal at its position
 */
private void compileRuleHead(CompiledRule rule) {
  // An inconsistency head must have been rewritten to triples by now.
  if (rule.getRule().getHead().isInconsistency()) {
    if (rule.getRule().getHead().getFormula() == null)
      throw new ReasoningException("Inconsistency symbol in the rule head must be translated to triples before rule compilation!");
  }
  Conjunction head = rule.getHead();
  for (Formula formula : head.getFormulas()) {
    TriplePattern tp = (TriplePattern) formula;
    // Subject: b-nodes are allowed here because of instantiated rules.
    switch (tp.getSubject().getTermType()) {
    case BNODE:
      BNode bnode = (BNode) tp.getSubject();
      tp.setKiwiSubject(tripleStore.createAnonResource( bnode.getName() ));
      break;
    case URI:
      Uri uri = (Uri) tp.getSubject();
      tp.setKiwiSubject(tripleStore.createUriResource( uri.getUri() ));
      break;
    case VARIABLE:
    case CONSTRUCTION_VARIABLE:
      // A head variable not bound by the body is a construction variable.
      Variable variable = (Variable) tp.getSubject();
      if (! rule.getBody().getVariables().contains(variable))
        rule.addConstructionVariable(variable);
      break;
    default:
      throw new ReasoningException("Unsupported term type in head triple subject position: "+tp.getSubject().getTermType());
    }
    // Property: only URIs or variables are legal in predicate position.
    switch (tp.getProperty().getTermType()) {
    case URI:
      Uri uri = (Uri) tp.getProperty();
      tp.setKiwiProperty( tripleStore.createUriResource(uri.getUri()) );
      break;
    case VARIABLE:
    case CONSTRUCTION_VARIABLE:
      Variable variable = (Variable) tp.getProperty();
      if (! rule.getBody().getVariables().contains(variable))
        rule.addConstructionVariable(variable);
      break;
    default:
      throw new ReasoningException("Unsupported term type in head triple property position: "+tp.getProperty().getTermType());
    }
    // Object: b-nodes, URIs and literals are all legal here.
    switch (tp.getObject().getTermType()) {
    case BNODE:
      BNode bnode = (BNode) tp.getObject();
      tp.setKiwiObject(tripleStore.createAnonResource( bnode.getName() ));
      break;
    case URI:
      Uri uri = (Uri) tp.getObject();
      tp.setKiwiObject(tripleStore.createUriResource( uri.getUri() ));
      break;
    case LITERAL:
    case TYPED_LITERAL:
      // NOTE(review): typed literals currently lose their datatype here —
      // only the lexical form is stored (see the TODO above this method).
      Literal literal = (Literal) tp.getObject();
      tp.setKiwiObject( tripleStore.createLiteral( literal.getLiteral() ) );
      break;
    case VARIABLE:
    case CONSTRUCTION_VARIABLE:
      Variable variable = (Variable) tp.getObject();
      if (! rule.getBody().getVariables().contains(variable))
        rule.addConstructionVariable(variable);
      break;
    default:
      throw new ReasoningException("Unsupported term type in head triple object position: "+tp.getObject().getTermType());
    }
  }
}
/** Renders a variable position as an HQL path expression such as "t2.subject "
 * (note the trailing space, which callers rely on when concatenating). */
private static String toHQL(VariablePosition varpos) {
  final String alias = TRIPLE_PREFIX + varpos.getAtomNumber();
  switch (varpos.getTriplePatternPosition()) {
  case SUBJECT:
    return alias + ".subject ";
  case PROPERTY:
    return alias + ".property ";
  case OBJECT:
    return alias + ".object ";
  default:
    throw new IllegalArgumentException("Triple pattern position "
        + varpos.getTriplePatternPosition() + " is not supported.");
  }
}
/** Constructs an HQL where clause.
*
* @param instantiatingPosition if greater than 1 then instead of matching a triple pattern at the position, this function adds
* a set of triple ids matching expression to be used for rule instantiation. The HQL parameter name is tripleIds.
*
* If compiling a normal rule (that isn't supposed to be instantiated) then instantiatingPosition must be 0 or less.
*
* @return
*/
private static String constructWhereClause(VariablePositions variablePositions, Conjunction conjunction, int instantiatingPosition, int round, CompiledRule crule) {
  // Subquery selecting ids of triples produced in the given reasoning round
  // that match this rule's body pattern at the instantiating position.  It is
  // stored on the compiled rule for later use rather than inlined below
  // (see the commented-out alternatives in the loop).
  String newTriplesMatchingRule = "select t.id from KiWiTriple t, RuleBody rb "+
  "WHERE t.reasoningRound = "+round+" AND rb.ruleName = :ruleName AND rb.position = "+ instantiatingPosition +" AND "+
  "(rb.triplePattern.subject.variable = true OR rb.triplePattern.subject.node = t.subject) AND "+
  "(rb.triplePattern.property.variable = true OR rb.triplePattern.property.node = t.property) AND "+
  "(rb.triplePattern.object.variable = true OR rb.triplePattern.object.node = t.object)";
  crule.subquery = newTriplesMatchingRule;
  StringBuffer whereClause = new StringBuffer();
  // Join conditions: every variable occurring at several positions forces the
  // corresponding triple fields to be equal (chained pairwise: p0=p1, p1=p2, ...).
  for (Variable variable : variablePositions.getVariables()) {
    List<VariablePosition> positions = variablePositions.getVariablePositions(variable);
    if (positions.size() < 2)
      continue;
    for (int i = 0; i < positions.size()-1; i++) {
      whereClause.append(AND);
      whereClause.append(toHQL(positions.get(i)));
      whereClause.append(" = ");
      whereClause.append(toHQL(positions.get(i+1)));
    }
  }
  int i = 1;//we count atoms from 1, see also VariablePositions
  for (Formula formula : conjunction.getFormulas()) {
    // For the instantiating atom, restrict to triples of the current
    // reasoning round instead of (or in addition to) its pattern constants.
    // Note: execution falls through to the constant conditions below as well
    // (the continue is commented out).
    if (i == instantiatingPosition) {
      whereClause.append(AND);
      // whereClause.append(TRIPLE_PREFIX).append(i).append(".id").append(" IN (:tripleIds) ");
      // whereClause.append(TRIPLE_PREFIX).append(i).append(".id").append(" IN (").append(newTriplesMatchingRule).append(") ");
      whereClause.append(TRIPLE_PREFIX).append(i).append(".reasoningRound").append(" = ").append(round);
      //i++;
      //continue;
    }
    TriplePattern tp = (TriplePattern) formula;
    Term term;
    // Constant subject: match by URI or by anonymous-node id.
    term = tp.getSubject();
    if (!term.isVariable()) {
      whereClause.append(AND);
      switch (term.getTermType()) {
      case URI:
        whereClause.append(TRIPLE_PREFIX).append(i).append(".subject.uri = '");
        whereClause.append(((Uri)term).getUri()).append("'");
        break;
      case BNODE:
        whereClause.append(TRIPLE_PREFIX).append(i).append(".subject.anonId = '");
        whereClause.append(((BNode)term).getName()).append("'");
        break;
      default:
        throw new IllegalArgumentException("Term of type "+term.getTermType()+" is not allowed in subject position.");
      }
    }
    // Constant property: only URIs are legal in predicate position.
    term = tp.getProperty();
    if (!term.isVariable()) {
      whereClause.append(AND);
      switch (term.getTermType()) {
      case URI:
        whereClause.append(TRIPLE_PREFIX).append(i).append(".property.uri = '");
        whereClause.append(((Uri)term).getUri()).append("'");
        break;
      default:
        throw new IllegalArgumentException("Term of type "+term.getTermType()+" is not allowed in property position.");
      }
    }
    // Constant object: URI, b-node, or literal content.
    // NOTE(review): literal values are concatenated into the HQL string
    // unescaped — a literal containing a quote would break the query.
    term = tp.getObject();
    if (!term.isVariable()) {
      whereClause.append(AND);
      switch (term.getTermType()) {
      case URI:
        whereClause.append(TRIPLE_PREFIX).append(i).append(".object.uri = '");
        whereClause.append(((Uri)term).getUri()).append("'");
        break;
      case BNODE:
        whereClause.append(TRIPLE_PREFIX).append(i).append(".object.anonId = '");
        whereClause.append(((BNode)term).getName()).append("'");
        break;
      case LITERAL:
      case TYPED_LITERAL:
        whereClause.append(TRIPLE_PREFIX).append(i).append(".object.content = '");
        whereClause.append(((Literal)term).getLiteral()).append("'");
        break;
      default:
        throw new IllegalArgumentException("Term of type "+term.getTermType()+" is not allowed in object position.");
      }
    }
    // Only consider triples that are not deleted, or that were inferred.
    whereClause.append(AND).append("(");
    whereClause.append(TRIPLE_PREFIX).append(i).append(".deleted = false "); //deleted means versioned and marked as deleted (i.e. it's not base), it can also be inferred
    whereClause.append(OR).append(TRIPLE_PREFIX).append(i).append(".inferred = true ");
    whereClause.append(") ");
    i++;
  }// for each formula in conjunction
  whereClause = whereClause.replace(0, AND.length(), ""); //remove the leading AND
  return whereClause.toString();
}
//TODO typed literals
//TODO rules with no variables shared between body and head?
//TODO rules without variables?
/**
 * Compiles the body of a rule into an HQL query over KiWiTriple and records
 * the variables/triple ids that the query will select on the returned
 * {@link CompiledRule}.
 * <p>
 * We allow b-nodes in subject and object because of instantiated rules.
 *
 * @param rule the rule whose body is compiled; its body must be a conjunction
 *        of atomic formulas, otherwise an {@link IllegalArgumentException} is thrown.
 * @param instantiatingPosition 0 or lower for a normal rule; 1 or greater for the
 *        atom position at which the rule is instantiated (yields a
 *        {@link CompiledRuleInstance}).
 * @param round the reasoning round, forwarded to the WHERE-clause construction.
 * @return the compiled rule with its HQL query set.
 */
private static CompiledRule compileRuleBody(Rule rule, int instantiatingPosition, int round) {
    Formula formula = rule.getBody().getFormula();
    if (!formula.isConjunction())
        throw new IllegalArgumentException("Rule "+rule.getName()+" uses not implemented features: "+formula.getFormulaType());
    Conjunction conjunction = (Conjunction) formula;
    for (Formula f : conjunction.getFormulas())
        if (!f.isAtomic())
            throw new IllegalArgumentException("Rule "+rule.getName()+" uses not implemented features: "+f.getFormulaType());
    // Instantiated rules (position >= 1) get a specialized compiled form.
    CompiledRule crule;
    if (instantiatingPosition < 1)
        crule = new CompiledRule(rule);
    else
        crule = new CompiledRuleInstance(rule);
    VariablePositions variablePositions = getVariablePositions(rule);
    //log.debug("VariablePositions for "+rule.getName()+": "+variablePositions.toString());
    String whereClause = constructWhereClause(variablePositions, conjunction, instantiatingPosition, round, crule);
    Set<Variable> bodyVariables = rule.getBody().getVariables();
    Set<Variable> headVariables = rule.getHead().getVariables();
    // Variables to be selected in the query are those that are both in the
    // rule body and in the rule head.
    List<Variable> selectVariables = new ArrayList<Variable>();
    for (Variable var : bodyVariables)
        if (headVariables.contains(var))
            selectVariables.add(var);
    if (selectVariables.isEmpty()) {
        // No variable is shared between the rule body and the rule head,
        // therefore we select all body variables instead.
        crule.setHasNoSharedVariables(true);
        selectVariables.addAll(bodyVariables);
    }
    final String COMMA = ",";
    StringBuilder selectClause = new StringBuilder();
    for (Variable var : selectVariables) {
        // Any one occurrence of the variable suffices for selection.
        VariablePosition pos = variablePositions.getVariablePositions(var).get(0);
        selectClause.append(COMMA).append(toHQL(pos)); //TODO: verify
        crule.addSelectVariable(var);
    }
    // Add triple id selection (so that we can compute ids for new resources).
    int conjunctCount = conjunction.getFormulas().size();
    for (int i = 1; i <= conjunctCount; i++)
        selectClause.append(COMMA).append(TRIPLE_PREFIX).append(i).append(".id");
    // Remove the leading comma. The selectClause now contains
    // selectVariables.size() variable selections + conjunctCount triple id selections.
    selectClause.delete(0, COMMA.length());
    StringBuilder fromClause = new StringBuilder();
    for (int i = 1; i <= conjunctCount; i++)
        fromClause.append(COMMA).append("KiWiTriple ").append(TRIPLE_PREFIX).append(i);
    fromClause.delete(0, COMMA.length());
    crule.setQuery("select "+selectClause.toString()+" from "+fromClause.toString()+" where "+whereClause);
    return crule;
}
/**
 * Compiles a rule without instantiation.
 *
 * @param rule the rule to compile.
 * @return the compiled rule, or null if compilation failed.
 */
public CompiledRule compileRule(Rule rule) {
    // Negative sentinels mean "no instantiating position" and "no round".
    final int noInstantiatingPosition = -1;
    final int noRound = -1;
    return compileRule(rule, noInstantiatingPosition, noRound);
}
/**
 * Compiles rules into HQL queries and prepares creation of new triples
 * according to rule heads.
 *
 * @param rule the rule to be compiled.
 * @param instantiatingPosition 0 or lower for normal rules and the atom position
 *        (greater than or equal to 1) at which the rule will be instantiated -
 *        i.e. instead of matching the atom (triple pattern), triple ids will be
 *        matched (of triples by which the rule will be instantiated).
 * @param round the reasoning round passed through to body compilation.
 * @return the compiled rule, or null if a {@link ReasoningException} occurred.
 */
public CompiledRule compileRule(Rule rule, int instantiatingPosition, int round) {
    try {
        // Ground bodies need no query; otherwise compile the body into HQL.
        CompiledRule compiled = rule.getBody().isGround()
                ? new CompiledRule(rule)
                : compileRuleBody(rule, instantiatingPosition, round);
        compileRuleHead(compiled);
        return compiled;
    } catch (ReasoningException ex) {
        log.error("Could not compile rule \""+rule.getName()+"\". Error was: #0", ex, ex.getMessage());
        return null;
    }
}
/**
 * Compiles a list of parsed rules, silently skipping any rule that fails to
 * compile (compileRule logs the failure and returns null).
 *
 * @param parsedRules the rules to compile.
 * @return the successfully compiled rules; failed rules are omitted.
 */
public ArrayList<CompiledRule> compileRules(List<Rule> parsedRules) {
    ArrayList<CompiledRule> compiledRules = new ArrayList<CompiledRule>();
    // Enhanced for-loop replaces the old index loop; the redundant
    // "else continue;" branch has been dropped.
    for (Rule rule : parsedRules) {
        CompiledRule crule = compileRule(rule);
        if (crule != null)
            compiledRules.add(crule);
    }
    return compiledRules;
}
/**
 * Ad-hoc smoke test: parses a single hard-coded KWRL rule, compiles its body
 * and prints the resulting HQL query.
 */
@SuppressWarnings("static-access")
public static void main(String[] args) throws UnsupportedEncodingException, ParseException {
    final String ruleText = "($1 http://xmlns.com/foaf/0.1/firstName $2) -> (http://dummy.com/person/dummy http://xmlns.com/foaf/0.1/firstName $2)";
    InputStream is = new ByteArrayInputStream(ruleText.getBytes("UTF-8"));
    SimpleKWRLProgramParser parser = new SimpleKWRLProgramParser(is);
    String query = compileRuleBody(parser.Rule(), -1, -1).getQuery();
    System.out.println(query);
}
}
| |
package io.kickflip.sdk.av;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.os.Environment;
import android.util.Log;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.google.common.eventbus.DeadEvent;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import com.mobileman.moments.android.Constants;
import com.mobileman.moments.android.MomentsApplication;
import com.mobileman.moments.android.R;
import com.mobileman.moments.android.Util;
import com.mobileman.moments.android.backend.model.User;
import com.mobileman.moments.android.frontend.fragments.DebugSettingsFragment;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.util.ArrayDeque;
import io.kickflip.sdk.FileUtils;
import io.kickflip.sdk.Kickflip;
import io.kickflip.sdk.api.KickflipApiClient;
import io.kickflip.sdk.api.KickflipCallback;
import io.kickflip.sdk.api.json.HlsStream;
import io.kickflip.sdk.api.json.Response;
import io.kickflip.sdk.api.json.Stream;
import io.kickflip.sdk.api.s3.S3BroadcastManager;
import io.kickflip.sdk.event.BroadcastIsBufferingEvent;
import io.kickflip.sdk.event.BroadcastIsLiveEvent;
import io.kickflip.sdk.event.BroadcastIsReadyEvent;
import io.kickflip.sdk.event.HlsManifestWrittenEvent;
import io.kickflip.sdk.event.HlsSegmentWrittenEvent;
import io.kickflip.sdk.event.MuxerFinishedEvent;
import io.kickflip.sdk.event.S3UploadEvent;
import io.kickflip.sdk.event.StreamLocationAddedEvent;
import io.kickflip.sdk.event.ThumbnailWrittenEvent;
import io.kickflip.sdk.exception.KickflipException;
import static com.google.common.base.Preconditions.checkArgument;
import static io.kickflip.sdk.Kickflip.isKitKat;
/**
* Broadcasts HLS video and audio to <a href="https://kickflip.io">Kickflip.io</a>.
* The lifetime of this class correspond to a single broadcast. When performing multiple broadcasts,
* ensure reference to only one {@link io.kickflip.sdk.av.Broadcaster} is held at any one time.
* {@link io.kickflip.sdk.fragment.BroadcastFragment} illustrates how to use Broadcaster in this pattern.
* <p/>
* Example Usage:
* <p/>
* <ol>
* <li>Construct {@link Broadcaster()} with your Kickflip.io Client ID and Secret</li>
* <li>Call {@link Broadcaster#setPreviewDisplay(io.kickflip.sdk.view.GLCameraView)} to assign a
* {@link io.kickflip.sdk.view.GLCameraView} for displaying the live Camera feed.</li>
* <li>Call {@link io.kickflip.sdk.av.Broadcaster#startRecording()} to begin broadcasting</li>
* <li>Call {@link io.kickflip.sdk.av.Broadcaster#stopRecording()} to end the broadcast.</li>
* </ol>
*
* @hide
*/
// TODO: Make HLS / RTMP Agnostic
public class Broadcaster extends AVRecorder {
private static final String TAG = "Broadcaster";
private static final boolean VERBOSE = !MomentsApplication.isOnProduction();
private static final int MIN_BITRATE = 3 * 100 * 1000; // 300 kbps
private static final int CRITICAL_QUEUE_SEGMENT_COUNT = 3;
private static final int FACEBOOK_POSTER_MIN_WIDTH = 610;
private static final int FACEBOOK_POSTER_MIN_HEIGHT = 320;
private Context mContext;
private KickflipApiClient mKickflip;
private User mUser;
private HlsStream mStream;
private HlsFileObserver mFileObserver;
private S3BroadcastManager mS3Manager;
private ArrayDeque<Pair<String, File>> mUploadQueue;
private SessionConfig mConfig;
private BroadcastListener mBroadcastListener;
private EventBus mEventBus;
private boolean mReadyToBroadcast; // Kickflip user registered and endpoint ready
private boolean mSentBroadcastLiveEvent;
private boolean firstManifestSent;
private boolean mSentBroadcastReadyEvent;
private int mVideoBitrate;
private File mManifestSnapshotDir; // Directory where manifest snapshots are stored
private File mVodManifest; // VOD HLS Manifest containing complete history
private volatile int mNumSegmentsWritten;
private int mLastRealizedBandwidthBytesPerSec; // Bandwidth snapshot for adapting bitrate
private boolean mDeleteAfterUploading; // Should recording files be deleted as they're uploaded?
private ObjectMetadata mS3ManifestMeta;
/**
* Construct a Broadcaster with Session settings and Kickflip credentials
*
* @param context the host application {@link android.content.Context}.
* @param config the Session configuration. Specifies bitrates, resolution etc.
* @param CLIENT_ID the Client ID available from your Kickflip.io dashboard.
* @param CLIENT_SECRET the Client Secret available from your Kickflip.io dashboard.
*/
public Broadcaster(Context context, SessionConfig config, String CLIENT_ID, String CLIENT_SECRET) {
    super(config);
    checkArgument(CLIENT_ID != null && CLIENT_SECRET != null);
    // init() must run before mConfig wiring: it creates mEventBus, which the
    // muxer is handed below.
    init();
    mContext = context;
    mConfig = config;
    mConfig.getMuxer().setEventBus(mEventBus);
    mVideoBitrate = mConfig.getVideoBitrate();
    if (VERBOSE) Log.i(TAG, "Initial video bitrate : " + mVideoBitrate);
    // Manifest snapshots go to an "m3u8" sibling directory of the output path.
    mManifestSnapshotDir = new File(mConfig.getOutputPath().substring(0, mConfig.getOutputPath().lastIndexOf("/") + 1), "m3u8");
    mManifestSnapshotDir.mkdir();
    mVodManifest = new File(mManifestSnapshotDir, Stream.HISTORICAL_VIDEO_FILENAME);
    writeEventManifestHeader(mConfig.getHlsSegmentDuration());
    // Watch the recording directory so segment/manifest/thumbnail writes are
    // posted to the event bus as they happen.
    String watchDir = config.getOutputDirectory().getAbsolutePath();
    mFileObserver = new HlsFileObserver(watchDir, mEventBus);
    mFileObserver.startWatching();
    if (VERBOSE) Log.i(TAG, "Watching " + watchDir);
    // Uploads are queued until storage credentials arrive asynchronously below.
    mReadyToBroadcast = false;
    mKickflip = Kickflip.setup(context, null, new KickflipCallback() {
        @Override
        public void onSuccess(Response response) {
            // NOTE(review): assumes the setup response is a User - confirm
            // against KickflipApiClient's contract.
            User user = (User) response;
            mUser = user;
            if (VERBOSE) Log.i(TAG, "Got storage credentials " + response);
        }
        @Override
        public void onError(KickflipException error) {
            Log.e(TAG, "Failed to get storage credentials" + error.toString());
            if (mBroadcastListener != null)
                mBroadcastListener.onBroadcastError(error);
        }
    });
}
/**
 * Resets per-broadcast state and wires this instance onto a fresh private
 * event bus. Called once from the constructor, before any config wiring.
 */
private void init() {
    mEventBus = new EventBus("Broadcaster");
    mEventBus.register(this);
    mSentBroadcastLiveEvent = false;
    mNumSegmentsWritten = 0;
    mLastRealizedBandwidthBytesPerSec = 0;
    mDeleteAfterUploading = true;
}
/**
 * Controls whether local recording files are deleted after a successful
 * upload. Default is true.
 * <p/>
 * Must be called before recording begins; while recording the call is ignored.
 *
 * @param doDelete whether local recording files should be deleted after
 *                 successful upload.
 */
public void setDeleteLocalFilesAfterUpload(boolean doDelete) {
    if (isRecording()) {
        return; // the setting may only change before recording starts
    }
    mDeleteAfterUploading = doDelete;
}
/**
 * Registers a listener for the high-level broadcast lifecycle events a UI
 * cares about: broadcast begun, went live, stopped, or encountered an error.
 * <p/>
 * See {@link io.kickflip.sdk.av.BroadcastListener}
 *
 * @param listener the listener to notify of broadcast events.
 */
public void setBroadcastListener(BroadcastListener listener) {
    this.mBroadcastListener = listener;
}
/**
 * Exposes the private {@link com.google.common.eventbus.EventBus} over which
 * {@link io.kickflip.sdk.av.Broadcaster}, {@link io.kickflip.sdk.av.HlsFileObserver}
 * and {@link io.kickflip.sdk.api.s3.S3BroadcastManager} exchange events
 * (e.g. an HLS MPEG-TS segment or .m3u8 manifest written to disk, or uploaded).
 * See the event types in {@link io.kickflip.sdk.event}.
 *
 * @return the broadcaster's event bus.
 */
public EventBus getEventBus() {
    return this.mEventBus;
}
/**
 * Start broadcasting.
 * <p/>
 * Must be called after {@link Broadcaster#setPreviewDisplay(io.kickflip.sdk.view.GLCameraView)}
 */
@Override
public void startRecording() {
    super.startRecording();
    // NOTE(review): the thumbnail request and "got StartStreamResponse" log
    // below run BEFORE the start-stream API call returns; together with the
    // commented-out lines they look like leftover debug from an offline test
    // path - confirm they are intentional.
    mCamEncoder.requestThumbnailOnDeltaFrameWithScaling(10, 1);
    Log.i(TAG, "got StartStreamResponse");
    // checkArgument(response instanceof HlsStream, "Got unexpected StartStream Response");
    // onGotStreamResponse(getDefaultStream());
    mKickflip.startStream(mConfig.getStream(), new KickflipCallback() {
        @Override
        public void onSuccess(Response response) {
            // Request a thumbnail once the backend confirms the stream.
            mCamEncoder.requestThumbnailOnDeltaFrameWithScaling(10, 1);
            Log.i(TAG, "got StartStreamResponse");
            checkArgument(response instanceof HlsStream, "Got unexpected StartStream Response");
            HlsStream hlsStream = (HlsStream) response;
            mKickflip.getActiveUser().setStream(hlsStream);
            onGotStreamResponse(hlsStream);
        }
        @Override
        public void onError(KickflipException error) {
            Log.w(TAG, "Error getting start stream response! " + error);
        }
    });
}
// NOTE(review): despite its name, this currently just returns the live
// mStream field (possibly null); the hard-coded default construction below
// is disabled. Kept for the commented-out offline path in startRecording().
private HlsStream getDefaultStream() {
    /* HlsStream hlsStream = new HlsStream();
    hlsStream.setmBucket(SECRETS.AWS_BUCKET);
    hlsStream.setmAwsKey(SECRETS.AWS_KEY);
    hlsStream.setmAwsSecret(SECRETS.AWS_SECRET);
    hlsStream.setmPrefix("streams/android/22042015/");
    // hlsStream.setmRegion("US_EAST_1");
    */
    return mStream;
}
/**
 * Handles a successful start-stream response: stores the stream, configures
 * the S3 uploader with the stream's AWS credentials, flushes uploads queued
 * while waiting for credentials, and announces that buffering has begun.
 *
 * @param stream the HLS stream descriptor returned by the backend.
 */
private void onGotStreamResponse(HlsStream stream) {
    mStream = stream;
    if (mConfig.shouldAttachLocation()) {
        Kickflip.addLocationToStream(mContext, mStream, mEventBus);
    }
    // mStream.setTitle(mConfig.getTitle());
    // mStream.setDescription(mConfig.getDescription());
    // mStream.setExtraInfo(mConfig.getExtraInfo());
    // mStream.setIsPrivate(mConfig.isPrivate());
    if (VERBOSE) Log.i(TAG, "Got hls start stream " + stream);
    mS3Manager = new S3BroadcastManager(this, new BasicAWSCredentials(stream.getAwsKey(), stream.getAwsSecret()));
    mS3Manager.setRegion(mStream.getRegion());
    mS3Manager.addRequestInterceptor(mS3RequestInterceptor);
    // From here on new segment/manifest writes upload directly instead of queueing.
    mReadyToBroadcast = true;
    submitQueuedUploadsToS3();
    mEventBus.post(new BroadcastIsBufferingEvent());
    if (mBroadcastListener != null) {
        mBroadcastListener.onBroadcastStart();
    }
}
/**
 * Reports whether this broadcast has gone live, i.e. whether the
 * "broadcast is live" event has already been sent.
 *
 * @return true once the stream has been announced as live.
 */
public boolean isLive() {
    return this.mSentBroadcastLiveEvent;
}
/**
 * Stop broadcasting and release resources.
 * After this call this Broadcaster can no longer be used.
 */
@Override
public void stopRecording() {
    super.stopRecording();
    mSentBroadcastLiveEvent = false;
    if (mStream != null) {
        if (VERBOSE) Log.i(TAG, "Stopping Stream");
        // Notify the backend; the response may carry the final stream
        // representation, which replaces our local copy.
        mKickflip.stopStream(mStream, new KickflipCallback() {
            @Override
            public void onSuccess(Response response) {
                if (response instanceof HlsStream) {
                    mStream = (HlsStream) response;
                }
                if (VERBOSE) Log.i(TAG, "Got stop stream response " + response);
            }
            @Override
            public void onError(KickflipException error) {
                Log.w(TAG, "Error getting stop stream response! " + error);
            }
        });
    }
}
/**
 * Invoked by the S3 upload layer on an unrecoverable error: reports the
 * error to the listener and stops the recording. A missing listener means
 * nothing happens (matching the original behavior).
 *
 * @param errorMessage human-readable description of the S3 failure.
 */
public void onS3BroadcastFatalError(String errorMessage) {
    if (mBroadcastListener == null) {
        return;
    }
    mBroadcastListener.onBroadcastError(new KickflipException(errorMessage, 0));
    stopRecording();
}
/*
@Subscribe
public void onSegmentWritten(HlsSegmentWrittenEvent event) {
try {
File hlsSegment = event.getSegment();
queueOrSubmitUpload(keyForFilename(hlsSegment.getName()), hlsSegment);
if (isKitKat() && mConfig.isAdaptiveBitrate() && isRecording()) {
// Adjust bitrate to match expected filesize
long actualSegmentSizeBytes = hlsSegment.length();
long expectedSizeBytes = ((mConfig.getAudioBitrate() / 8) + (mVideoBitrate / 8)) * mConfig.getHlsSegmentDuration();
float filesizeRatio = actualSegmentSizeBytes / (float) expectedSizeBytes;
if (VERBOSE)
Log.i(TAG, "OnSegmentWritten. Segment size: " + (actualSegmentSizeBytes / 1000) + "kB. ratio: " + filesizeRatio);
if (filesizeRatio < .7) {
if (mLastRealizedBandwidthBytesPerSec != 0) {
// Scale bitrate while not exceeding available bandwidth
float scaledBitrate = mVideoBitrate * (1 / filesizeRatio);
float bandwidthBitrate = mLastRealizedBandwidthBytesPerSec * 8;
mVideoBitrate = (int) Math.min(scaledBitrate, bandwidthBitrate);
} else {
// Scale bitrate to match expected fileSize
mVideoBitrate *= (1 / filesizeRatio);
}
if (VERBOSE) Log.i(TAG, "Scaling video bitrate to " + mVideoBitrate + " bps");
adjustVideoBitrate(mVideoBitrate);
if ((mBroadcastListener != null) && (DebugSettingsFragment.DebugRecordingSettingsActive)) {
int videoBitrateKbps = (int) (mVideoBitrate / (8 * 1000.0));
mBroadcastListener.onBitrateChange(videoBitrateKbps, (int)(mLastRealizedBandwidthBytesPerSec / 1000.0), false, mS3Manager.pendingItemsQueueCount());
}
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
*/
/**
 * A .ts file was written in the recording directory.
 * <p/>
 * Use this opportunity to verify the segment is of expected size
 * given the target bitrate
 * <p/>
 * Called on a background thread
 */
@Subscribe
public void onSegmentWritten(HlsSegmentWrittenEvent event) {
    try {
        File hlsSegment = event.getSegment();
        queueOrSubmitUpload(keyForFilename(hlsSegment.getName()), hlsSegment);
        if (isKitKat() && mConfig.isAdaptiveBitrate() && isRecording()) {
            // Adjust bitrate to match expected filesize
            long actualSegmentSizeBytes = hlsSegment.length();
            long expectedSizeBytes = ((mConfig.getAudioBitrate() / 8) + (mVideoBitrate / 8)) * mConfig.getHlsSegmentDuration();
            float filesizeRatio = actualSegmentSizeBytes / (float) expectedSizeBytes;
            if (VERBOSE)
                Log.i(TAG, "OnSegmentWritten. Segment size: " + (actualSegmentSizeBytes / 1000) + "kB. ratio: " + filesizeRatio);
            double fileSizeRatioChangeLevel = 0.7f;
            // Rescale when the segment is much smaller than expected, or when
            // the upload queue has drained and the segment is LARGER than
            // expected (suggesting headroom to raise quality).
            if ((filesizeRatio < fileSizeRatioChangeLevel) || ((mS3Manager.pendingItemsQueueCount() == 0) && (filesizeRatio > 1))) {
                if (mLastRealizedBandwidthBytesPerSec != 0) {
                    // Scale bitrate while not exceeding available bandwidth
                    float scaledBitrate = 0;
                    if (filesizeRatio < 1) {
                        scaledBitrate = mVideoBitrate * (1 / filesizeRatio);
                    } else {
                        scaledBitrate = mVideoBitrate * filesizeRatio;
                        scaledBitrate *= 1.2f; // let's try it faster
                        Log.i(TAG, "Looks like the upload speed is better, trying to improve the video bitrate faster now!");
                    }
                    float bandwidthBitrate = mLastRealizedBandwidthBytesPerSec * 8;
                    mVideoBitrate = (int) Math.min(scaledBitrate, bandwidthBitrate);
                    mVideoBitrate *= 0.9f; // add some buffer
                } else {
                    // No bandwidth sample yet: scale bitrate to match expected fileSize
                    mVideoBitrate *= (1 / filesizeRatio);
                    mVideoBitrate *= 0.9f; // add some buffer
                }
                /* if (mS3Manager.pendingItemsQueueCount() > CRITICAL_QUEUE_SEGMENT_COUNT) {
                Log.i(TAG, "Too many items in the upload queue, adjusting video bitrate...");
                mVideoBitrate *= 0.7f;
                }
                */
                if (VERBOSE) Log.i(TAG, "Scaling video bitrate to " + mVideoBitrate + " bps");
                // Debug UI callback; "true" flags that the bitrate was adjusted.
                if ((mBroadcastListener != null) && (DebugSettingsFragment.DebugRecordingSettingsActive)) {
                    int videoBitrateKbps = (int) (mVideoBitrate / (8 * 1000.0));
                    mBroadcastListener.onBitrateChange(videoBitrateKbps, (int)(mLastRealizedBandwidthBytesPerSec / 1000.0), true, mS3Manager.pendingItemsQueueCount());
                }
                adjustVideoBitrate(mVideoBitrate);
            } else {
                /* if (mS3Manager.pendingItemsQueueCount() > CRITICAL_QUEUE_SEGMENT_COUNT) {
                Log.i(TAG, "Too many items in the upload queue, downgrading the video bitrate");
                mVideoBitrate *= 0.7f;
                adjustVideoBitrate(mVideoBitrate);
                }
                */
                // No adjustment; still report current numbers to the debug UI.
                if ((mBroadcastListener != null) && (DebugSettingsFragment.DebugRecordingSettingsActive)) {
                    int videoBitrateKbps = (int) (mVideoBitrate / (8 * 1000.0));
                    mBroadcastListener.onBitrateChange(videoBitrateKbps, (int)(mLastRealizedBandwidthBytesPerSec / 1000.0), false, mS3Manager.pendingItemsQueueCount());
                }
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
/*
private void adjustBitrateDynamically(int uploadByteRate) {
if (isKitKat() && mConfig.isAdaptiveBitrate() && isRecording()) {
mLastRealizedBandwidthBytesPerSec = uploadByteRate;
// Adjust video encoder bitrate per bandwidth of just-completed upload
if (VERBOSE) {
Log.i(TAG, "Bandwidth: " + (mLastRealizedBandwidthBytesPerSec / 1000.0) + " kBps. Encoder video: " + (mVideoBitrate / 8) / 1000.0 + " kBps, audio: " + ((mConfig.getAudioBitrate() / 8) / 1000.0) + " kbps");
}
if (mLastRealizedBandwidthBytesPerSec < (((mVideoBitrate + mConfig.getAudioBitrate()) / 8))) {
// The new bitrate is equal to the last upload bandwidth, never inferior to MIN_BITRATE, nor superior to the initial specified bitrate
mVideoBitrate = Math.max(Math.min(mLastRealizedBandwidthBytesPerSec * 8, mConfig.getVideoBitrate()), MIN_BITRATE);
if (mS3Manager.pendingItemsQueueCount() > CRITICAL_QUEUE_SEGMENT_COUNT) {
Log.i(TAG, "Too many items in the upload queue, downgrading the video bitrate");
mVideoBitrate *= 0.7f;
} else {
mVideoBitrate *= 0.9f; // add some buffer
}
if (VERBOSE) {
Log.i(TAG, String.format("Adjusting video bitrate to %f kBps. Bandwidth: %f kBps",
mVideoBitrate / (8 * 1000.0), mLastRealizedBandwidthBytesPerSec / 1000.0));
}
if ((mBroadcastListener != null) && (DebugSettingsFragment.DebugRecordingSettingsActive)) {
int videoBitrateKbps = (int) (mVideoBitrate / (8 * 1000.0));
mBroadcastListener.onBitrateChange(videoBitrateKbps, (int) (mLastRealizedBandwidthBytesPerSec / 1000.0), true, mS3Manager.pendingItemsQueueCount());
}
adjustVideoBitrate(mVideoBitrate);
} else { // with current upload parameters, we can do better recording bitrate, try it
if (mLastRealizedBandwidthBytesPerSec != 0) {
long expectedSizeBytes = ((mConfig.getAudioBitrate() / 8) + (mVideoBitrate / 8)) * mConfig.getHlsSegmentDuration();
float bandwidthPotentialRatio = uploadByteRate / (float) expectedSizeBytes;
if ((mS3Manager.pendingItemsQueueCount() == 0) && (bandwidthPotentialRatio > 1.3)) {
mVideoBitrate *= 1.2f;
adjustVideoBitrate(mVideoBitrate);
Log.i(TAG, "Looks like the upload speed is better, trying to improve the video bitrate");
Log.i(TAG, String.format("Adjusting video bitrate to %f kBps. Bandwidth: %f kBps",
mVideoBitrate / (8 * 1000.0), mLastRealizedBandwidthBytesPerSec / 1000.0));
}
}
if ((mBroadcastListener != null) && (DebugSettingsFragment.DebugRecordingSettingsActive)) {
int videoBitrateKbps = (int) (mVideoBitrate / (8 * 1000.0));
mBroadcastListener.onBitrateChange(videoBitrateKbps, (int)(mLastRealizedBandwidthBytesPerSec / 1000.0), false, mS3Manager.pendingItemsQueueCount());
}
}
}
}
*/
/**
* An S3 .ts segment upload completed.
* <p/>
* Use this opportunity to adjust bitrate based on the bandwidth
* measured during this segment's transmission.
* <p/>
* Called on a background thread
*/
/* private void onSegmentUploaded(S3UploadEvent uploadEvent) {
if (mDeleteAfterUploading) {
boolean deletedFile = uploadEvent.getFile().delete();
if (VERBOSE)
Log.i(TAG, "Deleting uploaded segment. " + uploadEvent.getFile().getAbsolutePath() + " Succcess: " + deletedFile);
}
if (!mSentBroadcastLiveEvent) {
mLastRealizedBandwidthBytesPerSec = uploadEvent.getUploadByteRate();
Log.i(TAG, "Checking minimum bandwidth... current value: " + Integer.toString(mLastRealizedBandwidthBytesPerSec));
if ((mLastRealizedBandwidthBytesPerSec != 0) && (mLastRealizedBandwidthBytesPerSec < Constants.MINIMUM_UPLOAD_BITRATE)) {
Log.d(Util.TAG, "Detected upload speed " + Integer.toString(mLastRealizedBandwidthBytesPerSec) + ", expected minimum: " + Integer.toString(Constants.MINIMUM_UPLOAD_BITRATE));
mBroadcastListener.onLowUploadBitrateDetected();
}
}
try {
int uploadByteRate = uploadEvent.getUploadByteRate();
adjustBitrateDynamically(uploadByteRate);
} catch (Exception e) {
Log.i(TAG, "OnSegUpload excep");
e.printStackTrace();
}
}
*/
/**
 * An S3 .ts segment upload completed. Deletes the local file (if configured),
 * checks the measured upload bandwidth against the required minimum before
 * the broadcast goes live, and lowers the video bitrate when bandwidth can't
 * sustain the current encoder rate.
 *
 * @param uploadEvent carries the uploaded file and measured upload byte rate.
 */
private void onSegmentUploaded(S3UploadEvent uploadEvent) {
    if (mDeleteAfterUploading) {
        boolean deletedFile = uploadEvent.getFile().delete();
        if (VERBOSE)
            Log.i(TAG, "Deleting uploaded segment. " + uploadEvent.getFile().getAbsolutePath() + " Succcess: " + deletedFile);
    }
    if (!mSentBroadcastLiveEvent) {
        // Before going live, warn the listener if upload speed is below the
        // configured minimum.
        mLastRealizedBandwidthBytesPerSec = uploadEvent.getUploadByteRate();
        Log.i(TAG, "Checking minimum bandwidth... current value: " + Integer.toString(mLastRealizedBandwidthBytesPerSec));
        if ((mLastRealizedBandwidthBytesPerSec != 0) && (mLastRealizedBandwidthBytesPerSec < Constants.MINIMUM_UPLOAD_BITRATE)) {
            Log.d(Util.TAG, "Detected upload speed " + Integer.toString(mLastRealizedBandwidthBytesPerSec) + ", expected minimum: " + Integer.toString(Constants.MINIMUM_UPLOAD_BITRATE));
            mBroadcastListener.onLowUploadBitrateDetected();
        }
    }
    try {
        if (isKitKat() && mConfig.isAdaptiveBitrate() && isRecording()) {
            mLastRealizedBandwidthBytesPerSec = uploadEvent.getUploadByteRate();
            // Adjust video encoder bitrate per bandwidth of just-completed upload
            if (VERBOSE) {
                Log.i(TAG, "Bandwidth: " + (mLastRealizedBandwidthBytesPerSec / 1000.0) + " kBps. Encoder: " + ((mVideoBitrate + mConfig.getAudioBitrate()) / 8) / 1000.0 + " kBps");
            }
            if (mLastRealizedBandwidthBytesPerSec < (((mVideoBitrate + mConfig.getAudioBitrate()) / 8))) {
                // The new bitrate is equal to the last upload bandwidth, never inferior to MIN_BITRATE, nor superior to the initial specified bitrate
                mVideoBitrate = Math.max(Math.min(mLastRealizedBandwidthBytesPerSec * 8, mConfig.getVideoBitrate()), MIN_BITRATE);
                if (VERBOSE) {
                    Log.i(TAG, String.format("Adjusting video bitrate to %f kBps. Bandwidth: %f kBps",
                            mVideoBitrate / (8 * 1000.0), mLastRealizedBandwidthBytesPerSec / 1000.0));
                }
                if ((mBroadcastListener != null) && (DebugSettingsFragment.DebugRecordingSettingsActive)) {
                    int videoBitrateKbps = (int) (mVideoBitrate / (8 * 1000.0));
                    mBroadcastListener.onBitrateChange(videoBitrateKbps, (int) (mLastRealizedBandwidthBytesPerSec / 1000.0), false, mS3Manager.pendingItemsQueueCount());
                }
                adjustVideoBitrate(mVideoBitrate);
            }
        }
    } catch (Exception e) {
        Log.i(TAG, "OnSegUpload excep");
        e.printStackTrace();
    }
}
/**
 * A .m3u8 file was written in the recording directory.
 * <p/>
 * Called on a background thread
 */
@Subscribe
public void onManifestUpdated(HlsManifestWrittenEvent e) {
    if (!isRecording()) {
        // Recording already stopped: this is the final manifest; tell the
        // global listener the broadcast is over.
        if (Kickflip.getBroadcastListener() != null) {
            if (VERBOSE) Log.i(TAG, "Sending onBroadcastStop");
            Kickflip.getBroadcastListener().onBroadcastStop();
        }
    }
    if (VERBOSE) Log.i(TAG, "onManifestUpdated. Last segment? " + !isRecording());
    // Copy m3u8 at this moment and queue it to uploading
    // service. The snapshot is suffixed with the segment counter so each
    // revision gets a distinct local file.
    final File copy = new File(mManifestSnapshotDir, e.getManifestFile().getName()
            .replace(".m3u8", "_" + mNumSegmentsWritten + ".m3u8"));
    try {
        if (VERBOSE) {
            Log.i(TAG, "Copying " + e.getManifestFile().getAbsolutePath() + " to " + copy.getAbsolutePath());
        }
        FileUtils.copy(e.getManifestFile(), copy);
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    // Every snapshot uploads under the same remote key "index.m3u8".
    queueOrSubmitUpload(keyForFilename("index.m3u8"), copy);
    appendLastManifestEntryToEventManifest(e.getManifestFile(), !isRecording());
    mNumSegmentsWritten++;
}
/**
 * An S3 .m3u8 upload completed.
 * <p/>
 * Called on a background thread
 */
private void onManifestUploaded(S3UploadEvent uploadEvent) {
    if (mDeleteAfterUploading) {
        if (VERBOSE) Log.i(TAG, "Deleting " + uploadEvent.getFile().getAbsolutePath());
        uploadEvent.getFile().delete();
        String uploadUrl = uploadEvent.getDestinationUrl();
        // Last manifest of the session: tear down the watcher and wipe the
        // whole local recording directory.
        if (uploadEvent.isLastUploadFile()) {//(uploadUrl.substring(uploadUrl.lastIndexOf(File.separator) + 1).equals("vod.m3u8")) {
            if (VERBOSE) Log.i(TAG, "Deleting " + mConfig.getOutputDirectory());
            mFileObserver.stopWatching();
            FileUtils.deleteDirectory(mConfig.getOutputDirectory());
        }
    }
    if (!mSentBroadcastLiveEvent) {
        // NOTE(review): the first uploaded manifest is intentionally skipped
        // (firstManifestSent latch); only from the second one onward is the
        // stream promoted to "streaming" - confirm this is the intended warm-up.
        if (firstManifestSent) {
            if ((mKickflip != null) && (mStream.getStreamState() == Constants.kStreamStateReady)) {
                updateStreamStateToStreaming();
            }
        } else firstManifestSent = true;
    }
}
/**
 * Asks the backend to mark the stream as "streaming". On success, posts the
 * live event on the bus, latches mSentBroadcastLiveEvent and notifies the
 * listener that the broadcast is streaming.
 */
public void updateStreamStateToStreaming() {
    mKickflip.updateStreamState(mStream, Constants.kStreamStateStreaming, new KickflipCallback() {
        @Override
        public void onSuccess(Response response) {
            Log.d(TAG, "Stream state changed to -Streaming- successfully");
            Log.i(TAG, "onBroadcastStreaming @ " + mStream.getKickflipUrl());
            mEventBus.post(new BroadcastIsLiveEvent(((HlsStream) mStream).getKickflipUrl()));
            mSentBroadcastLiveEvent = true;
            if (mBroadcastListener != null)
                mBroadcastListener.onBroadcastStreaming(mStream, mLastRealizedBandwidthBytesPerSec);
        }
        @Override
        public void onError(KickflipException error) {
            Log.d(TAG, "Stream state changed to -Streaming- failed!");
            Log.e(TAG, error.getMessage());
        }
    });
}
/**
 * Forwards reconnection progress to the registered listener.
 * Fix: guard against a null listener, consistent with every other listener
 * callback in this class (previously this could throw a NullPointerException
 * when no listener was set).
 *
 * @param networkIssuesPresent whether network problems triggered the reconnect attempt.
 */
public void onTryingToReconnect(final boolean networkIssuesPresent) {
    if (mBroadcastListener != null) {
        mBroadcastListener.onTryingToReconnect(networkIssuesPresent);
    }
}
/**
 * Announces that the broadcast endpoint is ready, exactly once per session:
 * posts the ready event on the bus and notifies the listener.
 *
 * @param stream the stream that became ready (the listener is handed the
 *               field mStream, as before).
 */
private void onStreamReady(Stream stream) {
    if (mSentBroadcastReadyEvent) {
        return; // already announced for this broadcast
    }
    mEventBus.post(new BroadcastIsReadyEvent());
    mSentBroadcastReadyEvent = true;
    if (mBroadcastListener != null) {
        mBroadcastListener.onBroadcastReady(mStream);
    }
}
/**
 * A thumbnail was written in the recording directory.
 * <p/>
 * Uploads the thumbnail and, if Facebook story creation is enabled, also
 * uploads an overlay-decorated copy.
 * <p/>
 * Called on a background thread
 */
@Subscribe
public void onThumbnailWritten(ThumbnailWrittenEvent e) {
    try {
        File file = e.getThumbnailFile();
        queueOrSubmitUpload(keyForFilename(mStream.getThumbnailFileName()), file);
        if ((getSessionConfig() != null) && (getSessionConfig().isFbStoryCreationEnabled())) {
            File fbThumbnailFile = createCopyOfFile(file);
            addOverlayToBitmap(fbThumbnailFile);
            queueOrSubmitUpload(keyForFilename(mStream.getFBThumbnailFileName()), fbThumbnailFile);
        }
    } catch (Exception ex) {
        // Fix: corrected "thumbanil" typo in the log message.
        Log.i(TAG, "Error writing thumbnail");
        ex.printStackTrace();
    }
}
/**
 * Copies {@code file} into a sibling file named after the stream's Facebook
 * thumbnail file name, using a channel transfer.
 * Fix: if opening the output stream fails after the input stream was opened,
 * the input stream is now closed instead of leaking.
 *
 * @param file the source file to copy.
 * @return the destination file (returned even if the copy failed, as before).
 */
private File createCopyOfFile(File file) {
    File copyFile = new File(file.getParent(), mStream.getFBThumbnailFileName());
    FileInputStream inStream = null;
    FileOutputStream outStream = null;
    try {
        inStream = new FileInputStream(file);
        outStream = new FileOutputStream(copyFile);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        // Fix: don't leak the input stream when output creation failed.
        if ((inStream != null) && (outStream == null)) {
            try {
                inStream.close();
            } catch (IOException closeEx) {
                closeEx.printStackTrace();
            }
        }
    }
    if ((inStream != null) && (outStream != null)) {
        FileChannel inChannel = inStream.getChannel();
        FileChannel outChannel = outStream.getChannel();
        try {
            inChannel.transferTo(0, inChannel.size(), outChannel);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Closing the streams also closes their channels.
            try {
                inStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                outStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    return copyFile;
}
/**
 * Renders the Facebook-poster overlay layout on top of the thumbnail stored
 * in {@code file}, scales the composite to the Facebook minimum poster size
 * and writes it back to the same file as PNG.
 * Fixes: removed the unused local {@code path}; if the output stream cannot
 * be opened the method now returns early instead of crashing with a
 * NullPointerException on compress().
 *
 * @param file the thumbnail file to decorate; overwritten in place.
 */
private void addOverlayToBitmap(File file) {
    LayoutInflater mInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    // Inflate the overlay layout and back it with the thumbnail bitmap.
    RelativeLayout view = new RelativeLayout(mContext);
    mInflater.inflate(R.layout.overlay_thumbnail, view, true);
    ImageView backgroundImageView = (ImageView) view.findViewById(R.id.overlayThumbnailBackgroundImageView);
    Bitmap backgroundBitmap = BitmapFactory.decodeFile(file.getAbsolutePath());
    backgroundImageView.setImageBitmap(backgroundBitmap);
    // The view has no parent, so give it wrap-content params, then measure
    // and lay it out manually before drawing it off-screen.
    view.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT,
            RelativeLayout.LayoutParams.WRAP_CONTENT));
    view.measure(View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED),
            View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
    view.layout(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight());
    // Render the laid-out view (and all of its children) into a bitmap.
    Bitmap bitmapWithOverlay = Bitmap.createBitmap(view.getMeasuredWidth(),
            view.getMeasuredHeight(),
            Bitmap.Config.ARGB_8888);
    Canvas c = new Canvas(bitmapWithOverlay);
    view.draw(c);
    OutputStream fOut;
    try {
        fOut = new FileOutputStream(file);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        return; // fix: previously fell through and NPE'd on compress()
    }
    bitmapWithOverlay = getScaledBitmap(bitmapWithOverlay, FACEBOOK_POSTER_MIN_WIDTH, FACEBOOK_POSTER_MIN_HEIGHT);
    bitmapWithOverlay.compress(Bitmap.CompressFormat.PNG, 75, fOut);
    try {
        fOut.flush();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            fOut.close(); // do not forget to close the stream
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
    /**
     * Scale a bitmap to exactly the requested dimensions using filtering;
     * the aspect ratio is not preserved.
     *
     * @param b         source bitmap
     * @param reqWidth  target width in pixels
     * @param reqHeight target height in pixels
     * @return the scaled bitmap
     */
    public static Bitmap getScaledBitmap(Bitmap b, int reqWidth, int reqHeight)
    {
        return Bitmap.createScaledBitmap(b, reqWidth, reqHeight, true);
    }
    /**
     * A thumbnail upload completed.
     * <p/>
     * Called on a background thread. When the uploaded file is this stream's
     * own thumbnail, the stream state is advanced to "ready" via the Kickflip
     * API; the server's echoed stream object replaces the local one.
     */
    private void onThumbnailUploaded(S3UploadEvent uploadEvent) {
        // Only react to this stream's thumbnail; other uploads also pass through here.
        if((uploadEvent.getDestinationUrl() != null) && (uploadEvent.getDestinationUrl().endsWith(mStream.getThumbnailFileName()))) {
            Log.i(TAG, "Thumbnail successfully uploaded to the the server, changing stream state to ready");
            mKickflip.updateStreamState(mStream, Constants.kStreamStateReady, new KickflipCallback() {
                @Override
                public void onSuccess(Response response) {
                    Log.i(TAG, "state changed to ready");
                    // The API echoes the updated stream back; adopt it as current.
                    checkArgument(response instanceof HlsStream, "Got unexpected updateStreamState Response");
                    HlsStream hlsStream = (HlsStream) response;
                    mStream = hlsStream;
                    mKickflip.getActiveUser().setStream(hlsStream);
                    checkArgument((hlsStream.getStreamState() == Constants.kStreamStateReady), "Got updateStreamState Response, but the state is not changed!");
                    onStreamReady(hlsStream);
                }
                @Override
                public void onError(KickflipException error) {
                    Log.w(TAG, "Error getting updateStreamState stream response! " + error);
                }
            });
        }
        // The local copy is no longer needed once it is on S3.
        if (mDeleteAfterUploading) uploadEvent.getFile().delete();
    }
    /** Bus callback: a location was attached to the stream; re-send its metadata. */
    @Subscribe
    public void onStreamLocationAdded(StreamLocationAddedEvent event) {
        sendStreamMetaData();
    }
    /** Bus callback for events with no other subscriber; logged for debugging only. */
    @Subscribe
    public void onDeadEvent(DeadEvent e) {
        if (VERBOSE) Log.i(TAG, "DeadEvent ");
    }
    /** Bus callback fired when the muxer finishes; intentionally a no-op for now. */
    @Subscribe
    public void onMuxerFinished(MuxerFinishedEvent e) {
        // TODO: Broadcaster uses AVRecorder reset()
        // this seems better than nulling and recreating Broadcaster
        // since it should be usable as a static object for
        // bg recording
    }
private void sendStreamMetaData() {
if (mStream != null) {
mKickflip.setStreamInfo(mStream, null);
}
}
/**
* Construct an S3 Key for a given filename
*
*/
private String keyForFilename(String fileName) {
if (fileName.endsWith(".jpg")) {
return mStream.getThumbnailPathPrefix() + "/" + fileName;
} else {
return mStream.getAwsS3Prefix() + "/" + fileName;
}
}
/**
* Handle an upload, either submitting to the S3 client
* or queueing for submission once credentials are ready
*
* @param key destination key
* @param file local file
*/
private void queueOrSubmitUpload(String key, File file) {
if (mReadyToBroadcast) {
submitUpload(key, file);
} else {
if (VERBOSE) Log.i(TAG, "queueing " + key + " until S3 Credentials available");
queueUpload(key, file);
}
}
/**
* Queue an upload for later submission to S3
*
* @param key destination key
* @param file local file
*/
private void queueUpload(String key, File file) {
if (mUploadQueue == null)
mUploadQueue = new ArrayDeque<>();
mUploadQueue.add(new Pair<>(key, file));
}
    /**
     * Submit all queued uploads to the S3 client.
     * <p/>
     * NOTE(review): the queue is iterated but never drained, so a second call
     * would resubmit every queued upload — confirm this is only invoked once,
     * when S3 credentials first become available.
     */
    private void submitQueuedUploadsToS3() {
        if (mUploadQueue == null) return;
        for (Pair<String, File> pair : mUploadQueue) {
            submitUpload(pair.first, pair.second);
        }
    }
    /** Submit a non-final upload (convenience overload with lastUpload=false). */
    private void submitUpload(final String key, final File file) {
        submitUpload(key, file, false);
    }
    /**
     * Hand a file to the S3 manager for upload into the stream's bucket.
     *
     * @param key        destination S3 key
     * @param file       local file to upload
     * @param lastUpload true when this is the final upload of the broadcast
     */
    private void submitUpload(final String key, final File file, boolean lastUpload) {
        mS3Manager.queueUpload(mStream.getAwsS3Bucket(), key, file, lastUpload);
    }
/**
* An S3 Upload completed.
* <p/>
* Called on a background thread
*/
public void onS3UploadComplete(S3UploadEvent uploadEvent) {
if (VERBOSE) Log.i(TAG, "Upload completed for " + uploadEvent.getDestinationUrl());
if (uploadEvent.getDestinationUrl().contains(".m3u8")) {
onManifestUploaded(uploadEvent);
} else if (uploadEvent.getDestinationUrl().contains(".ts")) {
onSegmentUploaded(uploadEvent);
} else if (uploadEvent.getDestinationUrl().contains(".jpg")) {
onThumbnailUploaded(uploadEvent);
}
}
    /** @return the SessionConfig backing this broadcaster */
    public SessionConfig getSessionConfig() {
        return mConfig;
    }
private void writeEventManifestHeader(int targetDuration) {
FileUtils.writeStringToFile(
String.format("#EXTM3U\n" +
"#EXT-X-PLAYLIST-TYPE:VOD\n" +
"#EXT-X-VERSION:3\n" +
"#EXT-X-MEDIA-SEQUENCE:0\n" +
"#EXT-X-TARGETDURATION:%d\n", targetDuration),
mVodManifest, false
);
}
    /**
     * Append the tail of a live manifest to the cumulative VOD manifest:
     * the last 2 lines normally, or 3 when this is the final entry
     * (presumably to include the closing ENDLIST tag — TODO confirm against
     * FileUtils.tail2). On the final entry the completed VOD manifest is
     * queued as the broadcast's last S3 upload.
     */
    private void appendLastManifestEntryToEventManifest(File sourceManifest, boolean lastEntry) {
        String result = FileUtils.tail2(sourceManifest, lastEntry ? 3 : 2);
        FileUtils.writeStringToFile(result, mVodManifest, true);
        if (lastEntry) {
            submitUpload(keyForFilename(Stream.HISTORICAL_VIDEO_FILENAME), mVodManifest, true);
            if (VERBOSE) Log.i(TAG, "Queued master manifest " + mVodManifest.getAbsolutePath());
        }
    }
    /**
     * Intercepts outgoing S3 PUTs: the live "index.m3u8" manifest is forced to
     * Cache-Control: max-age=0 so clients always re-fetch the latest playlist.
     */
    S3BroadcastManager.S3RequestInterceptor mS3RequestInterceptor = new S3BroadcastManager.S3RequestInterceptor() {
        @Override
        public void interceptRequest(PutObjectRequest request) {
            if(request.getKey().contains("index.m3u8")) {
                // Metadata object is created once and reused for later requests.
                if (mS3ManifestMeta == null) {
                    mS3ManifestMeta = new ObjectMetadata();
                }
                mS3ManifestMeta.setCacheControl("max-age=0");
                request.setMetadata(mS3ManifestMeta);
            }
        }
    };
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.pregelix.dataflow;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
import edu.uci.ics.hyracks.hdfs.ContextFactory;
import edu.uci.ics.hyracks.hdfs2.dataflow.FileSplitsFactory;
import edu.uci.ics.pregelix.api.graph.Vertex;
import edu.uci.ics.pregelix.api.io.VertexInputFormat;
import edu.uci.ics.pregelix.api.io.VertexReader;
import edu.uci.ics.pregelix.api.util.BspUtils;
import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
/**
 * Hyracks operator that scans vertex input file splits and pushes
 * (vertexId, serialized vertex) tuples downstream. Splits scheduled for a
 * node are claimed cooperatively by that node's partitions via the shared
 * {@code executed} flags; vertices too large for a tree-storage page are
 * marked as spilled to HDFS rather than inlined.
 */
@SuppressWarnings("rawtypes")
public class VertexFileScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
    private static final long serialVersionUID = 1L;
    // Serializable factory wrapping the HDFS file splits.
    private final FileSplitsFactory splitsFactory;
    // Recreates the Hadoop Configuration on the task side.
    private final IConfigurationFactory confFactory;
    // Each output tuple has two fields: vertex id and vertex body.
    private final int fieldSize = 2;
    // Node id that should read split i (parallel to 'executed').
    private final String[] scheduledLocations;
    // executed[i] is set once some partition has claimed split i.
    private final boolean[] executed;
    /**
     * @param spec the job specification this source operator belongs to
     */
    public VertexFileScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, List<InputSplit> splits,
            String[] scheduledLocations, IConfigurationFactory confFactory) throws HyracksException {
        super(spec, 0, 1);
        List<FileSplit> fileSplits = new ArrayList<FileSplit>();
        for (int i = 0; i < splits.size(); i++) {
            fileSplits.add((FileSplit) splits.get(i));
        }
        this.splitsFactory = new FileSplitsFactory(fileSplits);
        this.confFactory = confFactory;
        this.scheduledLocations = scheduledLocations;
        this.executed = new boolean[scheduledLocations.length];
        Arrays.fill(executed, false);
        this.recordDescriptors[0] = rd;
    }
    @Override
    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
            throws HyracksDataException {
        final List<FileSplit> splits = splitsFactory.getSplits();
        return new AbstractUnaryOutputSourceOperatorNodePushable() {
            private final ContextFactory ctxFactory = new ContextFactory();
            private String jobId;
            @Override
            public void initialize() throws HyracksDataException {
                try {
                    Configuration conf = confFactory.createConfiguration(ctx);
                    //get the info for spilling vertices to HDFS
                    jobId = BspUtils.getJobId(conf);
                    writer.open();
                    // Scan every split scheduled for this node; the claim flags are
                    // shared across this node's partitions, so take them under the lock.
                    for (int i = 0; i < scheduledLocations.length; i++) {
                        if (scheduledLocations[i].equals(ctx.getJobletContext().getApplicationContext().getNodeId())) {
                            /**
                             * pick one from the FileSplit queue; skip splits another
                             * partition has already claimed
                             */
                            synchronized (executed) {
                                if (!executed[i]) {
                                    executed[i] = true;
                                } else {
                                    continue;
                                }
                            }
                            loadVertices(ctx, conf, i);
                        }
                    }
                    writer.close();
                } catch (Exception e) {
                    throw new HyracksDataException(e);
                }
            }
            /**
             * Load the vertices of one split and append them, frame by frame,
             * to the downstream writer.
             *
             * @parameter IHyracks ctx
             * @throws IOException
             * @throws IllegalAccessException
             * @throws InstantiationException
             * @throws ClassNotFoundException
             * @throws InterruptedException
             */
            @SuppressWarnings("unchecked")
            private void loadVertices(final IHyracksTaskContext ctx, Configuration conf, int splitId)
                    throws IOException, ClassNotFoundException, InterruptedException, InstantiationException,
                    IllegalAccessException, NoSuchFieldException, InvocationTargetException {
                // A vertex larger than half a vertex frame cannot fit into a
                // tree-storage page and will be spilled instead.
                int treeVertexSizeLimit = IterationUtils.getVFrameSize(ctx) / 2;
                int dataflowPageSize = ctx.getFrameSize();
                ByteBuffer frame = ctx.allocateFrame();
                FrameTupleAppender appender = new FrameTupleAppender(dataflowPageSize);
                appender.reset(frame, true);
                VertexInputFormat vertexInputFormat = BspUtils.createVertexInputFormat(conf);
                InputSplit split = splits.get(splitId);
                TaskAttemptContext mapperContext = ctxFactory.createContext(conf, splitId);
                mapperContext.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
                VertexReader vertexReader = vertexInputFormat.createVertexReader(split, mapperContext);
                vertexReader.initialize(split, mapperContext);
                Vertex readerVertex = BspUtils.createVertex(mapperContext.getConfiguration());
                ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldSize);
                DataOutput dos = tb.getDataOutput();
                IterationUtils.setJobContext(BspUtils.getJobId(conf), ctx, mapperContext);
                Vertex.taskContext = mapperContext;
                /**
                 * empty vertex value, substituted when the reader returns no value
                 */
                Writable emptyVertexValue = BspUtils.createVertexValue(conf);
                while (vertexReader.nextVertex()) {
                    readerVertex = vertexReader.getCurrentVertex();
                    tb.reset();
                    if (readerVertex.getVertexId() == null) {
                        throw new IllegalArgumentException("loadVertices: Vertex reader returned a vertex "
                                + "without an id! - " + readerVertex);
                    }
                    if (readerVertex.getVertexValue() == null) {
                        readerVertex.setVertexValue(emptyVertexValue);
                    }
                    // Build the two-field tuple: vertex id, then the full vertex.
                    WritableComparable vertexId = readerVertex.getVertexId();
                    vertexId.write(dos);
                    tb.addFieldEndOffset();
                    readerVertex.write(dos);
                    tb.addFieldEndOffset();
                    if (tb.getSize() >= treeVertexSizeLimit || tb.getSize() > dataflowPageSize) {
                        //if (tb.getSize() < dataflowPageSize) {
                        //spill vertex to HDFS if it cannot fit into a tree storage page;
                        //the tuple is rebuilt with the spilled form of the vertex
                        String pathStr = BspUtils.TMP_DIR + jobId + File.separator + vertexId;
                        readerVertex.setSpilled(pathStr);
                        tb.reset();
                        vertexId.write(dos);
                        tb.addFieldEndOffset();
                        //vertex content will be spilled to HDFS
                        readerVertex.write(dos);
                        tb.addFieldEndOffset();
                        readerVertex.setUnSpilled();
                    }
                    // Flush the current frame when the tuple does not fit, then retry.
                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                        if (appender.getTupleCount() <= 0) {
                            throw new IllegalStateException("zero tuples in a frame!");
                        }
                        FrameUtils.flushFrame(frame, writer);
                        appender.reset(frame, true);
                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                            //this place should never be reached, otherwise it is a bug
                            throw new IllegalStateException(
                                    "An overflow vertex content should not be flushed into bulkload dataflow.");
                        }
                    }
                }
                vertexReader.close();
                // Flush the final, partially-filled frame.
                if (appender.getTupleCount() > 0) {
                    FrameUtils.flushFrame(frame, writer);
                }
                // NOTE(review): explicit GC after each split looks intentional
                // (large short-lived buffers) — confirm before removing.
                System.gc();
            }
        };
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.logging;
import hudson.FeedAdapter;
import hudson.Functions;
import hudson.init.Initializer;
import static hudson.init.InitMilestone.PLUGINS_PREPARED;
import hudson.model.AbstractModelObject;
import jenkins.model.Jenkins;
import hudson.model.RSS;
import hudson.util.CopyOnWriteMap;
import jenkins.model.JenkinsLocationConfiguration;
import jenkins.model.ModelObjectWithChildren;
import jenkins.model.ModelObjectWithContextMenu.ContextMenu;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerProxy;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.HttpRedirect;
import org.kohsuke.stapler.interceptor.RequirePOST;
import javax.servlet.ServletException;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
/**
 * Owner of {@link LogRecorder}s, bound to "/log".
 *
 * @author Kohsuke Kawaguchi
 */
public class LogRecorderManager extends AbstractModelObject implements ModelObjectWithChildren, StaplerProxy {
    /**
     * {@link LogRecorder}s keyed by their {@linkplain LogRecorder#name name}.
     */
    public transient final Map<String,LogRecorder> logRecorders = new CopyOnWriteMap.Tree<>();
    public String getDisplayName() {
        return Messages.LogRecorderManager_DisplayName();
    }
    public String getSearchUrl() {
        return "/log";
    }
    /** Stapler URL binding: resolves /log/{token} to the recorder of that name. */
    public LogRecorder getDynamic(String token) {
        return getLogRecorder(token);
    }
    /** @return the recorder with the given name, or null if none exists */
    public LogRecorder getLogRecorder(String token) {
        return logRecorders.get(token);
    }
    /** Directory under JENKINS_HOME where recorder configurations are stored. */
    static File configDir() {
        return new File(Jenkins.get().getRootDir(), "log");
    }
    /**
     * Loads the configuration from disk, replacing any in-memory recorders.
     */
    public void load() throws IOException {
        logRecorders.clear();
        File dir = configDir();
        // Missing directory (fresh install) simply means no recorders.
        File[] files = dir.listFiles((FileFilter)new WildcardFileFilter("*.xml"));
        if(files==null) return;
        for (File child : files) {
            String name = child.getName();
            name = name.substring(0,name.length()-4); // cut off ".xml"
            LogRecorder lr = new LogRecorder(name);
            lr.load();
            logRecorders.put(name,lr);
        }
    }
    /**
     * Creates a new log recorder.
     */
    @RequirePOST
    public HttpResponse doNewLogRecorder(@QueryParameter String name) {
        Jenkins.checkGoodName(name);
        logRecorders.put(name,new LogRecorder(name));
        // redirect to the config screen
        return new HttpRedirect(name+"/configure");
    }
    /** Builds the breadcrumb context menu: "all logs" plus one entry per recorder. */
    public ContextMenu doChildrenContextMenu(StaplerRequest request, StaplerResponse response) throws Exception {
        ContextMenu menu = new ContextMenu();
        menu.add("all","All Jenkins Logs");
        for (LogRecorder lr : logRecorders.values()) {
            menu.add(lr.getSearchUrl(), lr.getDisplayName());
        }
        return menu;
    }
    /**
     * Configure the logging level of a named JUL logger; "inherit" clears
     * the explicit level so the parent logger's level applies.
     */
    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings("LG_LOST_LOGGER_DUE_TO_WEAK_REFERENCE")
    @RequirePOST
    public HttpResponse doConfigLogger(@QueryParameter String name, @QueryParameter String level) {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        Level lv;
        if(level.equals("inherit"))
            lv = null;
        else
            lv = Level.parse(level.toUpperCase(Locale.ENGLISH));
        Logger.getLogger(name).setLevel(lv);
        return new HttpRedirect("levels");
    }
    /**
     * RSS feed for log entries.
     */
    public void doRss( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
        doRss(req, rsp, Jenkins.logRecords);
    }
    /**
     * Renders the given log recorders as RSS. An optional "level" request
     * parameter filters out records below that JUL level.
     */
    /*package*/ static void doRss(StaplerRequest req, StaplerResponse rsp, List<LogRecord> logs) throws IOException, ServletException {
        // filter log records based on the log level
        String level = req.getParameter("level");
        if(level!=null) {
            Level threshold = Level.parse(level);
            List<LogRecord> filtered = new ArrayList<>();
            for (LogRecord r : logs) {
                if(r.getLevel().intValue() >= threshold.intValue())
                    filtered.add(r);
            }
            logs = filtered;
        }
        // Adapter mapping LogRecord fields onto RSS entry fields.
        RSS.forwardToRss("Hudson log","", logs, new FeedAdapter<LogRecord>() {
            public String getEntryTitle(LogRecord entry) {
                return entry.getMessage();
            }
            public String getEntryUrl(LogRecord entry) {
                return "log"; // TODO: one URL for one log entry?
            }
            public String getEntryID(LogRecord entry) {
                return String.valueOf(entry.getSequenceNumber());
            }
            public String getEntryDescription(LogRecord entry) {
                return Functions.printLogRecord(entry);
            }
            public Calendar getEntryTimestamp(LogRecord entry) {
                GregorianCalendar cal = new GregorianCalendar();
                cal.setTimeInMillis(entry.getMillis());
                return cal;
            }
            public String getEntryAuthor(LogRecord entry) {
                return JenkinsLocationConfiguration.get().getAdminAddress();
            }
        },req,rsp);
    }
    /** Loads saved recorders at startup, before plugins are prepared. */
    @Initializer(before=PLUGINS_PREPARED)
    public static void init(Jenkins h) throws IOException {
        h.getLog().load();
    }
    /** Gate all Stapler access behind ADMINISTER unless the escape hatch is set. */
    @Override
    @Restricted(NoExternalUse.class)
    public Object getTarget() {
        if (!SKIP_PERMISSION_CHECK) {
            Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        }
        return this;
    }
    /**
     * Escape hatch for StaplerProxy-based access control
     */
    @Restricted(NoExternalUse.class)
    public static /* Script Console modifiable */ boolean SKIP_PERMISSION_CHECK = Boolean.getBoolean(LogRecorderManager.class.getName() + ".skipPermissionCheck");
}
| |
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.jdbc.connection;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_ALWAYS_VERIFY;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_AUTO_COMMIT;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_DEBUG;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_DRIVER;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_PASSWORD;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_TEST_STATEMENT;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_URL_ROOT;
import static com.adaptris.jdbc.connection.FailoverConfig.JDBC_USERNAME;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import org.junit.Before;
import org.junit.Test;
import com.adaptris.core.util.JdbcUtil;
/**
 * Tests for {@link FailoverConfig} / {@link FailoverConnection}: config
 * validation, equality/clone semantics, and obtaining live connections
 * against the JDBC URLs declared in the unit-test properties.
 *
 * @author howard
 */
public class FailoverConnectionTest extends com.adaptris.interlok.junit.scaffolding.BaseCase {
  @Before
  public void setUp() throws Exception {
    initialiseDatabase();
  }
  /** Null arguments to the config setters must be rejected. */
  @Test
  public void testFailoverConfigSetters() throws Exception {
    FailoverConfig fc1 = new FailoverConfig();
    try {
      fc1.setConnectionUrls(null);
      fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
    }
    try {
      fc1.addConnectionUrl(null);
      fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
    }
    try {
      fc1.setTestStatement(null);
      fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
    }
    try {
      fc1.setDatabaseDriver(null);
      fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
    }
  }
  /** Credentials come from the property set when present, else stay null. */
  @Test
  public void testInit() throws Exception {
    Properties p = createProperties();
    p.setProperty(JDBC_USERNAME, "myUser");
    p.setProperty(JDBC_PASSWORD, "myPassword");
    FailoverConfig fc = new FailoverConfig(p);
    assertEquals("myUser", fc.getUsername());
    assertEquals("myPassword", fc.getPassword());
    fc = new FailoverConfig(createProperties());
    assertNull(fc.getUsername());
    assertNull(fc.getPassword());
  }
  /** equals() contract: null, foreign type, empty config, self, twin, props-built twin. */
  @Test
  public void testFailoverConfigEquality() throws Exception {
    FailoverConfig fc1 = createFailoverConfig();
    fc1.toString();
    assertFalse(fc1.equals(null));
    assertFalse(fc1.equals(new Object()));
    assertFalse(fc1.equals(new FailoverConfig()));
    assertTrue(fc1.equals(fc1));
    FailoverConfig fc2 = createFailoverConfig();
    assertEquals(fc1, fc2);
    fc2 = new FailoverConfig(createProperties());
    assertEquals(fc1, fc2);
  }
  /** clone() must produce an equal object with an equal hash code. */
  @Test
  public void testFailoverConfigClone() throws Exception {
    FailoverConfig fc1 = createFailoverConfig();
    FailoverConfig fc2 = (FailoverConfig) fc1.clone();
    assertEquals(fc1.toString(), fc1, fc2);
    assertEquals(fc1.toString(), fc1.hashCode(), fc2.hashCode());
  }
  /** A bogus driver class name must surface as SQLException at construction. */
  @Test
  public void testNoDriver() throws Exception {
    FailoverConfig cfg = createFailoverConfig();
    cfg.setDatabaseDriver("hello.there");
    try {
      // NOTE(review): 'c' is intentionally unused — construction itself should throw.
      FailoverConnection c = new FailoverConnection(cfg);
      fail();
    } catch (SQLException expected) {
    }
  }
  /** Happy path: a configured connection opens and can run DDL. */
  @Test
  public void testConnection() throws Exception {
    try {
      FailoverConnection c = new FailoverConnection(new FailoverConfig());
      fail("suceeded w/o any urls");
    } catch (Exception e) {
      ;
    }
    FailoverConfig cfg = createFailoverConfig();
    FailoverConnection c = new FailoverConnection(cfg);
    assertEquals(cfg, c.getConfig());
    Connection con = c.getConnection();
    try {
      assertFalse(con.isClosed());
      createTables(con);
    } finally {
      JdbcUtil.closeQuietly(con);
      c.close();
    }
  }
  /** An undecryptable ALTPW password must fail with SQLException on connect. */
  @Test
  public void testConnection_BadPassword() throws Exception {
    FailoverConfig cfg = createFailoverConfig();
    cfg.setPassword("ALTPW:abcdef");
    // NOTE(review): 'c' is never closed on the failure path — confirm
    // FailoverConnection holds no resources before a successful getConnection().
    FailoverConnection c = new FailoverConnection(cfg);
    try {
      Connection con = c.getConnection();
      fail("Shouldn't be able to decrypt password");
    } catch (SQLException expected) {
    }
  }
  /** As testConnection, but with always-validate on and debug off. */
  @Test
  public void testConnection_NoDebug() throws Exception {
    try {
      FailoverConnection c = new FailoverConnection(new FailoverConfig());
      fail("suceeded w/o any urls");
    } catch (Exception e) {
      ;
    }
    FailoverConfig cfg = createFailoverConfig();
    cfg.setAlwaysValidateConnection(true);
    cfg.setDebugMode(false);
    FailoverConnection c = new FailoverConnection(cfg);
    assertEquals(cfg, c.getConfig());
    Connection con = c.getConnection();
    try {
      assertFalse(con.isClosed());
      createTables(con);
    } finally {
      JdbcUtil.closeQuietly(con);
      c.close();
    }
  }
  /** As testConnection, but with connection validation disabled. */
  @Test
  public void testConnection_NoValidate() throws Exception {
    try {
      FailoverConnection c = new FailoverConnection(new FailoverConfig());
      fail("suceeded w/o any urls");
    } catch (Exception e) {
      ;
    }
    FailoverConfig cfg = createFailoverConfig();
    cfg.setAlwaysValidateConnection(false);
    cfg.setDebugMode(false);
    FailoverConnection c = new FailoverConnection(cfg);
    assertEquals(cfg, c.getConfig());
    Connection con = c.getConnection();
    try {
      assertFalse(con.isClosed());
      createTables(con);
    } finally {
      JdbcUtil.closeQuietly(con);
      c.close();
    }
  }
  /** Builds the canonical two-URL config used by most tests. */
  private FailoverConfig createFailoverConfig() {
    FailoverConfig fc = new FailoverConfig();
    fc.addConnectionUrl(PROPERTIES.getProperty("jdbc.url"));
    fc.addConnectionUrl(PROPERTIES.getProperty("jdbc.url.2"));
    fc.setAlwaysValidateConnection(true);
    fc.setAutoCommit(true);
    fc.setDatabaseDriver(PROPERTIES.getProperty("jdbc.driver"));
    fc.setTestStatement("SELECT seq_number from sequences where id='id'");
    fc.setDebugMode(true);
    return fc;
  }
  /** Same configuration as createFailoverConfig(), expressed as Properties. */
  protected static Properties createProperties() {
    Properties p = new Properties();
    p.setProperty(JDBC_DRIVER, PROPERTIES.getProperty("jdbc.driver"));
    p.setProperty(JDBC_AUTO_COMMIT, "true");
    p.setProperty(JDBC_DEBUG, "true");
    p.setProperty(JDBC_ALWAYS_VERIFY, "true");
    p.setProperty(JDBC_TEST_STATEMENT, "SELECT seq_number from sequences where id='id'");
    p.setProperty(JDBC_URL_ROOT + ".1", PROPERTIES.getProperty("jdbc.url"));
    p.setProperty(JDBC_URL_ROOT + ".2", PROPERTIES.getProperty("jdbc.url.2"));
    return p;
  }
  /** Recreates the test table before each test. */
  private void initialiseDatabase() throws Exception {
    Connection dbCon = connect();
    try {
      createTables(dbCon);
    }
    finally {
      JdbcUtil.closeQuietly(dbCon);
    }
  }
  /** Opens a direct (non-failover) connection to the primary test URL. */
  protected static Connection connect() throws Exception {
    Class.forName(PROPERTIES.getProperty("jdbc.driver"));
    return DriverManager.getConnection(PROPERTIES.getProperty("jdbc.url"));
  }
  /** Drops and recreates the 'sequences' table with a single seeded row. */
  protected static void createTables(Connection dbCon) throws SQLException {
    Statement stmt = null;
    try {
      dbCon.setAutoCommit(true);
      stmt = dbCon.createStatement();
      try {
        // Ignore failure: the table may not exist yet on a fresh database.
        stmt.execute("DROP TABLE sequences");
      }
      catch (Exception e) {
      }
      stmt.execute("CREATE TABLE sequences " + "(id VARCHAR(255) NOT NULL, seq_number INT)");
      stmt.executeUpdate("INSERT INTO sequences (id, seq_number) values ('id', 2)");
    }
    finally {
      JdbcUtil.closeQuietly(stmt);
    }
  }
}
| |
package io.miti.db2java;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
/**
* Encapsulate database operations.
*
* @author mwallace
* @version 1.0
*/
public final class Database
{
  /**
   * The current database connection, shared by all callers
   * (lazily created by loadConnection()).
   */
  private static Connection conn = null;
  /**
   * Whether we have attempted to retrieve a database
   * connection object.  If true, and conn = null,
   * there was an error.
   */
  private static boolean bRetrievedConnection = false;
  /**
   * Whether the connection is valid, if it's already been retrieved.
   */
  private static boolean bConnectionValid = false;
  /**
   * Properties object for holding the connection information
   * loaded from PROPS_FILE_NAME.
   */
  private static Properties props = new Properties();
  /**
   * The properties file name (looked up in the working directory).
   */
  private static final String PROPS_FILE_NAME = "dbinfo.prop";
  /**
   * Default constructor.  Make it private since this class
   * is a static utility and does not need to be instantiated.
   */
  private Database()
  {
    super();
  }
/**
* Returns the default database connection.
*
* @return the result of the operation
*/
protected static boolean loadConnection()
{
// Check whether we've already tried to load the connection
if (bRetrievedConnection)
{
// Check for an invalid connection
if ((conn == null) || (!bConnectionValid))
{
// There's an error, so set the error flag
bRetrievedConnection = false;
return false;
}
// Connection is already loaded and valid
return true;
}
// We have not yet tried to get a connection, so
// record that we are trying now
bRetrievedConnection = true;
// Instantiate the properties object
try
{
props.load(new FileInputStream(new File(PROPS_FILE_NAME)));
}
catch (FileNotFoundException e)
{
System.err.println(e.getMessage());
}
catch (IOException e)
{
System.err.println(e.getMessage());
}
// Read the properties needed to make a database connection
final String dbHost = props.getProperty("db.server", "127.0.0.1");
final String dbName = props.getProperty("db.name", "dbname");
final String dbPort = props.getProperty("db.port", "3306");
final String dbUser = props.getProperty("db.user", "userid");
final String dbPass = props.getProperty("db.pw", "password");
// Build the connection string
StringBuffer buf = new StringBuffer(120);
buf.append("jdbc:mysql://").append(dbHost);
// Check if the port is set
if ((dbPort != null) && (dbPort.length() > 0))
{
buf.append(":").append(dbPort);
}
// Append the remainder of the connection URL (except password)
buf.append("/").append(dbName).append("?user=").append(dbUser)
.append("&password=");
// Make a copy of the buffer, without the password. We
// add a string of 8 *'s instead. This is displayed to
// the user in case of an error getting a connection.
StringBuffer bufNoPassword = new StringBuffer(buf.toString());
bufNoPassword.append("********");
// Now append the password
buf.append(dbPass);
// Debugging
// LexiconTool.debug("Connection: " + buf.toString());
// Load the driver class
if (!(loadDriver()))
{
// An error occurred
return false;
}
// Get the connection
if (!getConnection(buf.toString()))
{
// An error occurred
return false;
}
// Check the connection
if (conn == null)
{
// An error occurred; overwrite the error message/code
System.err.println("Unable to connect to the database server via " +
bufNoPassword.toString());
return false;
}
// Return the result
return true;
}
  /**
   * Load the H2 JDBC driver class.
   * (Previous javadoc said "MySQL", but the code loads org.h2.Driver;
   * see also the NOTE in loadConnection about the jdbc:mysql URL.)
   *
   * @return the result of the operation
   */
  protected static boolean loadDriver()
  {
    // Set the default return value
    boolean bResult = false;
    // Try to load the driver
    try
    {
      Class.forName("org.h2.Driver").newInstance();
      // If we reach this point, it was successful
      bResult = true;
    }
    catch (ClassNotFoundException cnfe)
    {
      System.err.println(cnfe.getMessage());
    }
    catch (InstantiationException ie)
    {
      System.err.println(ie.getMessage());
    }
    catch (IllegalAccessException iae)
    {
      System.err.println(iae.getMessage());
    }
    // Return the result of the operation
    return bResult;
  }
/**
* Gets a Connection object from the database server.
* The connection is not in auto-commit mode, so
* transactions need to be either commited or rolled
* back. Returns null on error.
*
* @param connUrl string used to get a connection
* @return the result of the operation
*/
protected static boolean getConnection(final String connUrl)
{
// The result of the operation
boolean bResult = false;
try
{
conn = DriverManager.getConnection(connUrl);
if (conn != null)
{
// Mark the connection as not autocommit
conn.setAutoCommit(false);
// Mark the connection as valid
bConnectionValid = true;
// The operation was successful
bResult = true;
}
}
catch (SQLException sqlex)
{
// Null out the connection object
conn = null;
}
// Return the connection object
return bResult;
}
    /**
     * Executes a database SELECT with no bound string parameters.
     * Convenience overload that delegates to the four-argument version,
     * passing null for the parameter array.
     *
     * @param sqlCmd the database statement to execute
     * @param listData will hold the retrieved data
     * @param fetcher used to retrieve the selected database columns
     * @return the result of the operation
     */
    protected static boolean executeSelect(final String sqlCmd,
                                           final List listData,
                                           final FetchDatabaseRecords fetcher)
    {
        return executeSelect(sqlCmd, listData, fetcher, null);
    }
/**
* Executes a database SELECT.
*
* @param sqlCmd the database ststement to execute
* @param listData will hold the retrieved data
* @param fetcher used to retrieve the selected database columns
* @param arrayStrings the array of strings to supply to the query
* @return the result of the operation
*/
protected static boolean executeSelect(final String sqlCmd,
final List listData,
final FetchDatabaseRecords fetcher,
final String[] arrayStrings)
{
// The return value
boolean bResult = false;
// Check the SQL command
if ((sqlCmd == null) || (sqlCmd.length() < 1))
{
return bResult;
}
// Check the list parameter
if (listData == null)
{
return bResult;
}
// Check the fetcher
if (fetcher == null)
{
return bResult;
}
// Get the connection object and check for an error
bResult = loadConnection();
if (conn == null)
{
return bResult;
}
// Execute the statement and get the returned ID
PreparedStatement stmt = null;
ResultSet rs = null;
try
{
// Create the Statement object from the connection
stmt = conn.prepareStatement(sqlCmd);
if (null != stmt)
{
// Set any strings in the SQL command
if ((arrayStrings != null) && (arrayStrings.length > 0))
{
final int nSize = arrayStrings.length;
for (int nIndex = 0; nIndex < nSize; ++nIndex)
{
stmt.setString(nIndex + 1, arrayStrings[nIndex]);
}
}
// Now execute the query and save the result set
rs = stmt.executeQuery();
// If we reached this point, no error was found
bResult = true;
// Check for a result
if (rs != null)
{
// Get the data from the result set
fetcher.getFields(rs, listData);
}
}
}
catch (SQLException sqlex)
{
System.err.println(sqlex.getMessage());
}
finally
{
// Close the ResultSet if it's not null
try
{
if (rs != null)
{
rs.close();
rs = null;
}
}
catch (SQLException sqle)
{
System.err.println(sqle.getMessage());
}
// Close the Statement if it's not null
try
{
if (stmt != null)
{
stmt.close();
stmt = null;
}
}
catch (SQLException sqle)
{
System.err.println(sqle.getMessage());
}
}
// Return the result of the operation
return bResult;
}
}
| |
/*
* EVE Swagger Interface
* An OpenAPI for EVE Online
*
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package net.troja.eve.esi.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
import java.io.Serializable;
/**
* 200 ok object
*/
@ApiModel(description = "200 ok object")
public class CorporationRolesHistoryResponse implements Serializable {
private static final long serialVersionUID = 1L;
@JsonProperty("changed_at")
private OffsetDateTime changedAt = null;
@JsonProperty("character_id")
private Integer characterId = null;
@JsonProperty("issuer_id")
private Integer issuerId = null;
/**
* new_role string
*/
public enum NewRolesEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
NewRolesEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static NewRolesEnum fromValue(String text) {
for (NewRolesEnum b : NewRolesEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("new_roles")
private List<NewRolesEnum> newRoles = new ArrayList<NewRolesEnum>();
/**
* old_role string
*/
public enum OldRolesEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
OldRolesEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static OldRolesEnum fromValue(String text) {
for (OldRolesEnum b : OldRolesEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("old_roles")
private List<OldRolesEnum> oldRoles = new ArrayList<OldRolesEnum>();
/**
* role_type string
*/
public enum RoleTypeEnum {
GRANTABLE_ROLES("grantable_roles"),
GRANTABLE_ROLES_AT_BASE("grantable_roles_at_base"),
GRANTABLE_ROLES_AT_HQ("grantable_roles_at_hq"),
GRANTABLE_ROLES_AT_OTHER("grantable_roles_at_other"),
ROLES("roles"),
ROLES_AT_BASE("roles_at_base"),
ROLES_AT_HQ("roles_at_hq"),
ROLES_AT_OTHER("roles_at_other");
private String value;
RoleTypeEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static RoleTypeEnum fromValue(String text) {
for (RoleTypeEnum b : RoleTypeEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("role_type")
private RoleTypeEnum roleType = null;
public CorporationRolesHistoryResponse changedAt(OffsetDateTime changedAt) {
this.changedAt = changedAt;
return this;
}
/**
* changed_at string
*
* @return changedAt
**/
@ApiModelProperty(example = "null", required = true, value = "changed_at string")
public OffsetDateTime getChangedAt() {
return changedAt;
}
public void setChangedAt(OffsetDateTime changedAt) {
this.changedAt = changedAt;
}
public CorporationRolesHistoryResponse characterId(Integer characterId) {
this.characterId = characterId;
return this;
}
/**
* The character whose roles are changed
*
* @return characterId
**/
@ApiModelProperty(example = "null", required = true, value = "The character whose roles are changed")
public Integer getCharacterId() {
return characterId;
}
public void setCharacterId(Integer characterId) {
this.characterId = characterId;
}
public CorporationRolesHistoryResponse issuerId(Integer issuerId) {
this.issuerId = issuerId;
return this;
}
/**
* ID of the character who issued this change
*
* @return issuerId
**/
@ApiModelProperty(example = "null", required = true, value = "ID of the character who issued this change")
public Integer getIssuerId() {
return issuerId;
}
public void setIssuerId(Integer issuerId) {
this.issuerId = issuerId;
}
public CorporationRolesHistoryResponse newRoles(List<NewRolesEnum> newRoles) {
this.newRoles = newRoles;
return this;
}
public CorporationRolesHistoryResponse addNewRolesItem(NewRolesEnum newRolesItem) {
this.newRoles.add(newRolesItem);
return this;
}
/**
* new_roles array
*
* @return newRoles
**/
@ApiModelProperty(example = "null", required = true, value = "new_roles array")
public List<NewRolesEnum> getNewRoles() {
return newRoles;
}
public void setNewRoles(List<NewRolesEnum> newRoles) {
this.newRoles = newRoles;
}
public CorporationRolesHistoryResponse oldRoles(List<OldRolesEnum> oldRoles) {
this.oldRoles = oldRoles;
return this;
}
public CorporationRolesHistoryResponse addOldRolesItem(OldRolesEnum oldRolesItem) {
this.oldRoles.add(oldRolesItem);
return this;
}
/**
* old_roles array
*
* @return oldRoles
**/
@ApiModelProperty(example = "null", required = true, value = "old_roles array")
public List<OldRolesEnum> getOldRoles() {
return oldRoles;
}
public void setOldRoles(List<OldRolesEnum> oldRoles) {
this.oldRoles = oldRoles;
}
public CorporationRolesHistoryResponse roleType(RoleTypeEnum roleType) {
this.roleType = roleType;
return this;
}
/**
* role_type string
*
* @return roleType
**/
@ApiModelProperty(example = "null", required = true, value = "role_type string")
public RoleTypeEnum getRoleType() {
return roleType;
}
public void setRoleType(RoleTypeEnum roleType) {
this.roleType = roleType;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CorporationRolesHistoryResponse corporationRolesHistoryResponse = (CorporationRolesHistoryResponse) o;
return Objects.equals(this.changedAt, corporationRolesHistoryResponse.changedAt)
&& Objects.equals(this.characterId, corporationRolesHistoryResponse.characterId)
&& Objects.equals(this.issuerId, corporationRolesHistoryResponse.issuerId)
&& Objects.equals(this.newRoles, corporationRolesHistoryResponse.newRoles)
&& Objects.equals(this.oldRoles, corporationRolesHistoryResponse.oldRoles)
&& Objects.equals(this.roleType, corporationRolesHistoryResponse.roleType);
}
@Override
public int hashCode() {
return Objects.hash(changedAt, characterId, issuerId, newRoles, oldRoles, roleType);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class CorporationRolesHistoryResponse {\n");
sb.append(" changedAt: ").append(toIndentedString(changedAt)).append("\n");
sb.append(" characterId: ").append(toIndentedString(characterId)).append("\n");
sb.append(" issuerId: ").append(toIndentedString(issuerId)).append("\n");
sb.append(" newRoles: ").append(toIndentedString(newRoles)).append("\n");
sb.append(" oldRoles: ").append(toIndentedString(oldRoles)).append("\n");
sb.append(" roleType: ").append(toIndentedString(roleType)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
| |
package bndtools.editor.exports;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.bndtools.api.ILogger;
import org.bndtools.api.Logger;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.MultiStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.search.IJavaSearchScope;
import org.eclipse.jdt.core.search.SearchEngine;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.forms.editor.IFormPage;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.ide.ResourceUtil;
import aQute.bnd.build.Project;
import aQute.bnd.build.model.BndEditModel;
import aQute.bnd.build.model.clauses.ExportedPackage;
import aQute.bnd.header.Attrs;
import aQute.bnd.osgi.Constants;
import bndtools.Plugin;
import bndtools.central.Central;
import bndtools.editor.contents.PackageInfoDialog;
import bndtools.editor.contents.PackageInfoDialog.FileVersionTuple;
import bndtools.editor.contents.PackageInfoStyle;
import bndtools.editor.pkgpatterns.PkgPatternsListPart;
import bndtools.internal.pkgselection.IPackageFilter;
import bndtools.internal.pkgselection.JavaSearchScopePackageLister;
import bndtools.internal.pkgselection.PackageSelectionDialog;
import bndtools.preferences.BndPreferences;
/**
 * Editor section listing the Export-Package clauses of a bnd file.  Besides
 * maintaining the clause list, adding packages offers to generate missing
 * packageinfo/package-info files in the corresponding source folders.
 */
public class ExportPatternsListPart extends PkgPatternsListPart<ExportedPackage> {
    private static final ILogger logger = Logger.getLogger(ExportPatternsListPart.class);

    public ExportPatternsListPart(Composite parent, FormToolkit toolkit, int style) {
        super(parent, toolkit, style, Constants.EXPORT_PACKAGE, "Export Packages", new ExportedPackageLabelProvider());
    }

    @Override
    protected Collection<ExportedPackage> generateClauses() {
        return selectPackagesToAdd();
    }

    /**
     * Opens a package-selection dialog scoped to the current Java project and
     * returns the packages the user chose as new export clauses.
     *
     * @return the selected packages, or null if the dialog was cancelled
     */
    protected List<ExportedPackage> selectPackagesToAdd() {
        List<ExportedPackage> added = null;
        // Exclude java.* packages, which must never be exported by a bundle
        final IPackageFilter filter = packageName -> {
            if (packageName.equals("java") || packageName.startsWith("java."))
                return false;
            // TODO: check already included patterns
            return true;
        };
        IFormPage page = (IFormPage) getManagedForm().getContainer();
        IWorkbenchWindow window = page.getEditorSite()
            .getWorkbenchWindow();

        // Prepare the package lister from the Java project
        IProject project = ResourceUtil.getResource(page.getEditorInput())
            .getProject();
        IJavaProject javaProject = JavaCore.create(project);

        IJavaSearchScope searchScope = SearchEngine.createJavaSearchScope(new IJavaElement[] {
            javaProject
        });
        JavaSearchScopePackageLister packageLister = new JavaSearchScopePackageLister(searchScope, window);

        // Create and open the dialog
        PackageSelectionDialog dialog = new PackageSelectionDialog(window.getShell(), packageLister, filter,
            "Select new packages to export from the bundle.");
        dialog.setSourceOnly(true);
        dialog.setMultipleSelection(true);
        if (dialog.open() == Window.OK) {
            Object[] results = dialog.getResult();
            added = new LinkedList<>();

            // Select the results
            for (Object result : results) {
                String newPackageName = (String) result;
                ExportedPackage newPackage = new ExportedPackage(newPackageName, new Attrs());
                added.add(newPackage);
            }
        }
        return added;
    }

    /**
     * Adds the export clauses, first offering to generate packageinfo files
     * for any selected source packages that do not have one yet.
     */
    @Override
    protected void doAddClauses(Collection<? extends ExportedPackage> pkgs, int index, boolean select) {
        List<FileVersionTuple> missingPkgInfoDirs;
        try {
            missingPkgInfoDirs = new ArrayList<>(findSourcePackagesWithoutPackageInfo(pkgs));
        } catch (Exception e) {
            // FIX: message previously read "exported 1packages."
            ErrorDialog.openError(getManagedForm().getForm()
                .getShell(), "Error", null,
                new Status(IStatus.ERROR, Plugin.PLUGIN_ID, 0, "Error finding source package for exported packages.",
                    e));
            missingPkgInfoDirs = Collections.emptyList();
        }

        List<FileVersionTuple> generatePkgInfoDirs = new ArrayList<>(missingPkgInfoDirs.size());
        BndPreferences prefs = new BndPreferences();
        boolean noAskPackageInfo = prefs.getNoAskPackageInfo();

        if (noAskPackageInfo || missingPkgInfoDirs.isEmpty()) {
            generatePkgInfoDirs.addAll(missingPkgInfoDirs);
        } else {
            PackageInfoDialog dlg = new PackageInfoDialog(getSection().getShell(), missingPkgInfoDirs);
            if (dlg.open() == Window.CANCEL)
                return;
            prefs.setNoAskPackageInfo(dlg.isDontAsk());
            generatePkgInfoDirs.addAll(dlg.getSelectedPackageDirs());
        }

        try {
            generatePackageInfos(generatePkgInfoDirs);
        } catch (CoreException e) {
            // FIX: message previously read "Error generated packageinfo files."
            ErrorDialog.openError(getManagedForm().getForm()
                .getShell(), "Error", null,
                new Status(IStatus.ERROR, Plugin.PLUGIN_ID, 0, "Error generating packageinfo files.", e));
        }

        // Actually add the new exports
        super.doAddClauses(pkgs, index, select);
    }

    /**
     * Scans the project's source folders for the given packages and returns
     * one tuple per package directory that has no existing packageinfo file.
     */
    private Collection<FileVersionTuple> findSourcePackagesWithoutPackageInfo(
        Collection<? extends ExportedPackage> pkgs) throws Exception {
        Map<String, FileVersionTuple> result = new HashMap<>();

        Project project = getProject();
        if (project != null) {
            Collection<File> sourceDirs = project.getSourcePath();
            for (File sourceDir : sourceDirs) {
                for (ExportedPackage pkg : pkgs) {
                    if (!result.containsKey(pkg.getName())) {
                        File pkgDir = new File(sourceDir, pkg.getName()
                            .replace('.', '/'));
                        if (pkgDir.isDirectory()) {
                            PackageInfoStyle existingPkgInfo = PackageInfoStyle.findExisting(pkgDir);
                            if (existingPkgInfo == null)
                                result.put(pkg.getName(), new FileVersionTuple(pkg.getName(), pkgDir));
                        }
                    }
                }
            }
        }
        return result.values();
    }

    /**
     * Creates (or overwrites) a packageinfo file for each tuple, running as a
     * workspace operation on the UI progress service.  Individual file
     * failures are collected into a MultiStatus and rethrown as one
     * CoreException at the end.
     */
    private static void generatePackageInfos(final Collection<? extends FileVersionTuple> pkgs) throws CoreException {
        final IWorkspaceRunnable wsOperation = monitor -> {
            SubMonitor progress = SubMonitor.convert(monitor, pkgs.size());
            MultiStatus status = new MultiStatus(Plugin.PLUGIN_ID, 0,
                "Errors occurred while creating packageinfo files.", null);
            for (FileVersionTuple pkg : pkgs) {
                IContainer[] locations = ResourcesPlugin.getWorkspace()
                    .getRoot()
                    .findContainersForLocationURI(pkg.getFile()
                        .toURI());
                if (locations != null && locations.length > 0) {
                    IContainer container = locations[0];
                    PackageInfoStyle packageInfoStyle = PackageInfoStyle
                        .calculatePackageInfoStyle(container.getProject());
                    IFile pkgInfoFile = container.getFile(new Path(packageInfoStyle.getFileName()));
                    try {
                        String formattedPackageInfo = packageInfoStyle.format(pkg.getVersion(), pkg.getName());
                        ByteArrayInputStream input = new ByteArrayInputStream(formattedPackageInfo.getBytes("UTF-8"));
                        if (pkgInfoFile.exists())
                            pkgInfoFile.setContents(input, false, true, progress.newChild(1, 0));
                        else
                            pkgInfoFile.create(input, false, progress.newChild(1, 0));
                    } catch (CoreException e1) {
                        status.add(new Status(IStatus.ERROR, Plugin.PLUGIN_ID, 0,
                            "Error creating file " + pkgInfoFile.getFullPath(), e1));
                    } catch (UnsupportedEncodingException e2) {
                        /* just ignore, should never happen: UTF-8 is mandatory */
                    }
                }
            }
            if (!status.isOK())
                throw new CoreException(status);
        };
        IRunnableWithProgress uiOperation = monitor -> {
            try {
                ResourcesPlugin.getWorkspace()
                    .run(wsOperation, monitor);
            } catch (CoreException e) {
                throw new InvocationTargetException(e);
            }
        };
        try {
            PlatformUI.getWorkbench()
                .getActiveWorkbenchWindow()
                .run(true, true, uiOperation);
        } catch (InvocationTargetException e) {
            // Unwrap the CoreException rethrown from the workspace operation
            throw (CoreException) e.getTargetException();
        } catch (InterruptedException e) {
            // ignore: user cancelled the progress dialog
        }
    }

    @Override
    protected ExportedPackage newHeaderClause(String text) {
        return new ExportedPackage(text, new Attrs());
    }

    @Override
    protected List<ExportedPackage> loadFromModel(BndEditModel model) {
        return model.getExportedPackages();
    }

    @Override
    protected void saveToModel(BndEditModel model, List<? extends ExportedPackage> clauses) {
        model.setExportedPackages(clauses);
    }

    /**
     * Resolves the bnd Project backing the edited model, or null (with a
     * logged error) when it cannot be determined.
     */
    Project getProject() {
        Project project = null;
        try {
            BndEditModel model = (BndEditModel) getManagedForm().getInput();
            File bndFile = model.getBndResource();
            IPath path = Central.toPath(bndFile);
            IFile resource = ResourcesPlugin.getWorkspace()
                .getRoot()
                .getFile(path);
            project = Central.getProject(resource.getProject());
        } catch (Exception e) {
            logger.logError("Error getting project from editor model", e);
        }
        return project;
    }
}
| |
package bio.terra.workspace.service.resource.controlled.cloud.gcp.ainotebook;
import static bio.terra.workspace.service.workspace.flight.WorkspaceFlightMapKeys.ControlledResourceKeys.CREATE_NOTEBOOK_NETWORK_NAME;
import static bio.terra.workspace.service.workspace.flight.WorkspaceFlightMapKeys.ControlledResourceKeys.CREATE_NOTEBOOK_PARAMETERS;
import static bio.terra.workspace.service.workspace.flight.WorkspaceFlightMapKeys.ControlledResourceKeys.CREATE_NOTEBOOK_REGION;
import static bio.terra.workspace.service.workspace.flight.WorkspaceFlightMapKeys.ControlledResourceKeys.CREATE_NOTEBOOK_SUBNETWORK_NAME;
import bio.terra.cloudres.google.api.services.common.OperationCow;
import bio.terra.cloudres.google.notebooks.AIPlatformNotebooksCow;
import bio.terra.cloudres.google.notebooks.InstanceName;
import bio.terra.common.exception.BadRequestException;
import bio.terra.stairway.FlightContext;
import bio.terra.stairway.FlightMap;
import bio.terra.stairway.Step;
import bio.terra.stairway.StepResult;
import bio.terra.stairway.StepStatus;
import bio.terra.stairway.exception.RetryException;
import bio.terra.workspace.common.utils.GcpUtils;
import bio.terra.workspace.generated.model.ApiGcpAiNotebookInstanceAcceleratorConfig;
import bio.terra.workspace.generated.model.ApiGcpAiNotebookInstanceContainerImage;
import bio.terra.workspace.generated.model.ApiGcpAiNotebookInstanceCreationParameters;
import bio.terra.workspace.generated.model.ApiGcpAiNotebookInstanceVmImage;
import bio.terra.workspace.service.crl.CrlService;
import bio.terra.workspace.service.workspace.flight.WorkspaceFlightMapKeys.ControlledResourceKeys;
import bio.terra.workspace.service.workspace.model.GcpCloudContext;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.services.notebooks.v1.model.AcceleratorConfig;
import com.google.api.services.notebooks.v1.model.ContainerImage;
import com.google.api.services.notebooks.v1.model.Instance;
import com.google.api.services.notebooks.v1.model.Operation;
import com.google.api.services.notebooks.v1.model.VmImage;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
/**
* A step for creating the AI Platform notebook instance in the Google cloud.
*
* <p>Undo deletes the created notebook instance.
*/
public class CreateAiNotebookInstanceStep implements Step {
  /** Default post-startup-script when starting a notebook instance. */
  protected static final String DEFAULT_POST_STARTUP_SCRIPT =
      "https://raw.githubusercontent.com/DataBiosphere/terra-workspace-manager/main/service/src/main/java/bio/terra/workspace/service/resource/controlled/cloud/gcp/ainotebook/post-startup.sh";
  /** The Notebook instance metadata key used to control proxy mode. */
  private static final String PROXY_MODE_METADATA_KEY = "proxy-mode";
  /** The Notebook instance metadata value used to set the service account proxy mode. */
  // git secrets gets a false positive if 'service_account' is double quoted.
  private static final String PROXY_MODE_SA_VALUE = "service_" + "account";
  /**
   * Service account for the notebook instance needs to contain these scopes to interact with SAM.
   */
  private static final List<String> SERVICE_ACCOUNT_SCOPES =
      ImmutableList.of(
          "https://www.googleapis.com/auth/cloud-platform",
          "https://www.googleapis.com/auth/userinfo.email",
          "https://www.googleapis.com/auth/userinfo.profile");

  private final Logger logger = LoggerFactory.getLogger(CreateAiNotebookInstanceStep.class);
  // The controlled resource describing the notebook instance to create
  private final ControlledAiNotebookInstanceResource resource;
  // Pet service account email the instance will run as
  private final String petEmail;
  // Cloud Resource Library wrapper for the Notebooks API
  private final CrlService crlService;

  public CreateAiNotebookInstanceStep(
      ControlledAiNotebookInstanceResource resource, String petEmail, CrlService crlService) {
    this.petEmail = petEmail;
    this.resource = resource;
    this.crlService = crlService;
  }

  /**
   * Creates the notebook instance and polls the create operation to completion.
   *
   * <p>Error mapping: HTTP 409 (CONFLICT) means the instance already exists — treated as success
   * so the step is idempotent on retry; HTTP 400 (BAD_REQUEST) fails fatally since retrying the
   * same request cannot succeed; any other HTTP error or IOException is retried.
   */
  @Override
  public StepResult doStep(FlightContext flightContext)
      throws InterruptedException, RetryException {
    final GcpCloudContext gcpCloudContext =
        flightContext
            .getWorkingMap()
            .get(ControlledResourceKeys.GCP_CLOUD_CONTEXT, GcpCloudContext.class);
    String projectId = gcpCloudContext.getGcpProjectId();
    InstanceName instanceName = resource.toInstanceName(projectId);

    Instance instance = createInstanceModel(flightContext, projectId, petEmail);

    AIPlatformNotebooksCow notebooks = crlService.getAIPlatformNotebooksCow();
    try {
      OperationCow<Operation> creationOperation;
      try {
        creationOperation =
            notebooks
                .operations()
                .operationCow(notebooks.instances().create(instanceName, instance).execute());
      } catch (GoogleJsonResponseException e) {
        // If the instance already exists, this step must have already run successfully. Otherwise
        // retry.
        if (HttpStatus.CONFLICT.value() == e.getStatusCode()) {
          logger.debug("Notebook instance {} already created.", instanceName.formatName());
          return StepResult.getStepResultSuccess();
        } else if (HttpStatus.BAD_REQUEST.value() == e.getStatusCode()) {
          // Don't retry bad requests, which won't change. Instead fail faster.
          return new StepResult(StepStatus.STEP_RESULT_FAILURE_FATAL, e);
        }
        return new StepResult(StepStatus.STEP_RESULT_FAILURE_RETRY, e);
      }
      // Poll every 20s, giving up after 12 minutes
      GcpUtils.pollUntilSuccess(creationOperation, Duration.ofSeconds(20), Duration.ofMinutes(12));
    } catch (IOException e) {
      return new StepResult(StepStatus.STEP_RESULT_FAILURE_RETRY, e);
    }
    return StepResult.getStepResultSuccess();
  }

  /**
   * Builds the Instance request body from the flight's creation parameters, then fills in the
   * network/subnetwork fields from the working map.
   */
  private static Instance createInstanceModel(
      FlightContext flightContext, String projectId, String serviceAccountEmail) {
    Instance instance = new Instance();
    ApiGcpAiNotebookInstanceCreationParameters creationParameters =
        flightContext
            .getInputParameters()
            .get(CREATE_NOTEBOOK_PARAMETERS, ApiGcpAiNotebookInstanceCreationParameters.class);
    setFields(creationParameters, serviceAccountEmail, instance);
    setNetworks(instance, projectId, flightContext.getWorkingMap());
    return instance;
  }

  /**
   * Copies the user-supplied creation parameters (machine type, disks, images, accelerator,
   * metadata) onto the Instance, forcing the Terra-reserved proxy-mode metadata entry and the
   * service account + scopes.
   *
   * @throws BadRequestException if the caller tried to set the reserved proxy-mode metadata key
   */
  @VisibleForTesting
  static Instance setFields(
      ApiGcpAiNotebookInstanceCreationParameters creationParameters,
      String serviceAccountEmail,
      Instance instance) {
    instance
        .setPostStartupScript(
            Optional.ofNullable(creationParameters.getPostStartupScript())
                .orElse(DEFAULT_POST_STARTUP_SCRIPT))
        .setMachineType(creationParameters.getMachineType())
        .setInstallGpuDriver(creationParameters.isInstallGpuDriver())
        .setCustomGpuDriverPath(creationParameters.getCustomGpuDriverPath())
        .setBootDiskType(creationParameters.getBootDiskType())
        .setBootDiskSizeGb(creationParameters.getBootDiskSizeGb())
        .setDataDiskType(creationParameters.getDataDiskType())
        .setDataDiskSizeGb(creationParameters.getDataDiskSizeGb());

    Map<String, String> metadata = new HashMap<>();
    Optional.ofNullable(creationParameters.getMetadata()).ifPresent(metadata::putAll);
    // Create the AI Notebook instance in the service account proxy mode to control proxy access by
    // means of IAM permissions on the service account.
    // https://cloud.google.com/ai-platform/notebooks/docs/troubleshooting#opening_a_notebook_results_in_a_403_forbidden_error
    if (metadata.put(PROXY_MODE_METADATA_KEY, PROXY_MODE_SA_VALUE) != null) {
      // A non-null previous value means the caller supplied their own proxy-mode entry
      throw new BadRequestException("proxy-mode metadata is reserved for Terra.");
    }
    instance.setMetadata(metadata);
    instance.setServiceAccount(serviceAccountEmail);
    instance.setServiceAccountScopes(SERVICE_ACCOUNT_SCOPES);

    // Optional accelerator / image settings: only set when supplied
    ApiGcpAiNotebookInstanceAcceleratorConfig acceleratorConfig =
        creationParameters.getAcceleratorConfig();
    if (acceleratorConfig != null) {
      instance.setAcceleratorConfig(
          new AcceleratorConfig()
              .setType(acceleratorConfig.getType())
              .setCoreCount(acceleratorConfig.getCoreCount()));
    }
    ApiGcpAiNotebookInstanceVmImage vmImageParameters = creationParameters.getVmImage();
    if (vmImageParameters != null) {
      instance.setVmImage(
          new VmImage()
              .setProject(vmImageParameters.getProjectId())
              .setImageFamily(vmImageParameters.getImageFamily())
              .setImageName(vmImageParameters.getImageName()));
    }
    ApiGcpAiNotebookInstanceContainerImage containerImageParameters =
        creationParameters.getContainerImage();
    if (containerImageParameters != null) {
      instance.setContainerImage(
          new ContainerImage()
              .setRepository(containerImageParameters.getRepository())
              .setTag(containerImageParameters.getTag()));
    }
    return instance;
  }

  /**
   * Sets the instance's network and subnetwork resource names from values previously stored in the
   * flight working map.
   */
  private static void setNetworks(Instance instance, String projectId, FlightMap workingMap) {
    String region = workingMap.get(CREATE_NOTEBOOK_REGION, String.class);
    String networkName = workingMap.get(CREATE_NOTEBOOK_NETWORK_NAME, String.class);
    String subnetworkName = workingMap.get(CREATE_NOTEBOOK_SUBNETWORK_NAME, String.class);
    instance.setNetwork("projects/" + projectId + "/global/networks/" + networkName);
    instance.setSubnet(
        "projects/" + projectId + "/regions/" + region + "/subnetworks/" + subnetworkName);
  }

  /**
   * Undo: deletes the instance created by doStep. HTTP 404 (NOT_FOUND) means there is nothing to
   * delete (the instance was never created, or already deleted) and is treated as success; other
   * failures, including poll failures, are retried.
   */
  @Override
  public StepResult undoStep(FlightContext flightContext) throws InterruptedException {
    final GcpCloudContext gcpCloudContext =
        flightContext
            .getWorkingMap()
            .get(ControlledResourceKeys.GCP_CLOUD_CONTEXT, GcpCloudContext.class);
    InstanceName instanceName = resource.toInstanceName(gcpCloudContext.getGcpProjectId());
    AIPlatformNotebooksCow notebooks = crlService.getAIPlatformNotebooksCow();
    try {
      OperationCow<Operation> deletionOperation;
      try {
        deletionOperation =
            notebooks
                .operations()
                .operationCow(notebooks.instances().delete(instanceName).execute());
      } catch (GoogleJsonResponseException e) {
        // The AI notebook instance may never have been created or have already been deleted.
        if (e.getStatusCode() == HttpStatus.NOT_FOUND.value()) {
          logger.debug("No notebook instance {} to delete.", instanceName.formatName());
          return StepResult.getStepResultSuccess();
        }
        return new StepResult(StepStatus.STEP_RESULT_FAILURE_RETRY, e);
      }
      // Poll every 20s, giving up after 12 minutes
      GcpUtils.pollUntilSuccess(deletionOperation, Duration.ofSeconds(20), Duration.ofMinutes(12));
    } catch (IOException | RetryException e) {
      return new StepResult(StepStatus.STEP_RESULT_FAILURE_RETRY, e);
    }
    return StepResult.getStepResultSuccess();
  }
}
| |
package uk.nhs.ciao.docs.parser.route;
import static org.apache.camel.builder.ExpressionBuilder.append;
import static org.apache.camel.builder.PredicateBuilder.*;
import static uk.nhs.ciao.logging.CiaoCamelLogMessage.camelLogMsg;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import uk.nhs.ciao.camel.BaseRouteBuilder;
import uk.nhs.ciao.logging.CiaoCamelLogger;
import uk.nhs.ciao.util.Clock;
import uk.nhs.ciao.util.SystemClock;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
/**
* Provides methods to manage the in-progress folders associated with a document upload.
* <p>
* The in-progress folder has the following structure:
* <p>
* <pre>
* source/
* ${documentName} - original document content
* control/
* error-folder - single line file specifying location of error folder
* completed-folder - single line file specifying location of completed folder
* wants-bus-ack - empty file, present if an ITK business ack has been requested
* wants-inf-ack - empty file, present if an ITK infrastructure ack has been requested
 * events/
* ${timestamp}-${messageType}-${eventType} - file contains the message associated with the event
* </pre>
* <p>
* State event types:
* <li><code>preparation-failed</code> - a message/document has failed preparation
* <li><code>sending</code> - a message is being sent
* <li><code>sent</code> - a message was successfully sent
* <li><code>send-failed</code> - a message failed to send
* <li><code>received</code> - a message was received
* <p>
* State message types:
* <ul>
* <li><code>document</code> - file related to the original source document
* <li><code>bus-message</code> - file containing an ITK business message
* <li><code>inf-ack</code> - file containing an ITK infrastructure ack
* <li><code>bus-ack</code> - file containing an ITK business ack
* <li><code>inf-nack</code> - file containing an ITK infrastructure nack
* <li><code>bus-nack</code> - file containing an ITK business nack
* </ul>
* <p>
* The timestamp format is <code>yyyyMMdd-HHmmssSSS</code> (in UTC)
*/
public class InProgressFolderManagerRoute extends BaseRouteBuilder {
    private static final CiaoCamelLogger LOGGER = CiaoCamelLogger.getLogger(InProgressFolderManagerRoute.class);
    // UTC timestamp used to prefix event file names (see EventFileNameCalculator).
    // Joda DateTimeFormatter is thread-safe, so a shared static instance is fine.
    private static final DateTimeFormatter TIMESTAMP_FORMAT = DateTimeFormat.forPattern("yyyyMMdd-HHmmssSSS").withZoneUTC();

    /**
     * Enumeration of header names supported by the manager
     */
    public static final class Header {
        public static final String ACTION = "ciao.inProgressFolder.action";
        public static final String FILE_TYPE = "ciao.inProgressFolder.fileType";
        public static final String EVENT_TYPE = "ciao.inProgressFolder.eventType";

        // Internal (private) headers
        private static final String MESSAGE_TYPE = "ciao.inProgressFolder.messageType";

        private Header() {
            // Suppress default constructor
        }
    }

    /**
     * Enumeration of action header values supported by the manager
     *
     * @see Header#ACTION
     */
    public static final class Action {
        public static final String STORE = "store";

        private Action() {
            // Suppress default constructor
        }
    }

    /**
     * Enumeration of file type header values supported by the manager
     *
     * @see Header#FILE_TYPE
     */
    public static final class FileType {
        public static final String CONTROL = "control";
        public static final String EVENT = "event";

        private FileType() {
            // Suppress default constructor
        }
    }

    /**
     * Enumeration of event type header values supported by the manager
     *
     * @see Header#EVENT_TYPE
     */
    public static final class EventType {
        public static final String MESSAGE_PREPARATION_FAILED = "preparation-failed";
        public static final String MESSAGE_SENDING = "sending";
        public static final String MESSAGE_SENT = "sent";
        public static final String MESSAGE_SEND_FAILED = "send-failed";
        public static final String MESSAGE_RECEIVED = "received";

        private EventType() {
            // Suppress default constructor
        }
    }

    /**
     * Enumeration of known message types
     * <p>
     * Values are supplied via the {@link Exchange#FILE_NAME} header
     */
    public static final class MessageType {
        public static final String DOCUMENT = "document";
        public static final String BUSINESS_MESSAGE = "bus-message";
        public static final String INFRASTRUCTURE_ACK = "inf-ack";
        public static final String INFRASTRUCTURE_NACK = "inf-nack";
        public static final String BUSINESS_ACK = "bus-ack";
        public static final String BUSINESS_NACK = "bus-nack";

        private MessageType() {
            // Suppress default constructor
        }
    }

    // Endpoint URI this manager consumes requests from.
    private String inProgressFolderManagerUri;
    // File endpoint URI pointing at the root of the in-progress folder tree.
    private String inProgressFolderRootUri;
    // Injectable clock so event timestamps can be controlled in tests.
    private Clock clock = SystemClock.getInstance();

    public void setInProgressFolderManagerUri(final String inProgressFolderManagerUri) {
        this.inProgressFolderManagerUri = inProgressFolderManagerUri;
    }

    public void setInProgressFolderRootUri(final String inProgressFolderRootUri) {
        this.inProgressFolderRootUri = inProgressFolderRootUri;
    }

    public void setClock(final Clock clock) {
        this.clock = Preconditions.checkNotNull(clock);
    }

    // Internal direct: endpoints used to wire the sub-routes together.
    private String getStoreEventFileUri() {
        return internalDirectUri("store-event-file");
    }

    private String getStoreEventFileHandlerUri() {
        return internalDirectUri("store-event-file-handler");
    }

    private String getStoreControlFileUri() {
        return internalDirectUri("store-control-file");
    }

    @Override
    public void configure() throws Exception {
        configureRouter();
        configureStoreControlFileRoute();
        configureStoreEventFileRoute();
        configureStoreEventFileHandlerRoute();
    }

    /**
     * Entry-point route: dispatches incoming messages by ACTION and FILE_TYPE headers.
     * Messages with unrecognised header values fall through the choice blocks unhandled.
     */
    private void configureRouter() throws Exception {
        from(inProgressFolderManagerUri)
            .choice()
                .when(isEqualTo(header(Header.ACTION), constant(Action.STORE)))
                    .pipeline()
                        .choice()
                            .when(isEqualTo(header(Header.FILE_TYPE), constant(FileType.CONTROL)))
                                .to(getStoreControlFileUri())
                            .endChoice()
                            .when(isEqualTo(header(Header.FILE_TYPE), constant(FileType.EVENT)))
                                .to(getStoreEventFileUri())
                            .endChoice()
                        .end()
                    .end()
                .endChoice()
            .end()
        .end();
    }

    /**
     * Stores a control file under {@code ${id}/control/}; existing files are overwritten.
     */
    private void configureStoreControlFileRoute() throws Exception {
        from(getStoreControlFileUri())
            .bean(new ControlFileNameCalculator())
            .to(inProgressFolderRootUri + "?fileExist=Override")
            .process(LOGGER.info(camelLogMsg("Stored control file in in-progress folder")
                .documentId(header(Exchange.CORRELATION_ID))
                .fileName(header(Exchange.FILE_NAME))))
        .end();
    }

    /**
     * Stores an event file. Writes are retried (up to 10 times) because the timestamped
     * file name must be unique - the handler route fails on name collision.
     */
    private void configureStoreEventFileRoute() throws Exception {
        from(getStoreEventFileUri())
            .errorHandler(defaultErrorHandler()
                .maximumRedeliveries(10)
                .redeliveryDelay(1))
            .to(getStoreEventFileHandlerUri())
            .process(LOGGER.info(camelLogMsg("Stored event file in in-progress folder")
                .documentId(header(Exchange.CORRELATION_ID))
                .eventName(append(append(header(Header.MESSAGE_TYPE), constant("-")), header(Header.EVENT_TYPE)))
                .fileName(header(Exchange.FILE_NAME))))
        .end();
    }

    /**
     * Inner handler: computes the event file name and writes it, failing if the file
     * already exists (fileExist=Fail) so the caller's redelivery picks a new timestamp.
     * noErrorHandler() lets the failure propagate back to the calling route.
     */
    private void configureStoreEventFileHandlerRoute() throws Exception {
        from(getStoreEventFileHandlerUri())
            .errorHandler(noErrorHandler())
            .bean(new EventFileNameCalculator())
            .to(inProgressFolderRootUri + "?fileExist=Fail")
        .end();
    }

    // Processor / bean methods
    // The methods can't live in the route builder - it causes havoc with the debug/tracer logging

    /**
     * Rewrites {@link Exchange#FILE_NAME} to {@code ${correlationId}/control/${originalName}}.
     */
    public class ControlFileNameCalculator {
        public void calculateFileName(final Message message) throws Exception {
            final String originalName = Strings.nullToEmpty(message.getHeader(Exchange.FILE_NAME, String.class));
            final String id = message.getHeader(Exchange.CORRELATION_ID, String.class);
            if (Strings.isNullOrEmpty(id)) {
                throw new Exception("Missing header " + Exchange.CORRELATION_ID);
            }
            final String fileName = id + "/control/" + originalName;
            message.setHeader(Exchange.FILE_NAME, fileName);
        }
    }

    /**
     * Rewrites {@link Exchange#FILE_NAME} to
     * {@code ${correlationId}/events/${timestamp}-${messageType}-${eventType}}.
     * The incoming FILE_NAME header is treated as the message type and preserved in the
     * internal MESSAGE_TYPE header for logging.
     */
    public class EventFileNameCalculator {
        public void calculateFileName(final Message message) throws Exception {
            final String messageType = Strings.nullToEmpty(message.getHeader(Exchange.FILE_NAME, String.class));
            message.setHeader(Header.MESSAGE_TYPE, messageType);

            final String id = message.getHeader(Exchange.CORRELATION_ID, String.class);
            if (Strings.isNullOrEmpty(id)) {
                throw new Exception("Missing header " + Exchange.CORRELATION_ID);
            }

            final String eventType = message.getHeader(Header.EVENT_TYPE, String.class);
            if (Strings.isNullOrEmpty(eventType)) {
                throw new Exception("Missing header " + Header.EVENT_TYPE);
            }

            final String timestamp = TIMESTAMP_FORMAT.print(clock.getMillis());
            final String fileName = id + "/events/" + timestamp + "-" + messageType + "-" + eventType;
            message.setHeader(Exchange.FILE_NAME, fileName);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.complex;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Collections.singletonList;
import java.util.ArrayList;
import java.util.List;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ObjectArrays;
import io.netty.buffer.ArrowBuf;
import org.apache.arrow.memory.BaseAllocator;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.OutOfMemoryException;
import org.apache.arrow.vector.*;
import org.apache.arrow.vector.complex.impl.ComplexCopier;
import org.apache.arrow.vector.complex.impl.UnionListReader;
import org.apache.arrow.vector.complex.impl.UnionListWriter;
import org.apache.arrow.vector.complex.reader.FieldReader;
import org.apache.arrow.vector.complex.writer.FieldWriter;
import org.apache.arrow.vector.ipc.message.ArrowFieldNode;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.DictionaryEncoding;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.CallBack;
import org.apache.arrow.vector.util.JsonStringArrayList;
import org.apache.arrow.vector.util.OversizedAllocationException;
import org.apache.arrow.vector.util.TransferPair;
/**
 * Vector of variable-length lists. Each position holds a list of values stored in a single
 * child ("data") vector; an offset buffer (inherited from {@link BaseRepeatedValueVector})
 * marks list boundaries and a validity buffer marks null positions.
 */
public class ListVector extends BaseRepeatedValueVector implements FieldVector, PromotableVector {

  /** Creates an empty, nullable list vector with no dictionary encoding. */
  public static ListVector empty(String name, BufferAllocator allocator) {
    return new ListVector(name, allocator, FieldType.nullable(ArrowType.List.INSTANCE), null);
  }

  // One bit per position; bit set => list at that position is non-null.
  protected ArrowBuf validityBuffer;
  private UnionListReader reader;
  private CallBack callBack;
  private final FieldType fieldType;
  // Target size (bytes) for the next validity-buffer allocation.
  private int validityAllocationSizeInBytes;
  // Index one past the last position written; used to back-fill offset "holes".
  private int lastSet;

  // deprecated, use FieldType or static constructor instead
  @Deprecated
  public ListVector(String name, BufferAllocator allocator, CallBack callBack) {
    this(name, allocator, FieldType.nullable(ArrowType.List.INSTANCE), callBack);
  }

  // deprecated, use FieldType or static constructor instead
  @Deprecated
  public ListVector(String name, BufferAllocator allocator, DictionaryEncoding dictionary, CallBack callBack) {
    this(name, allocator, new FieldType(true, ArrowType.List.INSTANCE, dictionary, null), callBack);
  }

  public ListVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) {
    super(name, allocator, callBack);
    // Start with the allocator's shared zero-length buffer; real allocation is deferred.
    this.validityBuffer = allocator.getEmpty();
    this.reader = new UnionListReader(this);
    this.fieldType = checkNotNull(fieldType);
    this.callBack = callBack;
    this.validityAllocationSizeInBytes = getValidityBufferSizeFromCount(INITIAL_VALUE_ALLOCATION);
    this.lastSet = 0;
  }

  /**
   * Initializes the single child vector from the given field definitions.
   * A list must have exactly one child; the child must not already exist.
   */
  @Override
  public void initializeChildrenFromFields(List<Field> children) {
    if (children.size() != 1) {
      throw new IllegalArgumentException("Lists have only one child. Found: " + children);
    }
    Field field = children.get(0);
    AddOrGetResult<FieldVector> addOrGetVector = addOrGetVector(field.getFieldType());
    if (!addOrGetVector.isCreated()) {
      throw new IllegalArgumentException("Child vector already existed: " + addOrGetVector.getVector());
    }
    addOrGetVector.getVector().initializeChildrenFromFields(field.getChildren());
  }

  @Override
  public void setInitialCapacity(int numRecords) {
    validityAllocationSizeInBytes = getValidityBufferSizeFromCount(numRecords);
    super.setInitialCapacity(numRecords);
  }

  /**
   * Specialized version of setInitialCapacity() for ListVector. This is
   * used by some callers when they want to explicitly control and be
   * conservative about memory allocated for inner data vector. This is
   * very useful when we are working with memory constraints for a query
   * and have a fixed amount of memory reserved for the record batch. In
   * such cases, we are likely to face OOM or related problems when
   * we reserve memory for a record batch with value count x and
   * do setInitialCapacity(x) such that each vector allocates only
   * what is necessary and not the default amount but the multiplier
   * forces the memory requirement to go beyond what was needed.
   *
   * @param numRecords value count
   * @param density density of ListVector. Density is the average size of
   *                list per position in the List vector. For example, a
   *                density value of 10 implies each position in the list
   *                vector has a list of 10 values.
   *                A density value of 0.1 implies out of 10 positions in
   *                the list vector, 1 position has a list of size 1 and
   *                remaining positions are null (no lists) or empty lists.
   *                This helps in tightly controlling the memory we provision
   *                for inner data vector.
   */
  @Override
  public void setInitialCapacity(int numRecords, double density) {
    validityAllocationSizeInBytes = getValidityBufferSizeFromCount(numRecords);
    super.setInitialCapacity(numRecords, density);
  }

  /**
   * Get the density of this ListVector
   * @return density
   */
  public double getDensity() {
    if (valueCount == 0) {
      return 0.0D;
    }
    // Total number of child elements = last offset minus first offset.
    final int startOffset = offsetBuffer.getInt(0);
    final int endOffset = offsetBuffer.getInt(valueCount * OFFSET_WIDTH);
    final double totalListSize = endOffset - startOffset;
    return totalListSize / valueCount;
  }

  @Override
  public List<FieldVector> getChildrenFromFields() {
    return singletonList(getDataVector());
  }

  /**
   * Load the buffers of this vector with provided source buffers.
   * The caller manages the source buffers and populates them before invoking
   * this method.
   * @param fieldNode  the fieldNode indicating the value count
   * @param ownBuffers the buffers for this Field (own buffers only, children not included)
   */
  @Override
  public void loadFieldBuffers(ArrowFieldNode fieldNode, List<ArrowBuf> ownBuffers) {
    if (ownBuffers.size() != 2) {
      throw new IllegalArgumentException("Illegal buffer count, expected " + 2 + ", got: " + ownBuffers.size());
    }

    // Buffer order is fixed by the Arrow format: validity first, then offsets.
    ArrowBuf bitBuffer = ownBuffers.get(0);
    ArrowBuf offBuffer = ownBuffers.get(1);

    validityBuffer.release();
    validityBuffer = BitVectorHelper.loadValidityBuffer(fieldNode, bitBuffer, allocator);
    offsetBuffer.release();
    offsetBuffer = offBuffer.retain(allocator);

    validityAllocationSizeInBytes = validityBuffer.capacity();
    offsetAllocationSizeInBytes = offsetBuffer.capacity();

    lastSet = fieldNode.getLength();
    valueCount = fieldNode.getLength();
  }

  /**
   * Get the buffers belonging to this vector
   * @return the inner buffers.
   */
  @Override
  public List<ArrowBuf> getFieldBuffers() {
    List<ArrowBuf> result = new ArrayList<>(2);
    setReaderAndWriterIndex();
    result.add(validityBuffer);
    result.add(offsetBuffer);

    return result;
  }

  /**
   * Set the reader and writer indexes for the inner buffers.
   */
  private void setReaderAndWriterIndex() {
    validityBuffer.readerIndex(0);
    offsetBuffer.readerIndex(0);
    if (valueCount == 0) {
      validityBuffer.writerIndex(0);
      offsetBuffer.writerIndex(0);
    } else {
      validityBuffer.writerIndex(getValidityBufferSizeFromCount(valueCount));
      // Offsets hold valueCount + 1 entries (a trailing end offset).
      offsetBuffer.writerIndex((valueCount + 1) * OFFSET_WIDTH);
    }
  }

  @Override
  @Deprecated
  public List<BufferBacked> getFieldInnerVectors() {
    throw new UnsupportedOperationException("There are no inner vectors. Use getFieldBuffers");
  }

  /** Returns a new writer over this vector. */
  public UnionListWriter getWriter() {
    return new UnionListWriter(this);
  }

  /**
   * Same as {@link #allocateNewSafe()}.
   */
  @Override
  public void allocateNew() throws OutOfMemoryException {
    if (!allocateNewSafe()) {
      throw new OutOfMemoryException("Failure while allocating memory");
    }
  }

  /**
   * Allocate memory for the vector. We internally use a default value count
   * of 4096 to allocate memory for at least these many elements in the
   * vector.
   *
   * @return false if memory allocation fails, true otherwise.
   */
  public boolean allocateNewSafe() {
    boolean success = false;
    try {
      /* we are doing a new allocation -- release the current buffers */
      clear();
      /* allocate validity buffer */
      allocateValidityBuffer(validityAllocationSizeInBytes);
      /* allocate offset and data buffer */
      success = super.allocateNewSafe();
    } finally {
      if (!success) {
        // NOTE(review): returning from a finally block swallows any exception thrown
        // in the try block (it is converted into a plain "false" result) — confirm this
        // is the intended "safe" contract rather than an accidental exception sink.
        clear();
        return false;
      }
    }
    return true;
  }

  // Allocates and zero-fills a fresh validity buffer of (at least) the given size.
  private void allocateValidityBuffer(final long size) {
    final int curSize = (int) size;
    validityBuffer = allocator.buffer(curSize);
    validityBuffer.readerIndex(0);
    validityAllocationSizeInBytes = curSize;
    validityBuffer.setZero(0, validityBuffer.capacity());
  }

  /**
   * Resize the vector to increase the capacity. The internal behavior is to
   * double the current value capacity.
   */
  @Override
  public void reAlloc() {
    /* reallocate the validity buffer */
    reallocValidityBuffer();
    /* reallocate the offset and data */
    super.reAlloc();
  }

  // Grows both validity and offset buffers (used when writing past current capacity).
  private void reallocValidityAndOffsetBuffers() {
    reallocOffsetBuffer();
    reallocValidityBuffer();
  }

  // Doubles the validity buffer (rounded up to a power of two), copying and
  // zero-extending the existing bits.
  private void reallocValidityBuffer() {
    final int currentBufferCapacity = validityBuffer.capacity();
    long baseSize = validityAllocationSizeInBytes;

    if (baseSize < (long) currentBufferCapacity) {
      baseSize = (long) currentBufferCapacity;
    }

    long newAllocationSize = baseSize * 2L;
    newAllocationSize = BaseAllocator.nextPowerOfTwo(newAllocationSize);
    assert newAllocationSize >= 1;

    if (newAllocationSize > MAX_ALLOCATION_SIZE) {
      throw new OversizedAllocationException("Unable to expand the buffer");
    }

    final ArrowBuf newBuf = allocator.buffer((int) newAllocationSize);
    newBuf.setBytes(0, validityBuffer, 0, currentBufferCapacity);
    newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity);
    validityBuffer.release(1);
    validityBuffer = newBuf;
    validityAllocationSizeInBytes = (int) newAllocationSize;
  }

  /**
   * Same as {@link #copyFrom(int, int, ListVector)} except that
   * it handles the case when the capacity of the vector needs to be expanded
   * before copy.
   * @param inIndex position to copy from in source vector
   * @param outIndex position to copy to in this vector
   * @param from source vector
   */
  public void copyFromSafe(int inIndex, int outIndex, ListVector from) {
    // NOTE(review): delegates directly to copyFrom with no explicit expansion here;
    // presumably the writer/ComplexCopier path expands as needed — confirm.
    copyFrom(inIndex, outIndex, from);
  }

  /**
   * Copy a cell value from a particular index in source vector to a particular
   * position in this vector
   * @param inIndex position to copy from in source vector
   * @param outIndex position to copy to in this vector
   * @param from source vector
   */
  public void copyFrom(int inIndex, int outIndex, ListVector from) {
    FieldReader in = from.getReader();
    in.setPosition(inIndex);
    FieldWriter out = getWriter();
    out.setPosition(outIndex);
    ComplexCopier.copy(in, out);
  }

  /**
   * Get the inner data vector for this list vector
   * @return data vector
   */
  @Override
  public FieldVector getDataVector() {
    return vector;
  }

  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
    return getTransferPair(ref, allocator, null);
  }

  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) {
    return new TransferImpl(ref, allocator, callBack);
  }

  @Override
  public TransferPair makeTransferPair(ValueVector target) {
    return new TransferImpl((ListVector) target);
  }

  @Override
  public long getValidityBufferAddress() {
    return (validityBuffer.memoryAddress());
  }

  @Override
  public long getDataBufferAddress() {
    // A list vector has no data buffer of its own; data lives in the child vector.
    throw new UnsupportedOperationException();
  }

  @Override
  public long getOffsetBufferAddress() {
    return (offsetBuffer.memoryAddress());
  }

  @Override
  public ArrowBuf getValidityBuffer() {
    return validityBuffer;
  }

  @Override
  public ArrowBuf getDataBuffer() {
    // A list vector has no data buffer of its own; data lives in the child vector.
    throw new UnsupportedOperationException();
  }

  @Override
  public ArrowBuf getOffsetBuffer() {
    return offsetBuffer;
  }

  /** Transfers/splits this vector's buffers into a target ListVector. */
  private class TransferImpl implements TransferPair {

    ListVector to;
    TransferPair dataTransferPair;

    public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) {
      this(new ListVector(name, allocator, fieldType, callBack));
    }

    public TransferImpl(ListVector to) {
      this.to = to;
      // Ensure the target has a materialized child vector of the same type.
      to.addOrGetVector(vector.getField().getFieldType());
      if (to.getDataVector() instanceof ZeroVector) {
        to.addOrGetVector(vector.getField().getFieldType());
      }
      dataTransferPair = getDataVector().makeTransferPair(to.getDataVector());
    }

    /**
     * Transfer this vector'data to another vector. The memory associated
     * with this vector is transferred to the allocator of target vector
     * for accounting and management purposes.
     */
    @Override
    public void transfer() {
      to.clear();
      dataTransferPair.transfer();
      to.validityBuffer = validityBuffer.transferOwnership(to.allocator).buffer;
      to.offsetBuffer = offsetBuffer.transferOwnership(to.allocator).buffer;
      to.lastSet = lastSet;
      if (valueCount > 0) {
        to.setValueCount(valueCount);
      }
      clear();
    }

    /**
     * Slice this vector at desired index and length and transfer the
     * corresponding data to the target vector.
     * @param startIndex start position of the split in source vector.
     * @param length length of the split.
     */
    @Override
    public void splitAndTransfer(int startIndex, int length) {
      final int startPoint = offsetBuffer.getInt(startIndex * OFFSET_WIDTH);
      final int sliceLength = offsetBuffer.getInt((startIndex + length) * OFFSET_WIDTH) - startPoint;
      to.clear();
      to.allocateOffsetBuffer((length + 1) * OFFSET_WIDTH);
      /* splitAndTransfer offset buffer */
      // Rebase each offset so the target's first list starts at 0.
      for (int i = 0; i < length + 1; i++) {
        final int relativeOffset = offsetBuffer.getInt((startIndex + i) * OFFSET_WIDTH) - startPoint;
        to.offsetBuffer.setInt(i * OFFSET_WIDTH, relativeOffset);
      }
      /* splitAndTransfer validity buffer */
      splitAndTransferValidityBuffer(startIndex, length, to);
      /* splitAndTransfer data buffer */
      dataTransferPair.splitAndTransfer(startPoint, sliceLength);
      to.lastSet = length;
      to.setValueCount(length);
    }

    /*
     * transfer the validity.
     */
    private void splitAndTransferValidityBuffer(int startIndex, int length, ListVector target) {
      assert startIndex + length <= valueCount;
      int firstByteSource = BitVectorHelper.byteIndex(startIndex);
      int lastByteSource = BitVectorHelper.byteIndex(valueCount - 1);
      int byteSizeTarget = getValidityBufferSizeFromCount(length);
      int offset = startIndex % 8;

      if (length > 0) {
        if (offset == 0) {
          // slice
          if (target.validityBuffer != null) {
            target.validityBuffer.release();
          }
          // Byte-aligned split: a zero-copy slice of the source bits suffices.
          target.validityBuffer = validityBuffer.slice(firstByteSource, byteSizeTarget);
          target.validityBuffer.retain(1);
        } else {
          /* Copy data
           * When the first bit starts from the middle of a byte (offset != 0),
           * copy data from src BitVector.
           * Each byte in the target is composed by a part in i-th byte,
           * another part in (i+1)-th byte.
           */
          target.allocateValidityBuffer(byteSizeTarget);
          for (int i = 0; i < byteSizeTarget - 1; i++) {
            byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer, firstByteSource + i, offset);
            byte b2 = BitVectorHelper.getBitsFromNextByte(validityBuffer, firstByteSource + i + 1, offset);

            target.validityBuffer.setByte(i, (b1 + b2));
          }

          /* Copying the last piece is done in the following manner:
           * if the source vector has 1 or more bytes remaining, we copy
           * the last piece as a byte formed by shifting data
           * from the current byte and the next byte.
           *
           * if the source vector has no more bytes remaining
           * (we are at the last byte), we copy the last piece as a byte
           * by shifting data from the current byte.
           */
          if ((firstByteSource + byteSizeTarget - 1) < lastByteSource) {
            byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer,
                firstByteSource + byteSizeTarget - 1, offset);
            byte b2 = BitVectorHelper.getBitsFromNextByte(validityBuffer,
                firstByteSource + byteSizeTarget, offset);

            target.validityBuffer.setByte(byteSizeTarget - 1, b1 + b2);
          } else {
            byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer,
                firstByteSource + byteSizeTarget - 1, offset);
            target.validityBuffer.setByte(byteSizeTarget - 1, b1);
          }
        }
      }
    }

    @Override
    public ValueVector getTo() {
      return to;
    }

    @Override
    public void copyValueSafe(int from, int to) {
      this.to.copyFrom(from, to, ListVector.this);
    }
  }

  @Override
  public UnionListReader getReader() {
    return reader;
  }

  // Adds (or returns the existing) child data vector; rebuilds the reader since
  // the reader caches the child structure.
  public <T extends ValueVector> AddOrGetResult<T> addOrGetVector(FieldType fieldType) {
    AddOrGetResult<T> result = super.addOrGetVector(fieldType);
    reader = new UnionListReader(this);
    return result;
  }

  /**
   * Get the size (number of bytes) of underlying buffers used by this
   * vector
   * @return size of underlying buffers.
   */
  @Override
  public int getBufferSize() {
    if (valueCount == 0) {
      return 0;
    }
    final int offsetBufferSize = (valueCount + 1) * OFFSET_WIDTH;
    final int validityBufferSize = getValidityBufferSizeFromCount(valueCount);
    return offsetBufferSize + validityBufferSize + vector.getBufferSize();
  }

  @Override
  public Field getField() {
    return new Field(name, fieldType, ImmutableList.of(getDataVector().getField()));
  }

  @Override
  public MinorType getMinorType() {
    return MinorType.LIST;
  }

  @Override
  public void clear() {
    super.clear();
    validityBuffer = releaseBuffer(validityBuffer);
    lastSet = 0;
  }

  @Override
  public void reset() {
    super.reset();
    validityBuffer.setZero(0, validityBuffer.capacity());
    lastSet = 0;
  }

  /**
   * Return the underlying buffers associated with this vector. Note that this doesn't
   * impact the reference counts for this buffer so it only should be used for in-context
   * access. Also note that this buffer changes regularly thus
   * external classes shouldn't hold a reference to it (unless they change it).
   *
   * @param clear Whether to clear vector before returning; the buffers will still be refcounted
   *              but the returned array will be the only reference to them
   * @return The underlying {@link io.netty.buffer.ArrowBuf buffers} that is used by this
   *         vector instance.
   */
  @Override
  public ArrowBuf[] getBuffers(boolean clear) {
    setReaderAndWriterIndex();
    final ArrowBuf[] buffers;
    if (getBufferSize() == 0) {
      buffers = new ArrowBuf[0];
    } else {
      buffers = ObjectArrays.concat(new ArrowBuf[]{offsetBuffer},
          ObjectArrays.concat(new ArrowBuf[]{validityBuffer},
              vector.getBuffers(false), ArrowBuf.class), ArrowBuf.class);
    }
    if (clear) {
      // Retain before clear() so the returned array holds the only live references.
      for (ArrowBuf buffer : buffers) {
        buffer.retain();
      }
      clear();
    }
    return buffers;
  }

  @Override
  public UnionVector promoteToUnion() {
    // NOTE(review): the local "vector" shadows the inherited data-vector field of the
    // same name; replaceDataVector(vector) installs the new union as the child.
    UnionVector vector = new UnionVector("$data$", allocator, callBack);
    replaceDataVector(vector);
    reader = new UnionListReader(this);
    if (callBack != null) {
      callBack.doWork();
    }
    return vector;
  }

  /**
   * Get the element in the list vector at a particular index
   * @param index position of the element
   * @return Object at given position
   */
  @Override
  public Object getObject(int index) {
    if (isSet(index) == 0) {
      return null;
    }
    final List<Object> vals = new JsonStringArrayList<>();
    final int start = offsetBuffer.getInt(index * OFFSET_WIDTH);
    final int end = offsetBuffer.getInt((index + 1) * OFFSET_WIDTH);
    final ValueVector vv = getDataVector();
    for (int i = start; i < end; i++) {
      vals.add(vv.getObject(i));
    }

    return vals;
  }

  /**
   * Check if element at given index is null.
   *
   * @param index position of element
   * @return true if element at given index is null, false otherwise
   */
  @Override
  public boolean isNull(int index) {
    return (isSet(index) == 0);
  }

  /**
   * Same as {@link #isNull(int)}.
   *
   * @param index position of element
   * @return 1 if element at given index is not null, 0 otherwise
   */
  public int isSet(int index) {
    final int byteIndex = index >> 3;
    final byte b = validityBuffer.getByte(byteIndex);
    final int bitIndex = index & 7;
    return Long.bitCount(b & (1L << bitIndex));
  }

  /**
   * Get the number of elements that are null in the vector
   *
   * @return the number of null elements.
   */
  @Override
  public int getNullCount() {
    return BitVectorHelper.getNullCount(validityBuffer, valueCount);
  }

  /**
   * Get the current value capacity for the vector
   * @return number of elements that vector can hold.
   */
  @Override
  public int getValueCapacity() {
    return getValidityAndOffsetValueCapacity();
  }

  // Effective capacity is bounded by whichever of the two own buffers is smaller.
  private int getValidityAndOffsetValueCapacity() {
    // Offsets hold n + 1 entries for n values, hence the -1.
    final int offsetValueCapacity = Math.max(getOffsetBufferValueCapacity() - 1, 0);
    return Math.min(offsetValueCapacity, getValidityBufferValueCapacity());
  }

  private int getValidityBufferValueCapacity() {
    return (int) (validityBuffer.capacity() * 8L);
  }

  // Marks the position non-null, growing buffers if needed, and advances lastSet.
  public void setNotNull(int index) {
    while (index >= getValidityAndOffsetValueCapacity()) {
      reallocValidityAndOffsetBuffers();
    }
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    lastSet = index + 1;
  }

  /**
   * Start a new value in the list vector
   *
   * @param index index of the value to start
   */
  @Override
  public int startNewValue(int index) {
    while (index >= getValidityAndOffsetValueCapacity()) {
      reallocValidityAndOffsetBuffers();
    }
    // Back-fill any skipped positions with the previous end offset (empty lists).
    for (int i = lastSet; i <= index; i++) {
      final int currentOffset = offsetBuffer.getInt(i * OFFSET_WIDTH);
      offsetBuffer.setInt((i + 1) * OFFSET_WIDTH, currentOffset);
    }
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    lastSet = index + 1;
    return offsetBuffer.getInt(lastSet * OFFSET_WIDTH);
  }

  /**
   * End the current value
   *
   * @param index index of the value to end
   * @param size number of elements in the list that was written
   */
  public void endValue(int index, int size) {
    final int currentOffset = offsetBuffer.getInt((index + 1) * OFFSET_WIDTH);
    offsetBuffer.setInt((index + 1) * OFFSET_WIDTH, currentOffset + size);
  }

  /**
   * Sets the value count for the vector
   *
   * @param valueCount value count
   */
  @Override
  public void setValueCount(int valueCount) {
    this.valueCount = valueCount;
    if (valueCount > 0) {
      while (valueCount > getValidityAndOffsetValueCapacity()) {
        /* check if validity and offset buffers need to be re-allocated */
        reallocValidityAndOffsetBuffers();
      }
      for (int i = lastSet; i < valueCount; i++) {
        /* fill the holes with offsets */
        final int currentOffset = offsetBuffer.getInt(i * OFFSET_WIDTH);
        offsetBuffer.setInt((i + 1) * OFFSET_WIDTH, currentOffset);
      }
    }
    /* valueCount for the data vector is the current end offset */
    final int childValueCount = (valueCount == 0) ? 0 :
        offsetBuffer.getInt(valueCount * OFFSET_WIDTH);
    /* set the value count of data vector and this will take care of
     * checking whether data buffer needs to be reallocated.
     */
    vector.setValueCount(childValueCount);
  }

  public void setLastSet(int value) {
    lastSet = value;
  }

  public int getLastSet() {
    return lastSet;
  }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.portal.groups.smartldap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.ConcurrentException;
import org.apache.commons.lang3.concurrent.LazyInitializer;
import org.danann.cernunnos.Task;
import org.danann.cernunnos.runtime.RuntimeRequestResponse;
import org.danann.cernunnos.runtime.ScriptRunner;
import org.apereo.portal.EntityIdentifier;
import org.apereo.portal.groups.ComponentGroupServiceDescriptor;
import org.apereo.portal.groups.EntityGroupImpl;
import org.apereo.portal.groups.EntityTestingGroupImpl;
import org.apereo.portal.groups.GroupsException;
import org.apereo.portal.groups.IEntityGroup;
import org.apereo.portal.groups.IEntityGroupStore;
import org.apereo.portal.groups.IEntityGroupStoreFactory;
import org.apereo.portal.groups.IGroupConstants;
import org.apereo.portal.groups.IGroupMember;
import org.apereo.portal.groups.ILockableEntityGroup;
import org.apereo.portal.security.IPerson;
import org.apereo.portal.security.PersonFactory;
import org.jasig.services.persondir.IPersonAttributeDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.ldap.core.AttributesMapper;
import org.springframework.ldap.core.ContextSource;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
public final class SmartLdapGroupStore implements IEntityGroupStore {
// Instance Members.
// LDAP attribute on a person entry that names the groups the person belongs to.
private String memberOfAttributeName = "memberOf"; // default
public void setMemberOfAttributeName(String memberOfAttributeName) {
this.memberOfAttributeName = memberOfAttributeName;
}
// Base DN under which group entries are searched; required (validated in
// buildGroupsTree()).
private String baseGroupDn = null;
public void setBaseGroupDn(String baseGroupDn) {
this.baseGroupDn = baseGroupDn;
}
// LDAP search filter used to select group entries.
private String filter = "(objectCategory=group)"; // default
public void setFilter(String filter) {
this.filter = filter;
}
private ContextSource ldapContext = null; // default; must be set if used -- validated in refreshTree()
public void setLdapContext(ContextSource ldapContext) {
this.ldapContext = ldapContext;
}
// Whether member groups should themselves be resolved (see resolveDnList).
private boolean resolveMemberGroups = false; // default
public void setResolveMemberGroups(boolean resolveMemberGroups) {
this.resolveMemberGroups = resolveMemberGroups;
}
// DNs within which member-group resolution applies; used with resolveMemberGroups.
private List<String> resolveDnList = Collections.emptyList(); // default; used with resolveMemberGroups
public void setResolveDn(String resolveDn) {
this.resolveDnList = Collections.singletonList(resolveDn);
}
public void setResolveDnList(List<String> resolveDnList) {
this.resolveDnList = Collections.unmodifiableList(resolveDnList);
}
// Maps LDAP attributes onto IEntityGroup instances; required.
private AttributesMapper attributesMapper;
@Required
public void setAttributesMapper(AttributesMapper attributesMapper) {
this.attributesMapper = attributesMapper;
}
/**
 * Period after which SmartLdap will drop and rebuild the groups tree. May
 * be overridden in SmartLdapGroupStoreConfig.xml. A value of zero or less
 * (negative) disables this feature.
 */
private long groupsTreeRefreshIntervalSeconds = 900; // default
public void setGroupsTreeRefreshIntervalSeconds(long groupsTreeRefreshIntervalSeconds) {
this.groupsTreeRefreshIntervalSeconds = groupsTreeRefreshIntervalSeconds;
}
/**
 * Timestamp (milliseconds) of the last tree refresh.  Volatile so reads on
 * request threads observe the write made after a rebuild.
 */
private volatile long lastTreeRefreshTime = 0;
// Cernunnos tech...
private final ScriptRunner runner = new ScriptRunner();
// Compiled Cernunnos task that performs the LDAP crawl; init.crn must be on
// the classpath next to this class (getResource would return null otherwise).
private final Task initTask = runner.compileTask(getClass().getResource("init.crn").toExternalForm());
@Resource(name="personAttributeDao")
private IPersonAttributeDao personAttributeDao;
private final Logger log = LoggerFactory.getLogger(getClass());
/*
 * Indexed Collections.
 */
/**
 * Single-object abstraction that contains all knowledge of SmartLdap groups:
 * <ul>
 *   <li>Map of all groups keyed by 'key' (DN). Includes ROOT_GROUP.</li>
 *   <li>Map of all parent relationships keyed by the 'key' (DN) of the child;
 *   the values are lists of the 'keys' (DNs) of its parents.
 *   Includes ROOT_GROUP.</li>
 *   <li>Map of all child relationships keyed by the 'key' (DN) of the parent;
 *   the values are lists of the 'keys' (DNs) of its children.
 *   Includes ROOT_GROUP.</li>
 *   <li>Map of all 'keys' (DNs) of SmartLdap managed groups indexed by group
 *   name in upper case. Includes ROOT_GROUP.</li>
 * </ul>
 */
private GroupsTree groupsTree;
/*
 * Public API.
 */
// Message used by every unsupported-operation method below.
public static final String UNSUPPORTED_MESSAGE =
"The SmartLdap implementation of JA-SIG Groups and Permissions (GaP) " +
"does not support this operation.";
public static final String ROOT_KEY = "SmartLdap ROOT";
public static final String ROOT_DESC = "A root group provided for the SmartLdapGroupStore.";
// Lazily-created singleton root group; LazyInitializer makes the one-time
// construction thread-safe.
private static final LazyInitializer<IEntityGroup> rootGroupInitializer = new LazyInitializer<IEntityGroup>() {
@Override
protected IEntityGroup initialize() {
IEntityGroup rslt = new EntityTestingGroupImpl(ROOT_KEY, IPerson.class);
rslt.setCreatorID("System");
rslt.setName(ROOT_KEY);
rslt.setDescription(ROOT_DESC);
return rslt;
}
};
/** Not supported by the SmartLdap store; always throws. */
public boolean contains(IEntityGroup group, IGroupMember member) throws GroupsException {
log.warn("Unsupported method accessed: SmartLdapGroupStore.contains");
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
/** Not supported by the SmartLdap store; always throws. */
public void delete(IEntityGroup group) throws GroupsException {
log.warn("Unsupported method accessed: SmartLdapGroupStore.delete");
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
/**
 * Returns an instance of the <code>IEntityGroup</code> from the data store.
 * All SmartLdap groups (including the ROOT_GROUP) are indexed by key (DN)
 * in the tree's groups map, so this is a plain lookup.
 *
 * @param key java.lang.String the group key (DN)
 * @return org.apereo.portal.groups.IEntityGroup
 */
public IEntityGroup find(String key) throws GroupsException {
    if (isTreeRefreshRequired()) {
        refreshTree();
    }
    log.debug("Invoking find() for key: {}", key);
    final Map<String,IEntityGroup> catalog = groupsTree.getGroups();
    return catalog.get(key);
}
/**
 * Returns an <code>Iterator</code> over the <code>Collection</code> of
 * <code>IEntityGroups</code> that the <code>IGroupMember</code> belongs to.
 *
 * For a group, the local parents index is consulted directly; for a person
 * (root leaf type), the memberOf attribute is fetched from the
 * person-attribute DAO and each value naming a SmartLdap-managed group is
 * returned.
 *
 * @param gm org.apereo.portal.groups.IEntityGroup
 * @return java.util.Iterator
 */
public Iterator findParentGroups(IGroupMember gm) throws GroupsException {
    if (isTreeRefreshRequired()) {
        refreshTree();
    }
    final List<IEntityGroup> parents = new LinkedList<>();
    final IEntityGroup root = getRootGroup();
    if (gm.isGroup()) {
        // Only SmartLdap-managed groups appear in the local parents index...
        final String childKey = ((IEntityGroup) gm).getLocalKey();
        final List<String> parentKeys = groupsTree.getParents().get(childKey);
        if (parentKeys != null) {
            for (String parentKey : parentKeys) {
                parents.add(groupsTree.getGroups().get(parentKey));
            }
        }
    } else if (gm.getLeafType().equals(root.getLeafType())) {
        // Ask the individual:  seed the person-attribute DAO with the username...
        final EntityIdentifier ei = gm.getUnderlyingEntityIdentifier();
        final List<Object> seedValue = new LinkedList<>();
        seedValue.add(ei.getKey());
        final Map<String,List<Object>> seed = new HashMap<>();
        seed.put(IPerson.USERNAME, seedValue);
        final Map<String,List<Object>> attr = personAttributeDao.getMultivaluedUserAttributes(seed);
        // Avoid NPEs and unnecessary IPerson creation when nothing came back...
        if (attr != null && !attr.isEmpty()) {
            final IPerson p = PersonFactory.createPerson();
            p.setAttributes(attr);
            // IPerson returns null if no value is defined for this attribute...
            final Object[] groupKeys = p.getAttributeValues(memberOfAttributeName);
            if (groupKeys != null) {
                for (Object key : groupKeys) {
                    // Keep only keys that name a SmartLdap-managed group.
                    final IEntityGroup g = groupsTree.getGroups().get((String) key);
                    if (g != null) {
                        parents.add(g);
                    }
                }
            }
        }
    }
    return parents.iterator();
}
/**
 * Returns an <code>Iterator</code> over the <code>Collection</code> of
 * <code>IEntities</code> that are members of this <code>IEntityGroup</code>.
 * This store only tracks group-group relationships, so the call simply
 * delegates to {@link #findMemberGroups(IEntityGroup)}.
 *
 * @return java.util.Iterator
 * @param group org.apereo.portal.groups.IEntityGroup
 */
public Iterator findEntitiesForGroup(IEntityGroup group) throws GroupsException {
if (isTreeRefreshRequired()) {
refreshTree();
}
log.debug("Invoking findEntitiesForGroup() for group: {}", group.getLocalKey());
// We only deal w/ group-group relationships here...
return findMemberGroups(group);
}
/** Not supported by the SmartLdap store; always throws. */
public ILockableEntityGroup findLockable(String key) throws GroupsException {
log.warn("Unsupported method accessed: SmartLdapGroupStore.findLockable");
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
/**
 * Returns a <code>String[]</code> containing the keys of <code>IEntityGroups</code>
 * that are members of this <code>IEntityGroup</code>. In a composite group
 * system, a group may contain a member group from a different service. This is
 * called a foreign membership, and is only possible in an internally-managed
 * service. A group store in such a service can return the key of a foreign member
 * group, but not the group itself, which can only be returned by its local store.
 *
 * @return String[]
 * @param group org.apereo.portal.groups.IEntityGroup
 */
public String[] findMemberGroupKeys(IEntityGroup group) throws GroupsException {
    if (isTreeRefreshRequired()) {
        refreshTree();
    }
    log.debug("Invoking findMemberGroupKeys() for group: {}", group.getLocalKey());
    final List<String> keys = new LinkedList<>();
    final Iterator it = findMemberGroups(group);
    while (it.hasNext()) {
        // Return composite keys here...
        keys.add(((IEntityGroup) it.next()).getKey());
    }
    return keys.toArray(new String[0]);
}
/**
 * Returns an <code>Iterator</code> over the <code>Collection</code> of
 * <code>IEntityGroups</code> that are members of this <code>IEntityGroup</code>.
 *
 * @return java.util.Iterator
 * @param group org.apereo.portal.groups.IEntityGroup
 */
public Iterator findMemberGroups(IEntityGroup group) throws GroupsException {
    if (isTreeRefreshRequired()) {
        refreshTree();
    }
    log.debug("Invoking findMemberGroups() for group: {}", group.getLocalKey());
    final List<IEntityGroup> members = new LinkedList<>();
    // Only SmartLdap-managed groups appear in the local children index...
    final List<String> childKeys = groupsTree.getChildren().get(group.getLocalKey());
    if (childKeys != null) {
        for (String childKey : childKeys) {
            members.add(groupsTree.getGroups().get(childKey));
        }
    }
    return members.iterator();
}
/** Not supported by the SmartLdap store; always throws. */
public IEntityGroup newInstance(Class entityType) throws GroupsException {
log.warn("Unsupported method accessed: SmartLdapGroupStore.newInstance");
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
/**
 * Searches the groups tree by (upper-cased) group name.  Only the root
 * group's leaf type (IPerson) is supported; any other leaf type yields an
 * empty result.  Regex special characters in the query are escaped so the
 * query text is matched literally.
 *
 * @param query the name fragment to match (treated literally)
 * @param method one of the IGroupConstants search methods
 *               (IS, STARTS_WITH, ENDS_WITH, CONTAINS)
 * @param leaftype the leaf entity type being searched for
 * @return identifiers of all matching groups (possibly empty)
 * @throws GroupsException if the search method is not supported
 */
public EntityIdentifier[] searchForGroups(String query, int method, Class leaftype) throws GroupsException {
    if (isTreeRefreshRequired()) {
        refreshTree();
    }
    // BUGFIX: the third '{}' placeholder was missing, and
    // leaftype.getClass().getName() always logged "java.lang.Class";
    // leaftype is itself a Class, so log its own name.
    log.debug("Invoking searchForGroups(): query={}, method={}, leaftype={}",
                    query, method, leaftype.getName());
    // We only match the IPerson leaf type...
    final IEntityGroup root = getRootGroup();
    if (!leaftype.equals(root.getLeafType())) {
        return new EntityIdentifier[0];
    }
    // We need to escape regex special characters that appear in the query string...
    final String[][] specials = new String[][] {
                    /* backslash must come first! */
                    new String[] { "\\", "\\\\"},
                    new String[] { "[", "\\[" },
                    /* closing ']' isn't needed b/c it's a normal character w/o a preceding '[' */
                    new String[] { "{", "\\{" },
                    /* closing '}' isn't needed b/c it's a normal character w/o a preceding '{' */
                    new String[] { "^", "\\^" },
                    new String[] { "$", "\\$" },
                    new String[] { ".", "\\." },
                    new String[] { "|", "\\|" },
                    new String[] { "?", "\\?" },
                    new String[] { "*", "\\*" },
                    new String[] { "+", "\\+" },
                    new String[] { "(", "\\(" },
                    new String[] { ")", "\\)" }
    };
    for (String[] s : specials) {
        query = query.replace(s[0], s[1]);
    }
    // Establish the regex pattern to match on...
    String regex;
    switch (method) {
        case IGroupConstants.IS:
            regex = query.toUpperCase();
            break;
        case IGroupConstants.STARTS_WITH:
            regex = query.toUpperCase() + ".*";
            break;
        case IGroupConstants.ENDS_WITH:
            regex = ".*" + query.toUpperCase();
            break;
        case IGroupConstants.CONTAINS:
            regex = ".*" + query.toUpperCase() + ".*";
            break;
        default:
            String msg = "Unsupported search method: " + method;
            throw new GroupsException(msg);
    }
    List<EntityIdentifier> rslt = new LinkedList<>();
    for (Map.Entry<String,List<String>> y : groupsTree.getKeysByUpperCaseName().entrySet()) {
        if (y.getKey().matches(regex)) {
            List<String> keys = y.getValue();
            for (String k : keys) {
                rslt.add(new EntityIdentifier(k, IEntityGroup.class));
            }
        }
    }
    return rslt.toArray(new EntityIdentifier[rslt.size()]);
}
/** Not supported by the SmartLdap store; always throws. */
public void update(IEntityGroup group) throws GroupsException {
log.warn("Unsupported method accessed: SmartLdapGroupStore.update");
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
/** Not supported by the SmartLdap store; always throws. */
public void updateMembers(IEntityGroup group) throws GroupsException {
log.warn("Unsupported method accessed: SmartLdapGroupStore.updateMembers");
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
/**
 * Removes any child key of {@code record} that already appears in
 * {@code groupChain} (i.e. a circular group reference), logging a warning
 * for each one removed.
 *
 * @param record the record whose children are inspected
 * @param groupChain keys of the groups already visited on this chain
 * @return the original record if nothing was removed; otherwise a new
 *         record with the offending child keys filtered out
 */
public LdapRecord detectAndEliminateGroupReferences(LdapRecord record, List<String> groupChain) {
    final List<String> keysOfChildren = record.getKeysOfChildren();
    final List<String> keptChildren = new ArrayList<>();
    for (String key : keysOfChildren) {
        if (groupChain.contains(key)) {
            // Circular reference detected!
            log.warn("Circular reference detected and removed for the following groups: '{}' and '{}'",
                            key, record.getGroup().getLocalKey());
        } else {
            keptChildren.add(key);
        }
    }
    // Only build a replacement record if something was actually filtered out.
    return keptChildren.size() < keysOfChildren.size()
                    ? new LdapRecord(record.getGroup(), keptChildren)
                    : record;
}
/**
 * Indicates whether {@code record} has at least one child key ending in
 * {@code referenceDn} that is not already represented in {@code groupsSet}.
 *
 * @param record the record whose children are examined
 * @param referenceDn a child key must end with this DN to be considered
 * @param groupsSet records discovered so far
 * @return true if at least one matching child is not yet in groupsSet
 */
public boolean hasUndiscoveredChildrenWithinDn(LdapRecord record, String referenceDn, Set<LdapRecord> groupsSet) {
boolean rslt = false; // default
for (String childKey : record.getKeysOfChildren()) {
if (childKey.endsWith(referenceDn)) {
// Make sure the one we found isn't already in the groupsSet;
// NOTE!... this test takes advantage of the implementation of
// equals() on LdapRecord, which states that 2 records with the
// same group key are equal.
IEntityGroup group = new EntityGroupImpl(childKey, IPerson.class);
List<String> list = Collections.emptyList();
LdapRecord proxy = new LdapRecord(group, list);
if (!groupsSet.contains(proxy)) {
rslt = true;
break;
} else {
log.trace("Child group is already in collection: {}", childKey);
}
}
}
log.trace("Query for children of parent group '{}': {}", record.getGroup().getLocalKey(), rslt);
return rslt;
}
/*
 * Implementation.
 */
/**
 * Registers this instance with the nested Factory once dependency
 * injection completes, so Factory.newGroupStore() can return it.
 */
@PostConstruct
private void postConstruct() {
Factory.setInstance(this);
}
/**
 * Returns the lazily-initialized ROOT_GROUP singleton.
 *
 * @throws RuntimeException (wrapping ConcurrentException) if the one-time
 *         initialization fails
 */
private IEntityGroup getRootGroup() {
try {
return rootGroupInitializer.get();
} catch (ConcurrentException ce) {
throw new RuntimeException("Failed to obtain the SmartLdap root group", ce);
}
}
/**
 * Decides whether the groups tree must be (re)built:  always when no tree
 * exists yet; otherwise only when the refresh feature is enabled and the
 * refresh interval has elapsed.
 *
 * @return true if the tree should be rebuilt before use
 */
private boolean isTreeRefreshRequired() {
    // No tree yet?  Of course we need one.
    if (groupsTree == null) {
        return true;
    }
    // A zero or negative interval disables the periodic refresh feature.
    if (groupsTreeRefreshIntervalSeconds <= 0) {
        return false;
    }
    // 'lastTreeRefreshTime' is volatile, so (as of JDK 5) reads of it are
    // dependable in a multi-threaded environment.
    final long treeExpiresTimestamp = lastTreeRefreshTime + groupsTreeRefreshIntervalSeconds * 1000L;
    return System.currentTimeMillis() > treeExpiresTimestamp;
}
/**
 * Verifies that the collection of groups needs rebuilding and, if so,
 * spawns a new worker <code>Thread</code> for that purpose.
 *
 * Synchronized so only one caller triggers a rebuild at a time; late
 * arrivals re-check and return immediately once a rebuild has happened.
 * The first-ever build is joined (callers need a tree to proceed);
 * subsequent rebuilds run in the background while callers use the
 * just-expired tree.
 *
 * NOTE(review): 'groupsTree' is written from the worker thread but the
 * field is not volatile -- cross-thread visibility of the new tree is not
 * strictly guaranteed; confirm whether that is acceptable here.
 */
private synchronized void refreshTree() {
if (!isTreeRefreshRequired()) {
// The groupsTree was already re-built while
// we were waiting to enter this method.
return;
}
log.info("Refreshing groups tree for SmartLdap");
// We must join the builder thread if
// we don't have an existing groupsTree.
final boolean doJoin = groupsTree == null;
// In most cases, re-build the tree in a separate thread; the current
// request can proceed with the newly-expired groupsTree.
Thread refresh = new Thread("SmartLdap Refresh Worker") {
public void run() {
// Replace the old with the new...
try {
groupsTree = buildGroupsTree();
} catch (Throwable t) {
log.error("SmartLdapGroupStore failed to build the groups tree", t);
}
}
};
refresh.setDaemon(true);
refresh.start();
if (doJoin) {
try {
log.info("Joining the SmartLdap Refresh Worker Thread");
refresh.join();
} catch (InterruptedException ie) {
throw new RuntimeException(ie);
}
}
// Even if the refresh thread failed, don't try
// again for another groupsTreeRefreshIntervalSeconds.
lastTreeRefreshTime = System.currentTimeMillis();
}
/**
 * Crawls LDAP (via the Cernunnos init task) and constructs a completely
 * new set of group indices:  the main group catalog, the parent and child
 * relationship maps, and the upper-case-name index.  The ROOT_GROUP is
 * then added as the parent of every group that has no other parent.
 *
 * @return a fully-populated {@link GroupsTree}
 * @throws IllegalStateException if baseGroupDn or ldapContext is not set
 */
private GroupsTree buildGroupsTree() {
    long timestamp = System.currentTimeMillis();
    // Prepare the new local indices...
    Map<String,IEntityGroup> new_groups = Collections.synchronizedMap(new HashMap<String,IEntityGroup>());
    Map<String,List<String>> new_parents = Collections.synchronizedMap(new HashMap<String,List<String>>());
    Map<String,List<String>> new_children = Collections.synchronizedMap(new HashMap<String,List<String>>());
    Map<String,List<String>> new_keysByUpperCaseName = Collections.synchronizedMap(new HashMap<String,List<String>>());
    // Gather IEntityGroup objects from LDAP...
    RuntimeRequestResponse req = new RuntimeRequestResponse();
    Set<LdapRecord> set = new HashSet<>();
    req.setAttribute("GROUPS", set);
    req.setAttribute("smartLdapGroupStore", this);
    SubQueryCounter queryCounter = new SubQueryCounter();
    req.setAttribute("queryCounter", queryCounter);
    req.setAttribute("filter", filter); // This one changes iteratively...
    req.setAttribute("baseFilter", filter); // while this one stays the same.
    if (StringUtils.isBlank(baseGroupDn)) {
        throw new IllegalStateException("baseGroupDn property not set");
    }
    req.setAttribute("baseGroupDn", baseGroupDn);
    if (ldapContext == null) {
        throw new IllegalStateException("ldapContext property not set");
    }
    req.setAttribute("ldapContext", ldapContext);
    req.setAttribute("resolveMemberGroups", resolveMemberGroups);
    req.setAttribute("resolveDnList", resolveDnList);
    req.setAttribute("memberOfAttributeName", memberOfAttributeName);
    req.setAttribute("attributesMapper", attributesMapper);
    runner.run(initTask, req);
    log.info("init() found {} records", set.size());
    // First loop:  build the main catalog (new_groups)...
    for (LdapRecord r : set) {
        // new_groups (me)...
        IEntityGroup g = r.getGroup();
        new_groups.put(g.getLocalKey(), g);
    }
    // Second loop:  build the local indices...
    for (LdapRecord r : set) {
        IEntityGroup g = r.getGroup();
        // new_parents (I am a parent for all my children)...
        for (String childKey : r.getKeysOfChildren()) {
            // NB: We're only interested in relationships between
            // objects in the main catalog (i.e. new_groups);
            // discard everything else...
            // BUGFIX: this was 'break', which silently dropped ALL remaining
            // children of this group as soon as one unknown child key was
            // encountered; the sibling loop below (new_children) shows the
            // intended per-child skip.
            if (!new_groups.containsKey(childKey)) {
                continue;
            }
            List<String> parentsList = new_parents.get(childKey);
            if (parentsList == null) {
                // first parent for this child...
                parentsList = Collections.synchronizedList(new LinkedList<String>());
                new_parents.put(childKey, parentsList);
            }
            parentsList.add(g.getLocalKey());
        }
        // new_children...
        List<String> childrenList = Collections.synchronizedList(new LinkedList<String>());
        for (String childKey : r.getKeysOfChildren()) {
            // NB: We're only interested in relationships between
            // objects in the main catalog (i.e. new_groups);
            // discard everything else...
            if (new_groups.containsKey(childKey)) {
                childrenList.add(childKey);
            }
        }
        new_children.put(g.getLocalKey(), childrenList);
        // new_keysByUpperCaseName...
        List<String> groupsWithMyName = new_keysByUpperCaseName.get(g.getName().toUpperCase());
        if (groupsWithMyName == null) {
            // I am the first group with my name (pretty likely)...
            groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
            new_keysByUpperCaseName.put(g.getName().toUpperCase(), groupsWithMyName);
        }
        groupsWithMyName.add(g.getLocalKey());
    }
    /*
     * Now load the ROOT_GROUP into the collections...
     */
    // new_groups (me)...
    final IEntityGroup root = getRootGroup();
    new_groups.put(root.getLocalKey(), root);
    // new_parents (I am a parent for all groups that have no other parent)...
    List<String> childrenOfRoot = Collections.synchronizedList(new LinkedList<String>()); // for later...
    for (String possibleChildKey : new_groups.keySet()) {
        if (!possibleChildKey.equals(root.getLocalKey()) && !new_parents.containsKey(possibleChildKey)) {
            List<String> p = Collections.synchronizedList(new LinkedList<String>());
            p.add(root.getLocalKey());
            new_parents.put(possibleChildKey, p);
            childrenOfRoot.add(possibleChildKey); // for later...
        }
    }
    // new_children...
    new_children.put(root.getLocalKey(), childrenOfRoot);
    // new_keysByUpperCaseName...
    List<String> groupsWithMyName = new_keysByUpperCaseName.get(root.getName().toUpperCase());
    if (groupsWithMyName == null) {
        // I am the first group with my name (pretty likely)...
        groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
        new_keysByUpperCaseName.put(root.getName().toUpperCase(), groupsWithMyName);
    }
    groupsWithMyName.add(root.getLocalKey());
    final long benchmark = System.currentTimeMillis() - timestamp;
    log.info("Refresh of groups tree completed in {} milliseconds", benchmark);
    log.info("Total number of LDAP queries: {}", queryCounter.getCount() + 1);
    final String msg = "init() :: final size of each collection is as follows..."
                    + "\n\tgroups={}"
                    + "\n\tparents={}"
                    + "\n\tchildren={}"
                    + "\n\tkeysByUpperCaseName={}";
    log.info(msg, new_groups.size(), new_parents.size(), new_children.size(), new_keysByUpperCaseName.size());
    if (log.isTraceEnabled()) {
        StringBuilder sbuilder = new StringBuilder();
        // new_groups...
        sbuilder.setLength(0);
        sbuilder.append("Here are the keys of the new_groups collection:");
        for (String s : new_groups.keySet()) {
            sbuilder.append("\n\t").append(s);
        }
        log.trace(sbuilder.toString());
        // new_parents...
        sbuilder.setLength(0);
        sbuilder.append("Here are the parents of each child in the new_parents collection:");
        for (Map.Entry<String,List<String>> y : new_parents.entrySet()) {
            sbuilder.append("\n\tchild=").append(y.getKey());
            for (String s : y.getValue()) {
                sbuilder.append("\n\t\tparent=").append(s);
            }
        }
        log.trace(sbuilder.toString());
        // new_children...
        sbuilder.setLength(0);
        sbuilder.append("Here are the children of each parent in the new_children collection:");
        for (Map.Entry<String,List<String>> y : new_children.entrySet()) {
            sbuilder.append("\n\tparent=").append(y.getKey());
            for (String s : y.getValue()) {
                sbuilder.append("\n\t\tchild=").append(s);
            }
        }
        log.trace(sbuilder.toString());
        // new_keysByUpperCaseName...
        // BUGFIX: the reset was missing here, so this trace message also
        // repeated the entire new_children dump above.
        sbuilder.setLength(0);
        sbuilder.append("Here are the groups that have each name in the new_keysByUpperCaseName collection:");
        for (Map.Entry<String,List<String>> y : new_keysByUpperCaseName.entrySet()) {
            sbuilder.append("\n\tname=").append(y.getKey());
            for (String s : y.getValue()) {
                sbuilder.append("\n\t\tgroup=").append(s);
            }
        }
        log.trace(sbuilder.toString());
    }
    return new GroupsTree(new_groups, new_parents, new_children, new_keysByUpperCaseName);
}
/*
 * Nested Types.
 */
/**
 * {@link IEntityGroupStoreFactory} that hands out the single
 * SmartLdapGroupStore instance registered via setInstance() (called from
 * postConstruct()).
 */
public static final class Factory implements IEntityGroupStoreFactory {
private static IEntityGroupStore instance;
private static void setInstance(IEntityGroupStore smartLdapGroupStore) {
instance = smartLdapGroupStore;
}
/*
 * Public API.
 */
public IEntityGroupStore newGroupStore() throws GroupsException {
return instance;
}
public IEntityGroupStore newGroupStore(ComponentGroupServiceDescriptor svcDescriptor) throws GroupsException {
return instance;
}
}
/**
 * Immutable holder for the four group indices built by buildGroupsTree().
 * All references are fixed at construction time and none may be null.
 */
private static final class GroupsTree {
    // Instance Members.
    private final Map<String,IEntityGroup> groups;
    private final Map<String,List<String>> parents;
    private final Map<String,List<String>> children;
    private final Map<String,List<String>> keysByUpperCaseName;
    /*
     * Public API.
     */
    public GroupsTree(Map<String,IEntityGroup> groups, Map<String,List<String>> parents,
                Map<String,List<String>> children,
                Map<String,List<String>> keysByUpperCaseName) {
        // Assertions + Instance Members.
        this.groups = checkNotNull(groups, "groups");
        this.parents = checkNotNull(parents, "parents");
        this.children = checkNotNull(children, "children");
        this.keysByUpperCaseName = checkNotNull(keysByUpperCaseName, "keysByUpperCaseName");
    }
    // Rejects a null constructor argument with the conventional message.
    private static <T> T checkNotNull(T value, String name) {
        if (value == null) {
            throw new IllegalArgumentException("Argument '" + name + "' cannot be null.");
        }
        return value;
    }
    public Map<String,IEntityGroup> getGroups() {
        return groups;
    }
    public Map<String,List<String>> getParents() {
        return parents;
    }
    public Map<String,List<String>> getChildren() {
        return children;
    }
    public Map<String,List<String>> getKeysByUpperCaseName() {
        return keysByUpperCaseName;
    }
}
/**
 * Mutable tally of LDAP sub-queries issued while building the tree.
 * NOTE(review): increment() has no in-class callers; presumably invoked
 * from the Cernunnos scripts via the request attribute -- confirm before
 * removing the @SuppressWarnings.
 */
private static final class SubQueryCounter {
    private int count = 0;
    @SuppressWarnings("unused")
    public void increment() {
        count = count + 1;
    }
    public int getCount() {
        return count;
    }
}
}
| |
/*
* Copyright (c) 2016, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.dva.argus.service.schedule;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.persist.Transactional;
import com.salesforce.dva.argus.entity.CronJob;
import com.salesforce.dva.argus.entity.DistributedSchedulingLock;
import com.salesforce.dva.argus.entity.JPAEntity;
import com.salesforce.dva.argus.entity.ServiceManagementRecord;
import com.salesforce.dva.argus.entity.ServiceManagementRecord.Service;
import com.salesforce.dva.argus.inject.SLF4JTypeListener;
import com.salesforce.dva.argus.service.AlertService;
import com.salesforce.dva.argus.service.AuditService;
import com.salesforce.dva.argus.service.DefaultService;
import com.salesforce.dva.argus.service.DistributedSchedulingLockService;
import com.salesforce.dva.argus.service.GlobalInterlockService.LockType;
import com.salesforce.dva.argus.service.SchedulingService;
import com.salesforce.dva.argus.service.ServiceManagementService;
import com.salesforce.dva.argus.service.UserService;
import com.salesforce.dva.argus.system.SystemConfiguration;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
/**
* Implementation of Distributed scheduling using database
*
* @author Raj Sarkapally rsarkapally@salesforce.com
*
*/
@Singleton
public class DistributedDatabaseSchedulingService extends DefaultService implements SchedulingService {
//~ Static fields/initializers *******************************************************************************************************************
// Quartz configuration property keys/values for the scheduler thread pool.
// NOTE(review): QUARTZ_THREAD_PRIORITY(_VALUE) are not referenced in the
// portion of the class visible here -- presumably consumed where the Quartz
// scheduler is configured; confirm before removing.
private static final String QUARTZ_THREADPOOL_COUNT = "org.quartz.threadPool.threadCount";
private static final String QUARTZ_THREAD_PRIORITY = "org.quartz.threadPool.threadPriority";
private static final String QUARTZ_THREAD_PRIORITY_VALUE = "3";
//~ Instance fields ******************************************************************************************************************************
@SLF4JTypeListener.InjectLogger
private Logger _logger;
private final AlertService _alertService;
private final UserService _userService;
private final ServiceManagementService _serviceManagementRecordService;
private final AuditService _auditService;
// Worker thread performing alert scheduling; managed by
// startAlertScheduling()/stopAlertScheduling().
private Thread _alertSchedulingThread;
private SystemConfiguration _configuration;
private final DistributedSchedulingLockService _distributedSchedulingService;
//~ Constructors *********************************************************************************************************************************
/**
 * Creates a new DistributedDatabaseSchedulingService object.
 *
 * @param alertService The alert service instance to use. Cannot be null.
 * @param userService The user service instance to use. Cannot be null.
 * @param serviceManagementRecordService The serviceManagementRecordService instance to use. Cannot be null.
 * @param auditService The audit service. Cannot be null.
 * @param config The system configuration used to configure the service. Cannot be null.
 * @param distributedSchedulingLockService The distributed scheduling lock service to use. Cannot be null.
 */
@Inject
DistributedDatabaseSchedulingService(AlertService alertService, UserService userService,
    ServiceManagementService serviceManagementRecordService, AuditService auditService, SystemConfiguration config, DistributedSchedulingLockService distributedSchedulingLockService) {
    super(config);
    requireArgument(alertService != null, "Alert service cannot be null.");
    requireArgument(userService != null, "User service cannot be null.");
    requireArgument(serviceManagementRecordService != null, "Service management record service cannot be null.");
    requireArgument(auditService != null, "Audit service cannot be null.");
    requireArgument(config != null, "System configuration cannot be null.");
    // Validate this collaborator too, for consistency with the arguments
    // above (it was previously assigned unchecked, deferring failure to a
    // later NullPointerException).
    requireArgument(distributedSchedulingLockService != null, "Distributed scheduling lock service cannot be null.");
    _alertService = alertService;
    _userService = userService;
    _serviceManagementRecordService = serviceManagementRecordService;
    _auditService = auditService;
    _configuration = config;
    _distributedSchedulingService = distributedSchedulingLockService;
}
//~ Methods **************************************************************************************************************************************
/** Starts the alert-scheduling worker thread if it is not already running. */
@Override
public synchronized void startAlertScheduling() {
requireNotDisposed();
if (_alertSchedulingThread != null && _alertSchedulingThread.isAlive()) {
// Idempotent: a live scheduling thread means there is nothing to do.
_logger.info("Request to start alert scheduling aborted as it is already running.");
} else {
_logger.info("Starting alert scheduling thread.");
_alertSchedulingThread = new SchedulingThread("schedule-alerts", LockType.ALERT_SCHEDULING);
_alertSchedulingThread.start();
_logger.info("Alert scheduling thread started.");
}
}
/**
 * Disposes this service:  stops the scheduling worker first, then disposes
 * the parent and the collaborating services.
 */
@Override
public synchronized void dispose() {
stopAlertScheduling();
super.dispose();
_serviceManagementRecordService.dispose();
_alertService.dispose();
_userService.dispose();
}
/**
 * Stops the alert-scheduling worker thread if it is running:  interrupts
 * it and blocks until it terminates.  No-op (with a log message) if the
 * thread is not alive.
 */
@Override
public synchronized void stopAlertScheduling() {
    requireNotDisposed();
    if (_alertSchedulingThread != null && _alertSchedulingThread.isAlive()) {
        _logger.info("Stopping alert scheduling");
        _alertSchedulingThread.interrupt();
        _logger.info("Alert scheduling thread interrupted.");
        try {
            _logger.info("Waiting for alert scheduling thread to terminate.");
            _alertSchedulingThread.join();
        } catch (InterruptedException ex) {
            // BUGFIX: restore the interrupt status so callers further up the
            // stack can still observe that this thread was interrupted.
            Thread.currentThread().interrupt();
            _logger.warn("Alert job scheduler was interrupted while shutting down.");
        }
        _logger.info("Alert job scheduling stopped.");
    } else {
        _logger.info("Requested shutdown of alert scheduling aborted as it is not yet running.");
    }
}
    /**
     * Globally enables all scheduling by switching the shared SCHEDULING service
     * management record on.
     */
    @Override
    @Transactional
    public synchronized void enableScheduling() {
        requireNotDisposed();
        _logger.info("Globally enabling all scheduling.");
        _setServiceEnabled(true);
        _logger.info("All scheduling globally enabled.");
    }
    /**
     * Globally disables all scheduling by switching the shared SCHEDULING service
     * management record off.
     */
    @Override
    @Transactional
    public synchronized void disableScheduling() {
        requireNotDisposed();
        _logger.info("Globally disabling all scheduling.");
        _setServiceEnabled(false);
        _logger.info("All scheduling globally disabled.");
    }
    /**
     * Returns whether the SCHEDULING service is globally enabled, consulting the
     * service management record store under its own lock.
     *
     * NOTE(review): {@code @Transactional} on a private method is ignored by
     * proxy-based transaction managers — confirm the interceptor used here
     * actually weaves private methods.
     */
    @Transactional
    private boolean _isSchedulingServiceEnabled() {
        synchronized (_serviceManagementRecordService) {
            return _serviceManagementRecordService.isServiceEnabled(Service.SCHEDULING);
        }
    }
/**
* Enables the scheduling service.
*
* @param enabled True to enable, false to disable.
*/
@Transactional
protected void _setServiceEnabled(boolean enabled) {
synchronized (_serviceManagementRecordService) {
ServiceManagementRecord record = _serviceManagementRecordService.findServiceManagementRecord(Service.SCHEDULING);
if (record == null) {
record = new ServiceManagementRecord(_userService.findAdminUser(), Service.SCHEDULING, enabled);
}
record.setEnabled(enabled);
_serviceManagementRecordService.updateServiceManagementRecord(record);
}
}
//~ Enums ****************************************************************************************************************************************
/**
* The implementation specific configuration properties.
*
* @author Raj Sarkapally (rsarkapally@salesforce.com)
*/
public enum Property {
/** Specifies the number of threads used for scheduling. Defaults to 1. */
QUARTZ_THREADPOOL_COUNT("service.property.scheduling.quartz.threadPool.threadCount", "1"),
JOBS_BLOCK_SIZE("service.property.scheduling.jobsBlockSize", "1000"),
SCHEDULING_REFRESH_INTERVAL_IN_MILLS("service.property.scheduling.schedulingRefeshInterval", "300000");
private final String _name;
private final String _defaultValue;
private Property(String name, String defaultValue) {
_name = name;
_defaultValue = defaultValue;
}
/**
* Returns the name of the property.
*
* @return The name of the property.
*/
public String getName() {
return _name;
}
/**
* Returns the default property value.
*
* @return The default property value.
*/
public String getDefaultValue() {
return _defaultValue;
}
}
//~ Inner Classes ********************************************************************************************************************************
/**
* Job scheduler.
*
* @author Raj Sarkapally (rsarkapally@salesforce.com)
*/
    /**
     * Job scheduler thread. In each refresh cycle it claims blocks of job indices
     * from the distributed scheduling lock, creates a fresh Quartz scheduler, and
     * registers the claimed jobs with it, so that jobs are spread across the
     * scheduler instances of a cluster.
     *
     * @author Raj Sarkapally (rsarkapally@salesforce.com)
     */
    private class SchedulingThread extends Thread {

        // Determines which kind of jobs this thread schedules (only ALERT_SCHEDULING
        // is handled by getEnabledJobs below).
        private final LockType lockType;

        /**
         * Creates a new SchedulingThread object.
         *
         * @param name     The name of the thread.
         * @param lockType Type of the schedule. Cannot be null.
         */
        public SchedulingThread(String name, LockType lockType) {
            super(name);
            this.lockType = lockType;
        }

        /**
         * It fetches all enabled CRON jobs from database.
         *
         * @return returns all enabled jobs for the given job type; empty if this
         *         service is disposed or the lock type is not ALERT_SCHEDULING.
         */
        protected List<CronJob> getEnabledJobs() {
            List<CronJob> result = new ArrayList<>();
            if (!isDisposed()) {
                if (LockType.ALERT_SCHEDULING.equals(lockType)) {
                    _logger.info("Retreiving all enabled alerts to schedule.");
                    synchronized (_alertService) {
                        result.addAll(_alertService.findAlertsByStatus(true));
                    }
                    _logger.info("Retrieved {} alerts.", result.size());
                }
            }
            return result;
        }

        /**
         * Main loop: repeatedly claims the next block of job indices, recreating the
         * scheduler (and re-reading the job list) whenever a new refresh cycle starts,
         * then sleeps until the next cycle.
         *
         * NOTE(review): when scheduling is globally disabled this loop spins without
         * sleeping — confirm that is intended.
         */
        @Override
        public void run() {
            Scheduler scheduler = null;
            long currentScheduleEndTime = -1;
            // Block size and refresh interval come from system configuration.
            int jobsBlockSize=Integer.parseInt(_configuration.getValue(Property.JOBS_BLOCK_SIZE.getName(), Property.JOBS_BLOCK_SIZE.getDefaultValue()));
            long schedulingRefreshTime = Long.parseLong(_configuration.getValue(Property.SCHEDULING_REFRESH_INTERVAL_IN_MILLS.getName(), Property.SCHEDULING_REFRESH_INTERVAL_IN_MILLS.getDefaultValue()));
            while (!isInterrupted()) {
                if (_isSchedulingServiceEnabled()) {
                    List<CronJob> jobs = null;
                    // Claim the next block of job indices for this instance.
                    DistributedSchedulingLock distributedSchedulingLock = _distributedSchedulingService.updateNGetDistributedScheduleByType(LockType.ALERT_SCHEDULING,jobsBlockSize,schedulingRefreshTime);
                    int jobsFromIndex = distributedSchedulingLock.getCurrentIndex() - jobsBlockSize;
                    while(jobsFromIndex < distributedSchedulingLock.getJobCount()){
                        // A new refresh cycle started: replace the scheduler and re-read
                        // the enabled jobs. On the first pass currentScheduleEndTime is -1,
                        // so this branch is expected to run and initialize 'jobs'.
                        // NOTE(review): if the lock ever returned a next-start time <= -1,
                        // 'jobs' would still be null below — confirm that cannot happen.
                        if(currentScheduleEndTime < distributedSchedulingLock.getNextScheduleStartTime()){
                            _disposeScheduler(scheduler);
                            scheduler = _createScheduler();
                            _logger.info("Creating a new scheduler with name {}", _getSchedulerName(scheduler));
                            currentScheduleEndTime=distributedSchedulingLock.getNextScheduleStartTime();
                            jobs = getEnabledJobs();
                        }
                        // Clamp the end of the claimed block to the number of jobs actually read.
                        int jobsToIndex = jobs.size()<(jobsFromIndex+jobsBlockSize)?jobs.size():jobsFromIndex+jobsBlockSize;
                        _logger.info("Adding jobs between {} and {} to scheduler {}", jobsFromIndex, jobsToIndex,_getSchedulerName(scheduler));
                        addJobsToScheduler(scheduler, _drainTo(jobs, jobsFromIndex, jobsToIndex));
                        _sleep(100); //This will distribute jobs evenly across schedulers.
                        distributedSchedulingLock = _distributedSchedulingService.updateNGetDistributedScheduleByType(LockType.ALERT_SCHEDULING,jobsBlockSize,schedulingRefreshTime);
                        jobsFromIndex = distributedSchedulingLock.getCurrentIndex() - jobsBlockSize;
                    }
                    _logger.info("All jobs are scheduled. Scheduler {} is sleeping for {} millis", _getSchedulerName(scheduler), distributedSchedulingLock.getNextScheduleStartTime()-System.currentTimeMillis());
                    _logger.info("Next schedule time is {}", distributedSchedulingLock.getNextScheduleStartTime());
                    // NOTE(review): if the next schedule start time is already in the past
                    // this passes a negative value to Thread.sleep, which throws an
                    // IllegalArgumentException that _sleep does not catch, killing this
                    // thread — confirm the lock guarantees a future timestamp.
                    _sleep(distributedSchedulingLock.getNextScheduleStartTime()-System.currentTimeMillis());
                }
            }
            _disposeScheduler(scheduler);
        }

        // Returns the [fromIndex, toIndex) slice of the job list, or an empty list if
        // fromIndex is out of range. Note: despite the name, this returns a subList
        // VIEW of 'jobs', not a copy.
        private List<CronJob> _drainTo(List<CronJob> jobs,int fromIndex, int toIndex){
            if(fromIndex>=jobs.size())
                return new ArrayList<>();
            return jobs.subList(fromIndex, toIndex);
        }

        // Builds, configures and starts a new Quartz scheduler instance; returns null
        // if Quartz initialization fails (callers tolerate a null scheduler).
        private Scheduler _createScheduler(){
            String schedulerName = null;
            Properties props = new Properties();
            // Set quartz worker thread properties
            props.put(QUARTZ_THREADPOOL_COUNT,
                _configuration.getValue(Property.QUARTZ_THREADPOOL_COUNT.getName(), Property.QUARTZ_THREADPOOL_COUNT.getDefaultValue()));
            props.put(QUARTZ_THREAD_PRIORITY, QUARTZ_THREAD_PRIORITY_VALUE);
            // NOTE(review): Properties is meant to hold String values; storing a
            // Boolean here relies on StdSchedulerFactory tolerating non-strings —
            // confirm this flag actually takes effect.
            props.put(StdSchedulerFactory.PROP_SCHED_SCHEDULER_THREADS_INHERIT_CONTEXT_CLASS_LOADER_OF_INITIALIZING_THREAD, true);
            /* Have multiple scheduler instances for different job types, so that when
             * we stop the previous instance of a scheduler during the refresh cycle it does not affect another scheduler.
             */
            switch (Thread.currentThread().getName()) {
                case "schedule-alerts":
                default:
                    schedulerName = "AlertScheduler-" + this.hashCode();
            }
            props.put(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, schedulerName);
            SchedulerFactory schedulerFactory;
            Scheduler scheduler = null;
            try {
                schedulerFactory = new StdSchedulerFactory(props);
                scheduler = schedulerFactory.getScheduler();
                scheduler.start();
            } catch (Exception e) {
                _logger.error("Exception in setting up scheduler: {}", e);
            }
            return scheduler;
        }

        // Returns a printable name for the scheduler, tolerating both a null
        // scheduler and a failing getSchedulerName() call.
        private String _getSchedulerName(Scheduler scheduler){
            if(scheduler==null)
                return "no-scheduler-instance";
            try {
                return scheduler.getSchedulerName();
            } catch (SchedulerException e) {
                _logger.error(e.toString());
                return "no-scheduler";
            }
        }

        // Registers each job with the scheduler, converting its linux cron entry to a
        // quartz cron expression. A failure to schedule one job is audited and logged
        // but does not stop the remaining jobs from being scheduled.
        private void addJobsToScheduler(Scheduler scheduler, List<CronJob> jobs){
            for (CronJob job : jobs) {
                _logger.debug("Adding job to scheduler: {}", job);
                try {
                    // Convert from linux cron to quartz cron expression
                    String quartzCronEntry = "0 " + job.getCronEntry().substring(0, job.getCronEntry().length() - 1) + "?";
                    JobDetail jobDetail = JobBuilder.newJob(RunnableJob.class).build();
                    CronTrigger cronTrigger = TriggerBuilder.newTrigger().withSchedule(CronScheduleBuilder.cronSchedule(quartzCronEntry)).build();
                    // Pass parameter to quartz worker threads
                    jobDetail.getJobDataMap().put(RunnableJob.CRON_JOB, job);
                    jobDetail.getJobDataMap().put(RunnableJob.LOCK_TYPE, lockType);
                    jobDetail.getJobDataMap().put("AlertService", _alertService);
                    jobDetail.getJobDataMap().put("AuditService", _auditService);
                    scheduler.scheduleJob(jobDetail, cronTrigger);
                } catch (Exception ex) {
                    // NOTE(review): 'msg' uses MessageFormat-style {0}/{1} placeholders but
                    // is passed to createAudit unformatted — confirm createAudit formats it.
                    String msg = "Failed to schedule job {0} : {1}";
                    JPAEntity entity = JPAEntity.class.cast(job);
                    _auditService.createAudit(msg, entity, entity, ex.getMessage());
                    _logger.error("Failed to schedule job {} : {}", job, ex.getMessage());
                }
            }
        }

        // Shuts the scheduler down (null-safe) and pauses briefly so its worker
        // threads can die before Tomcat checks for leaked threads.
        private void _disposeScheduler(Scheduler scheduler) {
            if (scheduler != null) {
                try {
                    scheduler.shutdown();
                    /* Add a small sleep so Tomcat does not complain - the web application has started a thread,
                     * but has failed to stop it.This is very likely to create a memory leak.
                     */
                    Thread.sleep(2000);
                } catch (SchedulerException e) {
                    _logger.error("Quartz failed to shutdown {}", e);
                } catch (InterruptedException e) {
                    _logger.warn("Shutdown of quartz scheduler was interrupted.");
                    Thread.currentThread().interrupt();
                }
            }
        }

        // Interruption-aware sleep: re-interrupts this thread so the run() loop's
        // isInterrupted() check terminates it.
        private void _sleep(long millis) {
            try {
                sleep(millis);
            } catch (InterruptedException ex) {
                _logger.warn("Scheduling was interrupted.");
                interrupt();
            }
        }
    }
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.quotas;
import static org.apache.hadoop.hbase.quotas.ThrottleQuotaTestUtil.doGets;
import static org.apache.hadoop.hbase.quotas.ThrottleQuotaTestUtil.doPuts;
import static org.apache.hadoop.hbase.quotas.ThrottleQuotaTestUtil.triggerNamespaceCacheRefresh;
import static org.apache.hadoop.hbase.quotas.ThrottleQuotaTestUtil.triggerTableCacheRefresh;
import static org.apache.hadoop.hbase.quotas.ThrottleQuotaTestUtil.triggerUserCacheRefresh;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
 * Verifies CLUSTER-scope throttle quotas. The mini cluster runs two region
 * servers, and the assertions below rely on a cluster-scope limit being divided
 * among region servers (machine-scope limits apply to each server in full).
 */
@Category({ RegionServerTests.class, LargeTests.class })
public class TestClusterScopeQuotaThrottle {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestClusterScopeQuotaThrottle.class);

  // Long refresh period: refreshes are forced explicitly via QuotaCache.TEST_FORCE_REFRESH.
  private final static int REFRESH_TIME = 30 * 60000;
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private final static TableName[] TABLE_NAMES =
    new TableName[] { TableName.valueOf("TestQuotaAdmin0"), TableName.valueOf("TestQuotaAdmin1"),
      TableName.valueOf("TestQuotaAdmin2") };
  private final static byte[] FAMILY = Bytes.toBytes("cf");
  private final static byte[] QUALIFIER = Bytes.toBytes("q");
  // Split point for the two-region table used in the per-region-count tests.
  private final static byte[][] SPLITS = new byte[][] { Bytes.toBytes("1") };
  private static Table[] tables;
  private final static String NAMESPACE = "TestNs";
  private final static TableName TABLE_NAME = TableName.valueOf(NAMESPACE, "TestTable");
  private static Table table;

  /**
   * Starts a 2-region-server mini cluster with quotas enabled and creates the
   * test tables (including the pre-split TABLE_NAME).
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().setBoolean(QuotaUtil.QUOTA_CONF_KEY, true);
    TEST_UTIL.getConfiguration().setInt(QuotaCache.REFRESH_CONF_KEY, REFRESH_TIME);
    TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10);
    TEST_UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 100);
    TEST_UTIL.getConfiguration().setInt("hbase.client.pause", 250);
    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 6);
    TEST_UTIL.getConfiguration().setBoolean("hbase.master.enabletable.roundrobin", true);
    TEST_UTIL.startMiniCluster(2);
    TEST_UTIL.waitTableAvailable(QuotaTableUtil.QUOTA_TABLE_NAME);
    QuotaCache.TEST_FORCE_REFRESH = true;
    tables = new Table[TABLE_NAMES.length];
    for (int i = 0; i < TABLE_NAMES.length; ++i) {
      tables[i] = TEST_UTIL.createTable(TABLE_NAMES[i], FAMILY);
      TEST_UTIL.waitTableAvailable(TABLE_NAMES[i]);
    }
    TEST_UTIL.getAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build());
    table = TEST_UTIL.createTable(TABLE_NAME, FAMILY, SPLITS);
    TEST_UTIL.waitTableAvailable(TABLE_NAME);
  }

  /** Deletes the test tables and namespace and shuts the mini cluster down. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    EnvironmentEdgeManager.reset();
    for (int i = 0; i < tables.length; ++i) {
      if (tables[i] != null) {
        tables[i].close();
        TEST_UTIL.deleteTable(TABLE_NAMES[i]);
      }
    }
    TEST_UTIL.deleteTable(TABLE_NAME);
    TEST_UTIL.getAdmin().deleteNamespace(NAMESPACE);
    TEST_UTIL.shutdownMiniCluster();
  }

  /** Clears cached quota state so each test starts from a clean slate. */
  @After
  public void tearDown() throws Exception {
    ThrottleQuotaTestUtil.clearQuotaCache(TEST_UTIL);
  }

  @Test
  public void testNamespaceClusterScopeQuota() throws Exception {
    final Admin admin = TEST_UTIL.getAdmin();
    // NOTE(review): this local shadows the class constant NAMESPACE ("TestNs");
    // the quotas below are set on the "default" namespace where tables[0] lives.
    final String NAMESPACE = "default";
    // Add 10req/min limit for write request in cluster scope
    admin.setQuota(QuotaSettingsFactory.throttleNamespace(NAMESPACE, ThrottleType.WRITE_NUMBER, 10,
      TimeUnit.MINUTES, QuotaScope.CLUSTER));
    // Add 6req/min limit for read request in machine scope
    admin.setQuota(QuotaSettingsFactory.throttleNamespace(NAMESPACE, ThrottleType.READ_NUMBER, 6,
      TimeUnit.MINUTES, QuotaScope.MACHINE));
    triggerNamespaceCacheRefresh(TEST_UTIL, false, TABLE_NAMES[0]);
    // Cluster-scope write quota (10/min) is split across the 2 RSs => at most 5
    // writes per RS; machine-scope read quota allows 6 reads per RS.
    assertEquals(5, doPuts(10, FAMILY, QUALIFIER, tables[0]));
    assertEquals(6, doGets(10, tables[0]));
    // Remove all the limits
    admin.setQuota(QuotaSettingsFactory.unthrottleNamespace(NAMESPACE));
    triggerNamespaceCacheRefresh(TEST_UTIL, true, TABLE_NAMES[0]);
  }

  @Test
  public void testTableClusterScopeQuota() throws Exception {
    final Admin admin = TEST_UTIL.getAdmin();
    admin.setQuota(QuotaSettingsFactory.throttleTable(TABLE_NAME, ThrottleType.READ_NUMBER, 20,
      TimeUnit.HOURS, QuotaScope.CLUSTER));
    triggerTableCacheRefresh(TEST_UTIL, false, TABLE_NAME);
    // The cluster-scope limit is apportioned by how many of TABLE_NAME's 2
    // regions each RS hosts.
    for (RegionServerThread rst : TEST_UTIL.getMiniHBaseCluster().getRegionServerThreads()) {
      for (TableName tableName : rst.getRegionServer().getOnlineTables()) {
        if (tableName.getNameAsString().equals(TABLE_NAME.getNameAsString())) {
          int rsRegionNum = rst.getRegionServer().getRegions(tableName).size();
          if (rsRegionNum == 0) {
            // If rs has 0 region, the machine limiter is 0 (20 * 0 / 2)
            break;
          } else if (rsRegionNum == 1) {
            // If rs has 1 region, the machine limiter is 10 (20 * 1 / 2)
            // Read rows from 1 region, so can read 10 first time and 0 second time
            long count = doGets(20, table);
            assertTrue(count == 0 || count == 10);
          } else if (rsRegionNum == 2) {
            // If rs has 2 regions, the machine limiter is 20 (20 * 2 / 2)
            assertEquals(20, doGets(20, table));
          }
          break;
        }
      }
    }
    admin.setQuota(QuotaSettingsFactory.unthrottleTable(TABLE_NAME));
    triggerTableCacheRefresh(TEST_UTIL, true, TABLE_NAME);
  }

  @Test
  public void testUserClusterScopeQuota() throws Exception {
    final Admin admin = TEST_UTIL.getAdmin();
    final String userName = User.getCurrent().getShortName();
    // Add 6req/min limit for read request in cluster scope
    admin.setQuota(QuotaSettingsFactory.throttleUser(userName, ThrottleType.READ_NUMBER, 6,
      TimeUnit.MINUTES, QuotaScope.CLUSTER));
    // Add 6req/min limit for write request in machine scope
    admin.setQuota(
      QuotaSettingsFactory.throttleUser(userName, ThrottleType.WRITE_NUMBER, 6, TimeUnit.MINUTES));
    triggerUserCacheRefresh(TEST_UTIL, false, TABLE_NAMES);
    // Machine-scope write quota allows 6 puts; cluster-scope read quota (6/min)
    // is split across the 2 RSs => at most 3 gets.
    assertEquals(6, doPuts(10, FAMILY, QUALIFIER, tables[0]));
    assertEquals(3, doGets(10, tables[0]));
    // Remove all the limits
    admin.setQuota(QuotaSettingsFactory.unthrottleUser(userName));
    triggerUserCacheRefresh(TEST_UTIL, true, TABLE_NAMES);
  }

  @org.junit.Ignore @Test // Spews the log w/ triggering of scheduler? HBASE-24035
  public void testUserNamespaceClusterScopeQuota() throws Exception {
    final Admin admin = TEST_UTIL.getAdmin();
    final String userName = User.getCurrent().getShortName();
    final String namespace = TABLE_NAMES[0].getNamespaceAsString();
    // Add 10req/min limit for read request in cluster scope
    admin.setQuota(QuotaSettingsFactory.throttleUser(userName, namespace, ThrottleType.READ_NUMBER,
      10, TimeUnit.MINUTES, QuotaScope.CLUSTER));
    // Add 6req/min limit for write request in machine scope
    admin.setQuota(QuotaSettingsFactory.throttleUser(userName, namespace, ThrottleType.WRITE_NUMBER,
      6, TimeUnit.MINUTES));
    triggerUserCacheRefresh(TEST_UTIL, false, TABLE_NAMES[0]);
    // Cluster-scope read quota (10/min) split across 2 RSs => at most 5 gets;
    // machine-scope write quota allows 6 puts.
    assertEquals(5, doGets(10, tables[0]));
    assertEquals(6, doPuts(10, FAMILY, QUALIFIER, tables[0]));
    // Remove all the limits
    admin.setQuota(QuotaSettingsFactory.unthrottleUser(userName, namespace));
    triggerUserCacheRefresh(TEST_UTIL, true, TABLE_NAMES[0]);
  }

  @Test
  public void testUserTableClusterScopeQuota() throws Exception {
    final Admin admin = TEST_UTIL.getAdmin();
    final String userName = User.getCurrent().getShortName();
    admin.setQuota(QuotaSettingsFactory.throttleUser(userName, TABLE_NAME, ThrottleType.READ_NUMBER,
      20, TimeUnit.HOURS, QuotaScope.CLUSTER));
    triggerUserCacheRefresh(TEST_UTIL, false, TABLE_NAME);
    // Same per-region apportioning as testTableClusterScopeQuota, but for a
    // user+table quota.
    for (RegionServerThread rst : TEST_UTIL.getMiniHBaseCluster().getRegionServerThreads()) {
      for (TableName tableName : rst.getRegionServer().getOnlineTables()) {
        if (tableName.getNameAsString().equals(TABLE_NAME.getNameAsString())) {
          int rsRegionNum = rst.getRegionServer().getRegions(tableName).size();
          if (rsRegionNum == 0) {
            // If rs has 0 region, the machine limiter is 0 (20 * 0 / 2)
            break;
          } else if (rsRegionNum == 1) {
            // If rs has 1 region, the machine limiter is 10 (20 * 1 / 2)
            // Read rows from 1 region, so can read 10 first time and 0 second time
            long count = doGets(20, table);
            assertTrue(count == 0 || count == 10);
          } else if (rsRegionNum == 2) {
            // If rs has 2 regions, the machine limiter is 20 (20 * 2 / 2)
            assertEquals(20, doGets(20, table));
          }
          break;
        }
      }
    }
    admin.setQuota(QuotaSettingsFactory.unthrottleUser(userName));
    triggerUserCacheRefresh(TEST_UTIL, true, TABLE_NAME);
  }
}
| |
/*
* Copyright 2014-2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.incubator.store.tunnel.impl;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.util.KryoNamespace;
import org.onosproject.cluster.ClusterService;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.core.IdGenerator;
import org.onosproject.incubator.net.tunnel.DefaultTunnel;
import org.onosproject.incubator.net.tunnel.Tunnel;
import org.onosproject.incubator.net.tunnel.Tunnel.Type;
import org.onosproject.incubator.net.tunnel.TunnelEndPoint;
import org.onosproject.incubator.net.tunnel.TunnelEvent;
import org.onosproject.incubator.net.tunnel.TunnelId;
import org.onosproject.incubator.net.tunnel.TunnelName;
import org.onosproject.incubator.net.tunnel.TunnelStore;
import org.onosproject.incubator.net.tunnel.TunnelStoreDelegate;
import org.onosproject.incubator.net.tunnel.TunnelSubscription;
import org.onosproject.net.Annotations;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.SparseAnnotations;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.store.AbstractStore;
import org.onosproject.store.app.GossipApplicationStore.InternalState;
import org.onosproject.store.cluster.messaging.ClusterCommunicationService;
import org.onosproject.store.serializers.KryoNamespaces;
import org.onosproject.store.service.EventuallyConsistentMap;
import org.onosproject.store.service.MultiValuedTimestamp;
import org.onosproject.store.service.StorageService;
import org.onosproject.store.service.WallClockTimestamp;
import org.slf4j.Logger;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableSet;
/**
* Manages inventory of tunnel in distributed data store that uses optimistic
* replication and gossip based techniques.
*/
@Component(immediate = true)
@Service
public class DistributedTunnelStore
extends AbstractStore<TunnelEvent, TunnelStoreDelegate>
implements TunnelStore {
private final Logger log = getLogger(getClass());
/**
* The topic used for obtaining globally unique ids.
*/
private String runnelOpTopoic = "tunnel-ops-ids";
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected ClusterCommunicationService clusterCommunicator;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected ClusterService clusterService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected CoreService coreService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected StorageService storageService;
// tunnel identity as map key in the store.
private EventuallyConsistentMap<TunnelId, Tunnel> tunnelIdAsKeyStore;
// tunnel name as map key in the store.
private EventuallyConsistentMap<TunnelName, Set<TunnelId>> tunnelNameAsKeyStore;
// maintains all the tunnels between source and destination.
private EventuallyConsistentMap<TunnelKey, Set<TunnelId>> srcAndDstKeyStore;
// maintains all the tunnels by tunnel type.
private EventuallyConsistentMap<Tunnel.Type, Set<TunnelId>> typeKeyStore;
// maintains records that app subscribes tunnel.
private EventuallyConsistentMap<ApplicationId, Set<TunnelSubscription>> orderRelationship;
private IdGenerator idGenerator;
@Activate
public void activate() {
KryoNamespace.Builder serializer = KryoNamespace.newBuilder()
.register(KryoNamespaces.API)
.register(MultiValuedTimestamp.class)
.register(InternalState.class);
tunnelIdAsKeyStore = storageService
.<TunnelId, Tunnel>eventuallyConsistentMapBuilder()
.withName("all_tunnel").withSerializer(serializer)
.withTimestampProvider((k, v) -> new WallClockTimestamp()).build();
tunnelNameAsKeyStore = storageService
.<TunnelName, Set<TunnelId>>eventuallyConsistentMapBuilder()
.withName("tunnel_name_tunnel").withSerializer(serializer)
.withTimestampProvider((k, v) -> new WallClockTimestamp()).build();
srcAndDstKeyStore = storageService
.<TunnelKey, Set<TunnelId>>eventuallyConsistentMapBuilder()
.withName("src_dst_tunnel").withSerializer(serializer)
.withTimestampProvider((k, v) -> new WallClockTimestamp()).build();
typeKeyStore = storageService
.<Tunnel.Type, Set<TunnelId>>eventuallyConsistentMapBuilder()
.withName("type_tunnel").withSerializer(serializer)
.withTimestampProvider((k, v) -> new WallClockTimestamp()).build();
orderRelationship = storageService
.<ApplicationId, Set<TunnelSubscription>>eventuallyConsistentMapBuilder()
.withName("type_tunnel").withSerializer(serializer)
.withTimestampProvider((k, v) -> new WallClockTimestamp()).build();
idGenerator = coreService.getIdGenerator(runnelOpTopoic);
log.info("Started");
}
    /**
     * Tears the store down by destroying every distributed map created in
     * {@link #activate()}.
     */
    @Deactivate
    public void deactivate() {
        orderRelationship.destroy();
        tunnelIdAsKeyStore.destroy();
        srcAndDstKeyStore.destroy();
        typeKeyStore.destroy();
        tunnelNameAsKeyStore.destroy();
        log.info("Stopped");
    }
    /**
     * Creates a new tunnel, or — when the tunnel already carries an id — merges
     * its annotations into the stored tunnel and emits TUNNEL_UPDATED.
     *
     * NOTE(review): {@code !"".equals(tunnel.tunnelId())} compares a String
     * against a TunnelId, so it is always true; the condition effectively only
     * checks for a null id.
     */
    @Override
    public TunnelId createOrUpdateTunnel(Tunnel tunnel) {
        // tunnelIdAsKeyStore.
        if (tunnel.tunnelId() != null && !"".equals(tunnel.tunnelId())) {
            // Update path: the id must already exist; otherwise this is a no-op
            // that returns the caller's id without storing anything.
            Tunnel old = tunnelIdAsKeyStore.get(tunnel.tunnelId());
            if (old == null) {
                log.info("This tunnel[" + tunnel.tunnelId() + "] is not available.");
                return tunnel.tunnelId();
            }
            // Merge the incoming annotations over the stored ones; all other
            // attributes are kept from the stored tunnel.
            DefaultAnnotations oldAnno = (DefaultAnnotations) old.annotations();
            SparseAnnotations newAnno = (SparseAnnotations) tunnel.annotations();
            Tunnel newT = new DefaultTunnel(old.providerId(), old.src(),
                                            old.dst(), old.type(),
                                            old.state(), old.groupId(),
                                            old.tunnelId(),
                                            old.tunnelName(),
                                            old.path(),
                                            DefaultAnnotations.merge(oldAnno, newAnno));
            // NOTE(review): only the primary store is rewritten here; the
            // name/src-dst/type indexes are not refreshed on update.
            tunnelIdAsKeyStore.put(tunnel.tunnelId(), newT);
            TunnelEvent event = new TunnelEvent(
                                                TunnelEvent.Type.TUNNEL_UPDATED,
                                                tunnel);
            notifyDelegate(event);
            return tunnel.tunnelId();
        } else {
            // Create path: allocate a fresh id and index the tunnel by id, name,
            // (src, dst) pair and type.
            TunnelId tunnelId = TunnelId.valueOf(idGenerator.getNewId());
            Tunnel newT = new DefaultTunnel(tunnel.providerId(), tunnel.src(),
                                            tunnel.dst(), tunnel.type(),
                                            tunnel.state(), tunnel.groupId(),
                                            tunnelId,
                                            tunnel.tunnelName(),
                                            tunnel.path(),
                                            tunnel.annotations());
            TunnelKey key = TunnelKey.tunnelKey(tunnel.src(), tunnel.dst());
            tunnelIdAsKeyStore.put(tunnelId, newT);
            Set<TunnelId> tunnelnameSet = tunnelNameAsKeyStore.get(tunnel
                    .tunnelName());
            if (tunnelnameSet == null) {
                tunnelnameSet = new HashSet<TunnelId>();
            }
            tunnelnameSet.add(tunnelId);
            tunnelNameAsKeyStore.put(tunnel
                    .tunnelName(), tunnelnameSet);
            Set<TunnelId> srcAndDstKeySet = srcAndDstKeyStore.get(key);
            if (srcAndDstKeySet == null) {
                srcAndDstKeySet = new HashSet<TunnelId>();
            }
            srcAndDstKeySet.add(tunnelId);
            srcAndDstKeyStore.put(key, srcAndDstKeySet);
            Set<TunnelId> typeKeySet = typeKeyStore.get(tunnel.type());
            if (typeKeySet == null) {
                typeKeySet = new HashSet<TunnelId>();
            }
            typeKeySet.add(tunnelId);
            typeKeyStore.put(tunnel.type(), typeKeySet);
            TunnelEvent event = new TunnelEvent(TunnelEvent.Type.TUNNEL_ADDED,
                                                tunnel);
            notifyDelegate(event);
            return tunnelId;
        }
    }
@Override
public void deleteTunnel(TunnelId tunnelId) {
Tunnel deletedTunnel = tunnelIdAsKeyStore.get(tunnelId);
if (deletedTunnel == null) {
return;
}
tunnelNameAsKeyStore.get(deletedTunnel.tunnelName()).remove(tunnelId);
tunnelIdAsKeyStore.remove(tunnelId);
TunnelKey key = new TunnelKey(deletedTunnel.src(), deletedTunnel.dst());
srcAndDstKeyStore.get(key).remove(tunnelId);
typeKeyStore.get(deletedTunnel.type()).remove(tunnelId);
TunnelEvent event = new TunnelEvent(TunnelEvent.Type.TUNNEL_REMOVED,
deletedTunnel);
notifyDelegate(event);
}
@Override
public void deleteTunnel(TunnelEndPoint src, TunnelEndPoint dst,
ProviderId producerName) {
TunnelKey key = TunnelKey.tunnelKey(src, dst);
Set<TunnelId> idSet = srcAndDstKeyStore.get(key);
if (idSet == null) {
return;
}
Tunnel deletedTunnel = null;
TunnelEvent event = null;
List<TunnelEvent> ls = new ArrayList<TunnelEvent>();
for (TunnelId id : idSet) {
deletedTunnel = tunnelIdAsKeyStore.get(id);
event = new TunnelEvent(TunnelEvent.Type.TUNNEL_REMOVED,
deletedTunnel);
ls.add(event);
if (producerName.equals(deletedTunnel.providerId())) {
tunnelIdAsKeyStore.remove(deletedTunnel.tunnelId());
tunnelNameAsKeyStore.get(deletedTunnel.tunnelName())
.remove(deletedTunnel.tunnelId());
srcAndDstKeyStore.get(key).remove(deletedTunnel.tunnelId());
typeKeyStore.get(deletedTunnel.type())
.remove(deletedTunnel.tunnelId());
}
}
notifyDelegate(ls);
}
@Override
public void deleteTunnel(TunnelEndPoint src, TunnelEndPoint dst, Type type,
ProviderId producerName) {
TunnelKey key = TunnelKey.tunnelKey(src, dst);
Set<TunnelId> idSet = srcAndDstKeyStore.get(key);
if (idSet == null) {
return;
}
Tunnel deletedTunnel = null;
TunnelEvent event = null;
List<TunnelEvent> ls = new ArrayList<TunnelEvent>();
for (TunnelId id : idSet) {
deletedTunnel = tunnelIdAsKeyStore.get(id);
event = new TunnelEvent(TunnelEvent.Type.TUNNEL_REMOVED,
deletedTunnel);
ls.add(event);
if (producerName.equals(deletedTunnel.providerId())
&& type.equals(deletedTunnel.type())) {
tunnelIdAsKeyStore.remove(deletedTunnel.tunnelId());
tunnelNameAsKeyStore.get(deletedTunnel.tunnelName())
.remove(deletedTunnel.tunnelId());
srcAndDstKeyStore.get(key).remove(deletedTunnel.tunnelId());
typeKeyStore.get(deletedTunnel.type())
.remove(deletedTunnel.tunnelId());
}
}
notifyDelegate(ls);
}
@Override
public Tunnel borrowTunnel(ApplicationId appId, TunnelId tunnelId,
Annotations... annotations) {
Set<TunnelSubscription> orderSet = orderRelationship.get(appId);
if (orderSet == null) {
orderSet = new HashSet<TunnelSubscription>();
}
TunnelSubscription order = new TunnelSubscription(appId, null, null, tunnelId, null, null,
annotations);
Tunnel result = tunnelIdAsKeyStore.get(tunnelId);
if (result != null || Tunnel.State.INACTIVE.equals(result.state())) {
return null;
}
orderSet.add(order);
orderRelationship.put(appId, orderSet);
return result;
}
@Override
public Collection<Tunnel> borrowTunnel(ApplicationId appId,
TunnelEndPoint src,
TunnelEndPoint dst,
Annotations... annotations) {
Set<TunnelSubscription> orderSet = orderRelationship.get(appId);
if (orderSet == null) {
orderSet = new HashSet<TunnelSubscription>();
}
TunnelSubscription order = new TunnelSubscription(appId, src, dst, null, null, null, annotations);
boolean isExist = orderSet.contains(order);
if (!isExist) {
orderSet.add(order);
}
orderRelationship.put(appId, orderSet);
TunnelKey key = TunnelKey.tunnelKey(src, dst);
Set<TunnelId> idSet = srcAndDstKeyStore.get(key);
if (idSet == null || idSet.size() == 0) {
return Collections.emptySet();
}
Collection<Tunnel> tunnelSet = new HashSet<Tunnel>();
for (TunnelId tunnelId : idSet) {
Tunnel result = tunnelIdAsKeyStore.get(tunnelId);
if (Tunnel.State.ACTIVE.equals(result.state())) {
tunnelSet.add(result);
}
}
return tunnelSet;
}
@Override
public Collection<Tunnel> borrowTunnel(ApplicationId appId,
TunnelEndPoint src,
TunnelEndPoint dst, Type type,
Annotations... annotations) {
Set<TunnelSubscription> orderSet = orderRelationship.get(appId);
if (orderSet == null) {
orderSet = new HashSet<TunnelSubscription>();
}
TunnelSubscription order = new TunnelSubscription(appId, src, dst, null, type, null, annotations);
boolean isExist = orderSet.contains(order);
if (!isExist) {
orderSet.add(order);
}
orderRelationship.put(appId, orderSet);
TunnelKey key = TunnelKey.tunnelKey(src, dst);
Set<TunnelId> idSet = srcAndDstKeyStore.get(key);
if (idSet == null || idSet.size() == 0) {
return Collections.emptySet();
}
Collection<Tunnel> tunnelSet = new HashSet<Tunnel>();
for (TunnelId tunnelId : idSet) {
Tunnel result = tunnelIdAsKeyStore.get(tunnelId);
if (type.equals(result.type())
&& Tunnel.State.ACTIVE.equals(result.state())) {
tunnelSet.add(result);
}
}
return tunnelSet;
}
@Override
public Collection<Tunnel> borrowTunnel(ApplicationId appId,
                                       TunnelName tunnelName,
                                       Annotations... annotations) {
    // Register the subscription keyed by tunnel name. Set.add is already
    // a no-op for a duplicate subscription.
    Set<TunnelSubscription> subscriptions = orderRelationship.get(appId);
    if (subscriptions == null) {
        subscriptions = new HashSet<TunnelSubscription>();
    }
    subscriptions.add(new TunnelSubscription(appId, null, null, null, null, tunnelName,
                                             annotations));
    orderRelationship.put(appId, subscriptions);
    // Collect the ACTIVE tunnels registered under this name.
    Set<TunnelId> ids = tunnelNameAsKeyStore.get(tunnelName);
    if (ids == null || ids.size() == 0) {
        return Collections.emptySet();
    }
    Collection<Tunnel> activeTunnels = new HashSet<Tunnel>();
    for (TunnelId id : ids) {
        Tunnel candidate = tunnelIdAsKeyStore.get(id);
        if (Tunnel.State.ACTIVE.equals(candidate.state())) {
            activeTunnels.add(candidate);
        }
    }
    return activeTunnels;
}
@Override
public boolean returnTunnel(ApplicationId appId, TunnelName tunnelName,
                            Annotations... annotations) {
    // Release by tunnel name: only the name field of the subscription is set.
    return deleteOrder(new TunnelSubscription(appId, null, null, null, null, tunnelName,
                                              annotations));
}
@Override
public boolean returnTunnel(ApplicationId appId, TunnelId tunnelId,
                            Annotations... annotations) {
    // Release by tunnel id: only the id field of the subscription is set.
    return deleteOrder(new TunnelSubscription(appId, null, null, tunnelId, null, null,
                                              annotations));
}
@Override
public boolean returnTunnel(ApplicationId appId, TunnelEndPoint src,
                            TunnelEndPoint dst, Type type,
                            Annotations... annotations) {
    // Release by endpoints and tunnel type.
    return deleteOrder(new TunnelSubscription(appId, src, dst, null, type, null, annotations));
}
@Override
public boolean returnTunnel(ApplicationId appId, TunnelEndPoint src,
                            TunnelEndPoint dst, Annotations... annotations) {
    // Release by endpoints only (any type, any name).
    return deleteOrder(new TunnelSubscription(appId, src, dst, null, null, null, annotations));
}
/**
 * Removes the given subscription from its consumer's subscription set.
 * A consumer with no subscriptions at all is treated as success; an
 * existing set that does not contain the subscription yields false.
 */
private boolean deleteOrder(TunnelSubscription order) {
    Set<TunnelSubscription> subscriptions = orderRelationship.get(order.consumerId());
    // Set.remove returns true exactly when the element was present, which
    // matches the original contains-then-remove sequence.
    return subscriptions == null || subscriptions.remove(order);
}
/**
 * Looks a tunnel up by its id.
 *
 * @param tunnelId the tunnel's identifier
 * @return the stored tunnel, or null when the id is unknown
 */
@Override
public Tunnel queryTunnel(TunnelId tunnelId) {
    return tunnelIdAsKeyStore.get(tunnelId);
}
/**
 * Returns an immutable snapshot of the subscriptions registered by the
 * given application, or an empty set when it has none.
 */
@Override
public Collection<TunnelSubscription> queryTunnelSubscription(ApplicationId appId) {
    Set<TunnelSubscription> subscriptions = orderRelationship.get(appId);
    if (subscriptions == null) {
        return Collections.emptySet();
    }
    return ImmutableSet.copyOf(subscriptions);
}
/**
 * Returns an immutable snapshot of all tunnels of the given type, or an
 * empty set when none are registered.
 */
@Override
public Collection<Tunnel> queryTunnel(Type type) {
    Set<TunnelId> ids = typeKeyStore.get(type);
    if (ids == null) {
        return Collections.emptySet();
    }
    Collection<Tunnel> matches = new HashSet<Tunnel>();
    for (TunnelId id : ids) {
        matches.add(tunnelIdAsKeyStore.get(id));
    }
    return matches.isEmpty() ? Collections.emptySet() : ImmutableSet.copyOf(matches);
}
/**
 * Returns an immutable snapshot of all tunnels between the given source
 * and destination end points, or an empty set when none are registered.
 */
@Override
public Collection<Tunnel> queryTunnel(TunnelEndPoint src, TunnelEndPoint dst) {
    Set<TunnelId> ids = srcAndDstKeyStore.get(TunnelKey.tunnelKey(src, dst));
    if (ids == null) {
        return Collections.emptySet();
    }
    Collection<Tunnel> matches = new HashSet<Tunnel>();
    for (TunnelId id : ids) {
        matches.add(tunnelIdAsKeyStore.get(id));
    }
    return matches.isEmpty() ? Collections.emptySet() : ImmutableSet.copyOf(matches);
}
/**
 * Returns a snapshot of every stored tunnel.
 *
 * Fix: previously returned {@code tunnelIdAsKeyStore.values()} directly,
 * exposing a live, mutable view of the backing map — callers could mutate
 * the store and would observe concurrent modifications mid-iteration.
 * An immutable copy also matches the other query methods, which already
 * return {@code ImmutableSet} snapshots.
 *
 * @return an immutable collection of all tunnels (possibly empty)
 */
@Override
public Collection<Tunnel> queryAllTunnels() {
    return ImmutableSet.copyOf(tunnelIdAsKeyStore.values());
}
/**
 * Returns the number of tunnels currently stored.
 *
 * @return the tunnel count
 */
@Override
public int tunnelCount() {
    return tunnelIdAsKeyStore.size();
}
/**
 * Immutable composite map key pairing a tunnel's source and destination
 * end points, used to index tunnels by their endpoints.
 */
private static final class TunnelKey {
    private final TunnelEndPoint src;
    private final TunnelEndPoint dst;

    private TunnelKey(TunnelEndPoint src, TunnelEndPoint dst) {
        this.src = src;
        this.dst = dst;
    }

    /**
     * Creates a map key from the given end points.
     *
     * @param src source end point
     * @param dst destination end point
     * @return a key combining source and destination
     */
    static TunnelKey tunnelKey(TunnelEndPoint src, TunnelEndPoint dst) {
        return new TunnelKey(src, dst);
    }

    @Override
    public int hashCode() {
        return Objects.hash(src, dst);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof TunnelKey)) {
            return false;
        }
        TunnelKey that = (TunnelKey) obj;
        return Objects.equals(src, that.src)
                && Objects.equals(dst, that.dst);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass()).add("src", src)
                .add("dst", dst).toString();
    }
}
}
| |
/*******************************************************************************
* Copyright 2001, 2007 JamesLuo(JamesLuo.au@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* Contributors:
*******************************************************************************/
package org.lirazs.gbackbone.reflection.client.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.lirazs.gbackbone.reflection.client.ArrayType;
import org.lirazs.gbackbone.reflection.client.ClassType;
import org.lirazs.gbackbone.reflection.client.NotFoundException;
import org.lirazs.gbackbone.reflection.client.Parameter;
import org.lirazs.gbackbone.reflection.client.PrimitiveType;
import org.lirazs.gbackbone.reflection.client.ReflectionRequiredException;
import org.lirazs.gbackbone.reflection.client.ReflectionUtils;
import org.lirazs.gbackbone.reflection.client.Type;
import org.lirazs.gbackbone.reflection.client.TypeOracle;
public abstract class TypeOracleImpl implements TypeOracle {
    /**
     * A reserved metadata tag to indicate that a field type, method return
     * type or method parameter type is intended to be parameterized. Note that
     * constructor type parameters are not supported at present.
     */
    public static final String TAG_TYPEARGS = "gwt.typeArgs";

    // Modifier bit flags consumed by modifierBitsToNames(). These are this
    // library's own encoding (not java.lang.reflect.Modifier values).
    public static final int MOD_ABSTRACT = 0x00000001;
    public static final int MOD_FINAL = 0x00000002;
    public static final int MOD_NATIVE = 0x00000004;
    public static final int MOD_PRIVATE = 0x00000008;
    public static final int MOD_PROTECTED = 0x00000010;
    public static final int MOD_PUBLIC = 0x00000020;
    public static final int MOD_STATIC = 0x00000040;
    public static final int MOD_TRANSIENT = 0x00000080;
    public static final int MOD_VOLATILE = 0x00000100;

    // Shared empty-array constants so callers never allocate fresh empties.
    static final ClassType[] NO_JCLASSES = new ClassType[0];
    // static final JConstructor[] NO_JCTORS = new JConstructor[0];
    static final FieldImpl[] NO_JFIELDS = new FieldImpl[0];
    static final MethodImpl[] NO_JMETHODS = new MethodImpl[0];
    // static final Package[] NO_JPACKAGES = new Package[0];
    static final Parameter[] NO_JPARAMS = new Parameter[0];
    static final Type[] NO_JTYPES = new Type[0];
    static final String[][] NO_STRING_ARR_ARR = new String[0][];
    static final String[] NO_STRINGS = new String[0];

    /** Global registry mapping qualified source names to Type instances. */
    private static Map<String, Type> typeMap = new HashMap<String, Type>();

    /**
     * Concatenates {@code strings[startIndex..]} with no separator.
     */
    static String combine(String[] strings, int startIndex) {
        // StringBuilder instead of StringBuffer: no synchronization is
        // needed for a method-local buffer.
        StringBuilder sb = new StringBuilder();
        for (int i = startIndex; i < strings.length; i++) {
            sb.append(strings[i]);
        }
        return sb.toString();
    }

    /**
     * Converts a modifier bit mask into the corresponding keyword names.
     */
    static String[] modifierBitsToNames(int bits) {
        List<String> strings = new ArrayList<String>();
        // The order is based on the order in which we want them to appear.
        if (0 != (bits & MOD_PUBLIC)) {
            strings.add("public");
        }
        if (0 != (bits & MOD_PRIVATE)) {
            strings.add("private");
        }
        if (0 != (bits & MOD_PROTECTED)) {
            strings.add("protected");
        }
        if (0 != (bits & MOD_STATIC)) {
            strings.add("static");
        }
        if (0 != (bits & MOD_ABSTRACT)) {
            strings.add("abstract");
        }
        if (0 != (bits & MOD_FINAL)) {
            strings.add("final");
        }
        if (0 != (bits & MOD_NATIVE)) {
            strings.add("native");
        }
        if (0 != (bits & MOD_TRANSIENT)) {
            strings.add("transient");
        }
        if (0 != (bits & MOD_VOLATILE)) {
            strings.add("volatile");
        }
        return strings.toArray(NO_STRINGS);
    }

    /**
     * Strips trailing purely-numeric name segments (the compiler-assigned
     * indices of anonymous classes), e.g. "com.Foo.1.2" becomes "com.Foo".
     *
     * Fix: the original discarded the result of its recursive call, so at
     * most one trailing number was ever removed; the recursion result is
     * now returned so all trailing numeric segments are stripped.
     */
    private static String removeAnonymousNumber(String name) {
        if (name == null || name.length() <= 0) {
            return name;
        }
        int lastIndex = name.lastIndexOf(".");
        if (lastIndex < 0) {
            // No dot at all: nothing that could be an anonymous-class suffix.
            return name;
        }
        try {
            Integer.parseInt(name.substring(lastIndex + 1));
            // Last segment is numeric: drop it and keep stripping.
            return removeAnonymousNumber(name.substring(0, lastIndex));
        } catch (NumberFormatException e) {
            // Last segment is a real identifier; done.
            return name;
        }
    }

    /** Subclass hook: resolve a type name not found in the local registry. */
    public abstract Type doGetType(String name);

    /**
     * Resolves a type by (possibly parameterized or array) source name,
     * first via local parsing/registry lookup and, failing that, via the
     * subclass-provided {@link #doGetType(String)}.
     *
     * @param name qualified source name, whitespace tolerated
     * @return the resolved type, never null
     * @throws ReflectionRequiredException if the type cannot be resolved
     */
    public Type getType(String name) throws ReflectionRequiredException {
        // Remove all internal and external whitespace.
        // Fix: the pattern was "\\\\s", which matches a literal backslash
        // followed by 's'; "\\s" matches whitespace as the comment intends.
        name = name.replaceAll("\\s", "");
        try {
            Type result = parseImpl(name);
            if (result == null) {
                throw new ReflectionRequiredException();
            }
            return result;
        } catch (ReflectionRequiredException e) {
            Type result = doGetType(name);
            if (result == null) {
                throw new ReflectionRequiredException(ReflectionUtils.createReflectionRequireMsg(name, ""));
            }
            return result;
        }
    }

    /**
     * Looks up a previously registered type by qualified source name.
     *
     * @return the registered type, or null when unknown
     */
    public static Type findType(String name) {
        return typeMap.get(name);
    }

    /**
     * Looks up a previously registered type for a Class object; the binary
     * '$' nested-class separator is normalized to '.' before the lookup.
     *
     * @return the registered type, or null when unknown
     */
    public static Type findType(Class<?> clazz) {
        return typeMap.get(clazz.getName().replace('$', '.'));
    }

    /**
     * Resolves a class type by name.
     *
     * @throws ReflectionRequiredException if the name cannot be resolved
     * @throws RuntimeException if the resolved type is not a class type
     */
    public ClassType getClassType(String name) throws ReflectionRequiredException {
        Type type = this.getType(name);
        if (type instanceof ClassType) {
            return (ClassType) type;
        }
        throw new RuntimeException(name + " not a class type.");
    }

    /** Cache of array types keyed by their component type. */
    private final Map<Type, ArrayType> arrayTypes = new HashMap<Type, ArrayType>();

    /**
     * Gets the type object that represents an array of the specified type.
     *
     * @param componentType the component type of the array, which can itself be
     *            an array type
     * @return a type object representing an array of the component type
     */
    public ArrayType getArrayType(Type componentType) {
        ArrayType arrayType = arrayTypes.get(componentType);
        if (arrayType == null) {
            arrayType = new ArrayTypeImpl(componentType);
            arrayTypes.put(componentType, arrayType);
        }
        return arrayType;
    }

    /** Lazily resolved reference to the java.lang.Object class type. */
    private ClassType javaLangObject;

    /**
     * Gets a reference to the type object representing
     * <code>java.lang.Object</code>.
     */
    public ClassType getJavaLangObject() {
        if (javaLangObject == null) {
            try {
                javaLangObject = this.getClassType("java.lang.Object");
            } catch (ReflectionRequiredException e) {
                throw new RuntimeException("Not include gwt reflection module? " + e.getMessage());
            }
            assert javaLangObject != null;
        }
        return javaLangObject;
    }

    /** Registers a type under its own qualified source name. */
    public static void putType(Type type) {
        putType(type, type.getQualifiedSourceName());
    }

    /** Registers a type under the given qualified source name. */
    public static void putType(Type type, String qualifiedSourceName) {
        typeMap.put(qualifiedSourceName, type);
    }

    /**
     * Resolves the class type for a Class object; array classes resolve to
     * the corresponding array type of their component class type.
     */
    public <T> ClassType<T> getClassType(Class<T> classz) throws ReflectionRequiredException {
        if (classz.isArray()) {
            return this.getArrayType(getClassType(classz.getComponentType()));
        }
        return getClassType(classz.getName().replace('$', '.'));
    }

    /**
     * Attempts to resolve a source type name without consulting doGetType:
     * handles arrays ("Foo[]"), erases parameterized types ("Foo<Bar>"),
     * then tries primitives and the static registry.
     *
     * @throws ReflectionRequiredException when the name is unrecognized
     */
    private Type parseImpl(String type) throws ReflectionRequiredException {
        if (type.endsWith("[]")) {
            String remainder = type.substring(0, type.length() - 2);
            Type componentType = this.getType(remainder);
            return getArrayType(componentType);
        }
        if (type.endsWith(">")) {
            int bracket = type.indexOf('<');
            if (bracket == -1) {
                throw new RuntimeException(
                        "Mismatched brackets; expected '<' to match subsequent '>'");
            }
            // Resolve the raw type.
            //
            String rawTypeName = type.substring(0, bracket);
            Type rawType = getType(rawTypeName);
            // For parameterised type, we just erase it.
            if (rawType != null) {
                return rawType;
            }
        }
        Type result = findPrimitiveType(type);
        if (result != null) {
            return result;
        }
        result = findType(type);
        if (result != null) {
            return result;
        }
        throw new ReflectionRequiredException(ReflectionUtils.createReflectionRequireMsg(type, "Unable to recognize '" + type + "' as a type name (is it fully qualified?)"));
    }

    /**
     * Maps a primitive-type keyword to its PrimitiveType constant, or null
     * when the name is not a primitive.
     */
    private PrimitiveType findPrimitiveType(String name) {
        if (PrimitiveType.BOOLEAN.getSimpleSourceName().equals(name)) {
            return PrimitiveType.BOOLEAN;
        } else if (PrimitiveType.BYTE.getSimpleSourceName().equals(name)) {
            return PrimitiveType.BYTE;
        } else if (PrimitiveType.CHAR.getSimpleSourceName().equals(name)) {
            return PrimitiveType.CHAR;
        } else if (PrimitiveType.DOUBLE.getSimpleSourceName().equals(name)) {
            return PrimitiveType.DOUBLE;
        } else if (PrimitiveType.FLOAT.getSimpleSourceName().equals(name)) {
            return PrimitiveType.FLOAT;
        } else if (PrimitiveType.INT.getSimpleSourceName().equals(name)) {
            return PrimitiveType.INT;
        } else if (PrimitiveType.LONG.getSimpleSourceName().equals(name)) {
            return PrimitiveType.LONG;
        } else if (PrimitiveType.SHORT.getSimpleSourceName().equals(name)) {
            return PrimitiveType.SHORT;
        } else if (PrimitiveType.VOID.getSimpleSourceName().equals(name)) {
            return PrimitiveType.VOID;
        } else {
            return null;
        }
    }
}
| |
package io.seansullivan.algorithms;
import java.lang.Comparable;
import java.lang.System;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;
import java.util.Random;
/**
* Implements several sorting algorithms as static methods for generic
* lists, including:
*
* 1) Merge sort
* 2) Quick sort
* 3) Heap sort
* 4) Shell sort
* 5) Insertion sort
* 6) Selection sort
* 7) Bubble sort
*
* Sorts to implement in future:
* 1) Iterative merge sort
* 2) Radix sort (String, Integer)
*
* The generic type of the list must implement the Comparable interface.
*
* @author Sean Sullivan (seans3@gmail.com)
*
* Copyright (C) 2016 Sean Sullivan
*/
public class Sorts {
    /**
     * Sorts the passed list using O(n log n) merge sort. Modifies the
     * passed list. Sorts in ascending order.
     *
     * The list is first copied into an array, since we use indexes
     * for this algorithm. After the array is sorted, it is written
     * back into the passed list.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void mergeSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        mergeSort(a, 0, a.length - 1);
        arrayIntoList(a, list);
    }

    /**
     * Recursive merge sort over a[low..high]. The base case is a
     * single-element (or empty) range; otherwise the two halves are sorted
     * recursively and merged.
     */
    private static void mergeSort(Object[] a, int low, int high) {
        assert(a != null);
        assert(high < a.length);
        if (low < high) {
            // Midpoint computed as offset from "low" to avoid overflow.
            int middle = ((high - low) / 2) + low;
            mergeSort(a, low, middle);
            mergeSort(a, middle + 1, high);
            merge(a, low, middle, high);
        }
    }

    /**
     * Merges the two adjacent, already-sorted sub-arrays a[low..middle] and
     * a[middle+1..high] back into a[low..high].
     *
     * Fix: the comparison is now {@code <= 0} so that equal elements are
     * taken from the lower half first, making the merge (and hence the
     * whole merge sort) stable.
     *
     * NOTE: the two temporary arrays could be allocated once by the caller
     * and reused as an optimization.
     */
    @SuppressWarnings("unchecked")
    private static void merge(Object[] a, int low, int middle, int high) {
        assert(low <= middle);
        assert(middle < high);
        int lowerSize = middle - low + 1;
        Object[] lower = new Object[lowerSize];
        System.arraycopy(a, low, lower, 0, lowerSize);
        int upperSize = high - middle;
        Object[] upper = new Object[upperSize];
        System.arraycopy(a, middle + 1, upper, 0, upperSize);
        // Repeatedly take the smaller head of the two runs, falling back to
        // whichever run still has elements once the other is exhausted.
        int lowerIdx = 0;
        int upperIdx = 0;
        for (int i = low; i <= high; i++) {
            if ((upperIdx >= upper.length)
                    || ((lowerIdx < lower.length)
                        && ((Comparable) lower[lowerIdx]).compareTo(upper[upperIdx]) <= 0)) {
                a[i] = lower[lowerIdx++];
            } else {
                a[i] = upper[upperIdx++];
            }
        }
    }

    /**
     * Sorts the passed list using quick sort: O(n log n) expected,
     * O(n^2) in the degenerate worst case. Modifies the passed list.
     * Sorts in ascending order.
     *
     * NOTE: choosing a pivot near the median (e.g. median-of-three) would
     * avoid the degenerate case on already-sorted input.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void quickSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        quickSort(a, 0, a.length - 1);
        arrayIntoList(a, list);
    }

    /**
     * Recursive quick sort over a[low..high] using the last element as the
     * pivot (Lomuto partition). Elements smaller than the pivot are moved
     * left of the "wall"; the pivot is then swapped to its final position
     * and both sides are sorted recursively.
     */
    @SuppressWarnings("unchecked")
    private static void quickSort(Object[] a, int low, int high) {
        assert(a != null);
        assert(high < a.length);
        if (low < high) {
            int pivot = high; // Choose pivot at the end of the array.
            int wall = low;
            for (int current = low; current <= high; current++) {
                if (((Comparable) a[current]).compareTo(a[pivot]) < 0) {
                    swap(a, current, wall);
                    wall++;
                }
            }
            // The pivot's final position is at the wall.
            swap(a, pivot, wall);
            quickSort(a, low, wall - 1);
            quickSort(a, wall + 1, high);
        }
    }

    /**
     * Sorts the passed list using O(n log n) heap sort, in ascending order.
     * Builds a max-heap over the array, then repeatedly swaps the maximum
     * (root) into its final position at the end and restores the heap over
     * the shrinking prefix.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void heapSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        heapify(a);
        for (int i = a.length - 1; i > 0; i--) {
            swap(a, 0, i);
            bubbleDown(a, 0, (i - 1));
        }
        arrayIntoList(a, list);
    }

    /**
     * Builds a max-heap in place (Floyd's bottom-up construction): sift
     * down every parent node, starting from the last parent and moving
     * toward the root.
     *
     * Fix: the previous implementation made a single top-down pass with one
     * swap per parent, which does not establish the heap invariant (e.g.
     * [1,2,3,4,5] became [3,5,1,4,2], where 5 dominates its parent 3),
     * producing incorrectly sorted output.
     */
    private static void heapify(Object[] a) {
        assert(a != null);
        int lastIdx = a.length - 1;
        // (a.length / 2) - 1 is the index of the last node with a child;
        // negative for arrays of size <= 1, which are trivially heaps.
        for (int parentIdx = (a.length / 2) - 1; parentIdx >= 0; parentIdx--) {
            bubbleDown(a, parentIdx, lastIdx);
        }
    }

    /**
     * Continually swaps the parent element with its largest child until the
     * heap property is restored or the parent becomes a leaf.
     */
    @SuppressWarnings("unchecked")
    private static void bubbleDown(Object[] a, int parentIdx, int lastIdx) {
        assert(a != null);
        assert(parentIdx >= 0);
        assert(parentIdx <= lastIdx);
        boolean swapped = true;
        // hasLeftChild() determines if the parent has any children.
        while (swapped && hasLeftChild(parentIdx, lastIdx)) {
            // Swapping with the largest child preserves the heap property.
            int largestChildIdx = largestChild(a, parentIdx, lastIdx);
            if (((Comparable) a[largestChildIdx]).compareTo(a[parentIdx]) > 0) {
                swap(a, parentIdx, largestChildIdx);
                parentIdx = largestChildIdx;
            } else {
                swapped = false;
            }
        }
    }

    /**
     * Returns the index of the larger of a parent's two children. Assumes
     * at least the left child exists.
     */
    @SuppressWarnings("unchecked")
    private static int largestChild(Object[] a, int parentIdx, int lastIdx) {
        assert(hasLeftChild(parentIdx, lastIdx));
        int leftChildIdx = leftChild(parentIdx);
        int largestChildIdx = leftChildIdx;
        if (hasRightChild(parentIdx, lastIdx)) {
            int rightChildIdx = rightChild(parentIdx);
            if (((Comparable) a[rightChildIdx]).compareTo(a[leftChildIdx]) > 0) {
                largestChildIdx = rightChildIdx;
            }
        }
        return largestChildIdx;
    }

    // Returns index of left child assuming 0-based arrays.
    private static int leftChild(int parentIdx) {
        return ((2 * parentIdx) + 1);
    }

    // Returns index of right child assuming 0-based arrays.
    private static int rightChild(int parentIdx) {
        return ((2 * parentIdx) + 2);
    }

    // True when the node at parentIdx has a left child within the heap.
    private static boolean hasLeftChild(int parentIdx, int lastIdx) {
        return (leftChild(parentIdx) <= lastIdx);
    }

    // True when the node at parentIdx has a right child within the heap.
    private static boolean hasRightChild(int parentIdx, int lastIdx) {
        return (rightChild(parentIdx) <= lastIdx);
    }

    /**
     * Sorts the passed list using shell sort, a generalization of insertion
     * sort that first sorts elements a "gap" apart, then progressively
     * halves the gap. With this gap sequence the worst case is O(n^2).
     * Sorts in ascending order.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void shellSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        // Integer division already floors; when the gap reaches 1 this is
        // plain insertion sort.
        int gap = a.length / 2;
        while (gap >= 1) {
            for (int end = gap; end < a.length; end++) {
                int j = end;
                int i = j - gap;
                while ((i >= 0) && ((Comparable) a[i]).compareTo(a[j]) > 0) {
                    swap(a, i, j);
                    j = i;
                    i = j - gap;
                }
            }
            gap /= 2; // Reduce the gap, until it is 1.
        }
        arrayIntoList(a, list);
    }

    /**
     * Sorts the passed list using O(n^2) insertion sort, in ascending
     * order: each element is swapped leftward until it reaches its
     * correct position among the already-sorted prefix.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void insertionSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        for (int i = 1; i < a.length; i++) {
            int j = i;
            while (j > 0 && ((Comparable) a[j - 1]).compareTo(a[j]) > 0) {
                swap(a, j, j - 1);
                j--;
            }
        }
        arrayIntoList(a, list);
    }

    /**
     * Sorts the passed list using O(n^2) selection sort, in ascending
     * order: for each position, the minimum of the remaining suffix is
     * swapped into place.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void selectionSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        for (int i = 0; i < a.length; i++) {
            int min = i;
            for (int j = i + 1; j < a.length; j++) {
                if (((Comparable) a[min]).compareTo(a[j]) > 0) {
                    min = j;
                }
            }
            swap(a, i, min);
        }
        arrayIntoList(a, list);
    }

    /**
     * Sorts the passed list using O(n^2) bubble sort, in ascending order:
     * adjacent out-of-order members are swapped, "bubbling" the largest
     * remaining value to the end of the unsorted prefix on each pass.
     *
     * @param list the list to be sorted; must not be null
     * @param <T> generic type of list element, which must implement Comparable
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<T>> void bubbleSort(List<T> list) {
        assert(list != null);
        Object[] a = list.toArray();
        for (int i = a.length; i > 0; i--) {
            for (int j = 1; j < i; j++) {
                if (((Comparable) a[j - 1]).compareTo(a[j]) > 0) {
                    swap(a, j, j - 1);
                }
            }
        }
        arrayIntoList(a, list);
    }

    /**
     * Helper method which exchanges elements in array "a" at indexes
     * "index1" and "index2".
     */
    private static void swap(Object[] a, int index1, int index2) {
        assert(a.length > 0);
        assert(index1 < a.length);
        assert(index2 < a.length);
        if (index1 != index2) {
            Object temp = a[index1];
            a[index1] = a[index2];
            a[index2] = temp;
        }
    }

    /**
     * Helper method which writes the elements of sorted array "a"
     * back into "list", element by element.
     */
    @SuppressWarnings("unchecked")
    private static <T> void arrayIntoList(Object[] a, List<T> list) {
        assert(a.length == list.size());
        ListIterator<T> iterator = list.listIterator();
        for (int i = 0; i < a.length; i++) {
            iterator.next();
            iterator.set((T) a[i]);
        }
    }

    /**
     * Creates a list of "size" random integers in [0, size).
     */
    public static List<Integer> createTestList(int size) {
        Random rand = new Random();
        List<Integer> list = new ArrayList<Integer>(size);
        for (int i = 0; i < size; i++) {
            int randomInt = rand.nextInt(size);
            list.add(randomInt);
        }
        return list;
    }

    /**
     * Returns true when "list" is in ascending (non-decreasing) order.
     */
    public static <T extends Comparable<T>> boolean validateSorted(List<T> list) {
        boolean sorted = true;
        ListIterator<T> iterator = list.listIterator();
        if (iterator.hasNext()) {
            T prev = iterator.next();
            while (iterator.hasNext()) {
                T current = iterator.next();
                if ((prev).compareTo(current) > 0) {
                    sorted = false;
                    break;
                }
                prev = current;
            }
        }
        return sorted;
    }

    /**
     * Smoke test driver: runs every sort on random lists of several sizes
     * and asserts the result is sorted (enable with "java -ea").
     */
    public static void main(String [] args) {
        System.out.println("STARTING Sort Test...");
        System.out.println();
        List<Integer> testListSizes = Arrays.asList(1, 2, 3, 10000);
        List<Integer> testList;
        // Test merge sort.
        System.out.println("Merge Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            mergeSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        // Test quick sort.
        System.out.println("Quick Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            quickSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        // Test heap sort.
        System.out.println("Heap Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            heapSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        // Test shell sort.
        // Fix: this section previously called insertionSort(), so shellSort()
        // was never exercised.
        System.out.println("Shell Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            shellSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        // Test insertion sort.
        System.out.println("Insertion Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            insertionSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        // Test selection sort.
        System.out.println("Selection Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            selectionSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        // Test bubble sort.
        System.out.println("Bubble Sort");
        for (int listSize : testListSizes) {
            testList = createTestList(listSize);
            bubbleSort(testList);
            assert(validateSorted(testList));
        }
        System.out.println();
        System.out.println("FINISHED Sort Test");
    }
}
| |
package com.haurentziu.starchart;
import com.haurentziu.planets.VSOPLoader;
import com.haurentziu.planets.VSOPVariable;
import com.haurentziu.render.*;
import com.haurentziu.render.Markings;
import com.jogamp.opengl.*;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
/**
* Created by haurentziu on 27.04.2016.
*/
public class GLStarchart implements GLEventListener{
private Rectangle2D ortoBounds;
private Rectangle2D windowBounds;
public static Observer observer;
public static boolean showGround = true; //
public static boolean showCardinalPoints = true;
public static boolean showConstellations = true; //
public static boolean showAzGrid = false; //
public static boolean showEcliptic = true; //
public static boolean showCelestialEq = true; //
public static boolean showStarNames = true;
public static boolean showEqGrid = false; //
public static boolean showDSO = true; //
public static boolean showMilkyWay = true;
public static boolean isPaused = false;
private Stars stars;
private Constellations constellations;
private DeepSpaceObjects messierObjects;
private Markings markings;
private Ground ground;
private SolarSystem solarSystem;
private AstroText astroText;
private VBO vbo;
public static int currentWarp = 7;
public static int timeWarpLevels[] = {-10000, -5000, -3000, -1000, -100, -10, -1, 1, 10, 100, 1000, 3000, 5000, 10000};
/**
 * Builds the star chart: creates the shared Observer and VBO wrapper, then
 * one renderer per layer, each configured with its own
 * vertex/geometry/fragment shader file triple from ./shader/.
 */
GLStarchart(){
    observer = new Observer();
    vbo = new VBO();
    stars = new Stars("./shader/vertex.glsl", "./shader/stars_geom.glsl", "./shader/star_frag.glsl");
    constellations = new Constellations("./shader/vertex.glsl", "./shader/const_geom.glsl", "./shader/const_frag.glsl");
    messierObjects = new DeepSpaceObjects("./shader/vertex.glsl", "./shader/messier_geom.glsl", "./shader/messier_frag.glsl");
    markings = new Markings("./shader/vertex.glsl", "./shader/marking_geom.glsl", "./shader/marking_frag.glsl");
    ground = new Ground("./shader/vertex.glsl", "./shader/ground_geom.glsl", "./shader/ground_frag.glsl");
    astroText = new AstroText("./shader/vertex.glsl", "./shader/text_geom.glsl", "./shader/text_frag.glsl");
    solarSystem = new SolarSystem();
    // Actual bounds are filled in later by reshape().
    ortoBounds = new Rectangle2D.Double();
    windowBounds = new Rectangle2D.Double();
}
/**
 * One-time GL setup: initializes each renderer's shader program, collects
 * all vertex and color data into two shared lists, and uploads them as a
 * single VBO.
 *
 * NOTE(review): the order of the load*() calls appears to determine each
 * renderer's offset within the shared VBO — confirm before reordering.
 */
@Override
public void init(GLAutoDrawable glAutoDrawable) {
    GL3 gl = glAutoDrawable.getGL().getGL3();
    stars.initialize(gl);
    constellations.initialize(gl);
    // Constellation lines are defined by references into the star array.
    constellations.loadConstellations(stars.getStarsArray());
    messierObjects.initialize(gl);
    markings.initialize(gl);
    ground.initialize(gl);
    // Accumulate every layer's geometry into one vertex/color pair of lists.
    ArrayList<Float> vertsList = new ArrayList<>();
    ArrayList<Float> colorList = new ArrayList<>();
    solarSystem.loadVertices(vertsList);
    solarSystem.loadColor(colorList);
    stars.loadVertices(vertsList);
    stars.loadColor(colorList);
    constellations.loadVertices(vertsList);
    messierObjects.loadVertices(vertsList);
    markings.loadAllVertices(vertsList);
    ground.loadVertices(vertsList);
    // Single upload of all accumulated data to the GPU.
    vbo.init(gl, vertsList, colorList);
    // Update the FPS counter every 20 frames (no listener attached).
    glAutoDrawable.getAnimator().setUpdateFPSFrames(20, null);
}
/**
 * Releases all GL resources (shader programs and the shared VBO) when the
 * drawable is destroyed.
 */
@Override
public void dispose(GLAutoDrawable glAutoDrawable) {
    System.err.println("Closing application...");
    GL3 gl = glAutoDrawable.getGL().getGL3();
    stars.delete(gl);
    constellations.delete(gl);
    messierObjects.delete(gl);
    markings.delete(gl);
    ground.delete(gl);
    vbo.delete(gl);
}
/**
 * Per-frame render: advances the simulated time (unless paused), then draws
 * the enabled layers back-to-front. The draw order matters because alpha
 * blending is enabled: markings, deep-sky objects and constellations go under
 * the stars, and the ground is painted last so it occludes everything below
 * the horizon.
 */
@Override
public void display(GLAutoDrawable glAutoDrawable) {
    GL3 gl = glAutoDrawable.getGL().getGL3();
    gl.glEnable(GL.GL_BLEND);
    gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
    gl.glClearColor(0f, 0.075f, 0.125f, 1f);
    gl.glClear(GL3.GL_COLOR_BUFFER_BIT | GL3.GL_DEPTH_BUFFER_BIT);
    if(isPaused) {
        // Frozen clock: advance by zero so dependent state still refreshes.
        observer.updateTime(0);
    }
    else{
        observer.updateTime(timeWarpLevels[currentWarp]);
    }
    if(showMilkyWay || showEqGrid || showAzGrid || showEcliptic || showCelestialEq){
        markings.renderAll(gl, observer, showMilkyWay, showAzGrid, showEqGrid, showCelestialEq, showEcliptic);
    }
    if(showDSO){
        messierObjects.render(gl, observer);
    }
    if(showConstellations) {
        constellations.render(gl, observer);
    }
    stars.render(gl, observer.getMaxMagnitude(), observer);
    // Planets reuse the stars' shader and the shared VBO buffers.
    solarSystem.render(gl, stars.getShader(), vbo.getBuffers(), observer.getJDE());
    if(showGround) {
        ground.render(gl, observer);
    }
}
/**
 * GLEventListener resize callback: propagates the new aspect ratio to every
 * layer and recomputes the orthographic and window bounds used for zooming.
 *
 * @param i  viewport x origin
 * @param i1 viewport y origin
 * @param i2 viewport width in pixels
 * @param i3 viewport height in pixels
 */
@Override
public void reshape(GLAutoDrawable glAutoDrawable, int i, int i1, int i2, int i3) {
    final GL3 gl = glAutoDrawable.getGL().getGL3();
    // Guard against a zero-height viewport (e.g. a minimized/collapsed window):
    // dividing by 0 would make the aspect ratio Infinity/NaN and corrupt the
    // projection and zoom bounds.
    if (i3 <= 0) {
        i3 = 1;
    }
    float aspectRatio = (float) i2 / i3;
    stars.setSize(gl, aspectRatio, 1f);
    constellations.setSize(gl, aspectRatio, 1f);
    messierObjects.setSize(gl, aspectRatio, 1f);
    markings.setSize(gl, aspectRatio, 1f);
    ground.setSize(gl, aspectRatio, 1f);
    // Ortho box spans [-aspect, aspect] x [-2, 0]; height fixed, width follows aspect.
    ortoBounds.setRect(-aspectRatio, -2, 2 * aspectRatio, 2);
    windowBounds.setRect(0, 0, i2, i3);
    observer.updateZoom(ortoBounds);
}
/** @return the observer holding time, location and zoom state */
Observer getObserver(){
    return observer;
}
/** @return the orthographic projection bounds (world units) */
Rectangle2D getBounds(){
    return ortoBounds;
}
/** @return the window bounds in pixels (set on reshape) */
Rectangle2D getWindowBounds(){
    return windowBounds;
}
/**
 * Steps the time-warp index by the given amount (may be negative), ignoring
 * requests that would step outside the bounds of timeWarpLevels.
 */
public static void changeWarp(int amount){
    final int candidate = currentWarp + amount;
    final boolean inRange = candidate >= 0 && candidate < timeWarpLevels.length;
    if (inRange) {
        currentWarp = candidate;
    }
}
// NOTE(review): "toogle" is a typo for "toggle" throughout these methods; the
// names are kept as-is because they are public API referenced by callers.

/** Pauses/resumes the simulation clock (checked by display() every frame). */
public static void tooglePause(){
    isPaused = !isPaused;
}
/** Resets the time warp to index 7, the 1x (real-time) entry of timeWarpLevels. */
public static void setDefault(){
    currentWarp = 7;
}
/** Shows/hides the ground layer. */
public static void toogleGround(){
    showGround = !showGround;
}
/** Shows/hides the cardinal points. */
public static void tooglePoints(){
    showCardinalPoints = !showCardinalPoints;
}
/** Shows/hides constellation lines. */
public static void toogleConstellations(){
    showConstellations = !showConstellations;
}
/** Shows/hides the azimuthal (alt-az) grid. */
public static void toogleAzGrid(){
    showAzGrid = !showAzGrid;
}
/** Shows/hides the ecliptic line. */
public static void toogleEcliptic(){
    showEcliptic = !showEcliptic;
}
/** Shows/hides the equatorial grid. */
public static void toogleEqGrid(){
    showEqGrid = !showEqGrid;
}
/** Shows/hides star name labels. */
public static void toogleStarNames(){
    showStarNames = !showStarNames;
}
/** Shows/hides the celestial equator line. */
public static void toogleCelestialEq(){
    showCelestialEq = !showCelestialEq;
}
/** Shows/hides the milky way outline. */
public static void toogleMilkyWay(){
    showMilkyWay = !showMilkyWay;
}
/** Shows/hides deep-sky (Messier) objects. */
public static void toogleDSO(){
    showDSO = !showDSO;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.tasks;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.NoSuchNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.ChildTaskRequest;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.NodeShouldNotConnectException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
 * The base class for transport actions that are interacting with currently running tasks.
 *
 * Fans a {@link BaseTasksRequest} out to the nodes that may own matching tasks, runs
 * {@link #taskOperation} against each matching task on those nodes, and reduces the
 * per-node results into a single {@link BaseTasksResponse}.
 */
public abstract class TransportTasksAction<
    OperationTask extends Task,
    TasksRequest extends BaseTasksRequest<TasksRequest>,
    TasksResponse extends BaseTasksResponse,
    TaskResponse extends Writeable<TaskResponse>
    > extends HandledTransportAction<TasksRequest, TasksResponse> {

    protected final ClusterName clusterName;
    protected final ClusterService clusterService;
    protected final TransportService transportService;
    // Used to materialize request/response instances when reading from the wire.
    protected final Supplier<TasksRequest> requestSupplier;
    protected final Supplier<TasksResponse> responseSupplier;
    // Name of the per-node transport action ("<actionName>[n]"), registered below.
    protected final String transportNodeAction;

    protected TransportTasksAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool,
                                   ClusterService clusterService, TransportService transportService, ActionFilters actionFilters,
                                   IndexNameExpressionResolver indexNameExpressionResolver, Supplier<TasksRequest> requestSupplier,
                                   Supplier<TasksResponse> responseSupplier,
                                   String nodeExecutor) {
        super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, requestSupplier);
        this.clusterName = clusterName;
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.transportNodeAction = actionName + "[n]";
        this.requestSupplier = requestSupplier;
        this.responseSupplier = responseSupplier;
        // Register the handler that executes the per-node part of this action.
        transportService.registerRequestHandler(transportNodeAction, NodeTaskRequest::new, nodeExecutor, new NodeTransportHandler());
    }

    /** Task-less execution is not supported: this action always needs a parent task. */
    @Override
    protected final void doExecute(TasksRequest request, ActionListener<TasksResponse> listener) {
        logger.warn("attempt to execute a transport tasks operation without a task");
        throw new UnsupportedOperationException("task parameter is required for this operation");
    }

    @Override
    protected void doExecute(Task task, TasksRequest request, ActionListener<TasksResponse> listener) {
        new AsyncAction(task, request, listener).start();
    }

    /**
     * Runs {@link #taskOperation} against every matching task on the local node,
     * collecting results and per-task failures.
     */
    private NodeTasksResponse nodeOperation(NodeTaskRequest nodeTaskRequest) {
        TasksRequest request = nodeTaskRequest.tasksRequest;
        List<TaskResponse> results = new ArrayList<>();
        List<TaskOperationFailure> exceptions = new ArrayList<>();
        processTasks(request, task -> {
            try {
                TaskResponse response = taskOperation(request, task);
                if (response != null) {
                    results.add(response);
                }
            } catch (Exception ex) {
                exceptions.add(new TaskOperationFailure(clusterService.localNode().id(), task.getId(), ex));
            }
        });
        return new NodeTasksResponse(clusterService.localNode().id(), results, exceptions);
    }

    /** Hook for subclasses to narrow the set of nodes the request is sent to. */
    protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
        return nodesIds;
    }

    /**
     * Resolves the node ids this request targets.
     *
     * NOTE(review): in this codebase TaskId#isSet() appears to mean "no task id
     * was set" (see processTasks, which treats isSet() == false as "a specific
     * task was requested") — so: no task id, resolve against the request's node
     * ids; otherwise target exactly the node that owns the task. Confirm against
     * TaskId before renaming or "fixing" this condition.
     */
    protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) {
        if (request.taskId().isSet()) {
            return clusterState.nodes().resolveNodesIds(request.nodesIds());
        } else {
            return new String[]{request.taskId().getNodeId()};
        }
    }

    /**
     * Feeds every local task matching the request to {@code operation}. If the
     * request names a specific task, only that task is considered and a
     * ResourceNotFoundException is thrown when it is absent or does not match.
     */
    @SuppressWarnings("unchecked") // tasks matching request.match() are assumed to be OperationTask
    protected void processTasks(TasksRequest request, Consumer<OperationTask> operation) {
        if (request.taskId().isSet() == false) {
            // we are only checking one task, we can optimize it
            Task task = taskManager.getTask(request.taskId().getId());
            if (task != null) {
                if (request.match(task)) {
                    operation.accept((OperationTask) task);
                } else {
                    throw new ResourceNotFoundException("task [{}] doesn't support this operation", request.taskId());
                }
            } else {
                throw new ResourceNotFoundException("task [{}] is missing", request.taskId());
            }
        } else {
            for (Task task : taskManager.getTasks().values()) {
                if (request.match(task)) {
                    operation.accept((OperationTask) task);
                }
            }
        }
    }

    protected abstract TasksResponse newResponse(TasksRequest request, List<TaskResponse> tasks, List<TaskOperationFailure>
        taskOperationFailures, List<FailedNodeException> failedNodeExceptions);

    /**
     * Folds the raw per-node slots (NodeTasksResponse, FailedNodeException, or
     * null when a node failed and {@link #accumulateExceptions()} is false) into
     * the final response.
     */
    @SuppressWarnings("unchecked")
    protected TasksResponse newResponse(TasksRequest request, AtomicReferenceArray responses) {
        List<TaskResponse> tasks = new ArrayList<>();
        List<FailedNodeException> failedNodeExceptions = new ArrayList<>();
        List<TaskOperationFailure> taskOperationFailures = new ArrayList<>();
        for (int i = 0; i < responses.length(); i++) {
            Object response = responses.get(i);
            if (response instanceof FailedNodeException) {
                failedNodeExceptions.add((FailedNodeException) response);
            } else if (response != null) {
                // A slot is left null when a node fails and accumulateExceptions()
                // is false; skip it instead of tripping a NullPointerException.
                NodeTasksResponse tasksResponse = (NodeTasksResponse) response;
                if (tasksResponse.results != null) {
                    tasks.addAll(tasksResponse.results);
                }
                if (tasksResponse.exceptions != null) {
                    taskOperationFailures.addAll(tasksResponse.exceptions);
                }
            }
        }
        return newResponse(request, tasks, taskOperationFailures, failedNodeExceptions);
    }

    protected abstract TaskResponse readTaskResponse(StreamInput in) throws IOException;

    /** Executes the action against a single matching task; may return null for "no result". */
    protected abstract TaskResponse taskOperation(TasksRequest request, OperationTask task);

    /** Whether per-node requests should be sent compressed. */
    protected boolean transportCompress() {
        return false;
    }

    /** Whether node-level failures should be recorded (as FailedNodeException slots). */
    protected abstract boolean accumulateExceptions();

    /** One fan-out/fan-in execution of the action across the resolved nodes. */
    private class AsyncAction {

        private final TasksRequest request;
        private final String[] nodesIds;
        private final DiscoveryNode[] nodes;
        private final ActionListener<TasksResponse> listener;
        // One slot per target node: NodeTasksResponse, FailedNodeException, or null.
        private final AtomicReferenceArray<Object> responses;
        private final AtomicInteger counter = new AtomicInteger();
        private final Task task;

        private AsyncAction(Task task, TasksRequest request, ActionListener<TasksResponse> listener) {
            this.task = task;
            this.request = request;
            this.listener = listener;
            ClusterState clusterState = clusterService.state();
            // Resolve then filter, and use the *filtered* ids everywhere below.
            // (The original sized and filled the nodes array from the unfiltered
            // ids, which breaks any subclass whose filterNodeIds() drops entries.)
            this.nodesIds = filterNodeIds(clusterState.nodes(), resolveNodes(request, clusterState));
            ImmutableOpenMap<String, DiscoveryNode> clusterNodes = clusterState.nodes().nodes();
            this.nodes = new DiscoveryNode[this.nodesIds.length];
            for (int i = 0; i < this.nodesIds.length; i++) {
                this.nodes[i] = clusterNodes.get(this.nodesIds[i]);
            }
            this.responses = new AtomicReferenceArray<>(this.nodesIds.length);
        }

        private void start() {
            if (nodesIds.length == 0) {
                // nothing to do
                try {
                    listener.onResponse(newResponse(request, responses));
                } catch (Throwable t) {
                    logger.debug("failed to generate empty response", t);
                    listener.onFailure(t);
                }
            } else {
                TransportRequestOptions.Builder builder = TransportRequestOptions.builder();
                if (request.timeout() != null) {
                    builder.withTimeout(request.timeout());
                }
                builder.withCompress(transportCompress());
                for (int i = 0; i < nodesIds.length; i++) {
                    final String nodeId = nodesIds[i];
                    final int idx = i;
                    final DiscoveryNode node = nodes[i];
                    try {
                        if (node == null) {
                            onFailure(idx, nodeId, new NoSuchNodeException(nodeId));
                        } else {
                            NodeTaskRequest nodeRequest = new NodeTaskRequest(request);
                            nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
                            taskManager.registerChildTask(task, node.getId());
                            transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(),
                                new BaseTransportResponseHandler<NodeTasksResponse>() {
                                    @Override
                                    public NodeTasksResponse newInstance() {
                                        return new NodeTasksResponse();
                                    }

                                    @Override
                                    public void handleResponse(NodeTasksResponse response) {
                                        onOperation(idx, response);
                                    }

                                    @Override
                                    public void handleException(TransportException exp) {
                                        onFailure(idx, node.id(), exp);
                                    }

                                    @Override
                                    public String executor() {
                                        return ThreadPool.Names.SAME;
                                    }
                                });
                        }
                    } catch (Throwable t) {
                        onFailure(idx, nodeId, t);
                    }
                }
            }
        }

        private void onOperation(int idx, NodeTasksResponse nodeResponse) {
            responses.set(idx, nodeResponse);
            if (counter.incrementAndGet() == responses.length()) {
                finishHim();
            }
        }

        private void onFailure(int idx, String nodeId, Throwable t) {
            if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) {
                logger.debug("failed to execute on node [{}]", t, nodeId);
            }
            if (accumulateExceptions()) {
                responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t));
            }
            if (counter.incrementAndGet() == responses.length()) {
                finishHim();
            }
        }

        /** All node responses/failures are in; build and deliver the final response. */
        private void finishHim() {
            TasksResponse finalResponse;
            try {
                finalResponse = newResponse(request, responses);
            } catch (Throwable t) {
                logger.debug("failed to combine responses from nodes", t);
                listener.onFailure(t);
                return;
            }
            listener.onResponse(finalResponse);
        }
    }

    /** Handles the per-node ("[n]") leg of the action on the receiving node. */
    class NodeTransportHandler implements TransportRequestHandler<NodeTaskRequest> {

        @Override
        public void messageReceived(final NodeTaskRequest request, final TransportChannel channel) throws Exception {
            channel.sendResponse(nodeOperation(request));
        }
    }

    /** Wire wrapper carrying the original tasks request to a single node. */
    private class NodeTaskRequest extends ChildTaskRequest {
        private TasksRequest tasksRequest;

        protected NodeTaskRequest() {
            super();
        }

        protected NodeTaskRequest(TasksRequest tasksRequest) {
            super();
            this.tasksRequest = tasksRequest;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            tasksRequest = requestSupplier.get();
            tasksRequest.readFrom(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            tasksRequest.writeTo(out);
        }
    }

    /** Per-node result: the node id plus its task results and task-level failures. */
    private class NodeTasksResponse extends TransportResponse {
        protected String nodeId;
        protected List<TaskOperationFailure> exceptions;
        protected List<TaskResponse> results;

        public NodeTasksResponse() {
        }

        public NodeTasksResponse(String nodeId,
                                 List<TaskResponse> results,
                                 List<TaskOperationFailure> exceptions) {
            this.nodeId = nodeId;
            this.results = results;
            this.exceptions = exceptions;
        }

        public String getNodeId() {
            return nodeId;
        }

        public List<TaskOperationFailure> getExceptions() {
            return exceptions;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            nodeId = in.readString();
            // Each result is preceded by a presence flag (writeTo writes false for null).
            int resultsSize = in.readVInt();
            results = new ArrayList<>(resultsSize);
            for (; resultsSize > 0; resultsSize--) {
                final TaskResponse result = in.readBoolean() ? readTaskResponse(in) : null;
                results.add(result);
            }
            if (in.readBoolean()) {
                int taskFailures = in.readVInt();
                exceptions = new ArrayList<>(taskFailures);
                for (int i = 0; i < taskFailures; i++) {
                    exceptions.add(new TaskOperationFailure(in));
                }
            } else {
                exceptions = null;
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(nodeId);
            out.writeVInt(results.size());
            for (TaskResponse result : results) {
                if (result != null) {
                    out.writeBoolean(true);
                    result.writeTo(out);
                } else {
                    out.writeBoolean(false);
                }
            }
            out.writeBoolean(exceptions != null);
            if (exceptions != null) {
                int taskFailures = exceptions.size();
                out.writeVInt(taskFailures);
                for (TaskOperationFailure exception : exceptions) {
                    exception.writeTo(out);
                }
            }
        }
    }
}
| |
/*
* Copyright 2013 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may obtain a copy of the License
* at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in
* writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package com.twitter.hraven.etl;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.commons.configuration.ConversionException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.JobHistoryCopy.RecordTypes;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import com.google.common.collect.Maps;
import com.twitter.hraven.AppKey;
import com.twitter.hraven.AppSummary;
import com.twitter.hraven.Constants;
import com.twitter.hraven.JobDetails;
import com.twitter.hraven.JobHistoryKeys;
import com.twitter.hraven.JobKey;
import com.twitter.hraven.TaskKey;
import com.twitter.hraven.util.ByteArrayWrapper;
import com.twitter.hraven.datasource.JobKeyConverter;
import com.twitter.hraven.datasource.ProcessingException;
import com.twitter.hraven.datasource.TaskKeyConverter;
/**
* Deal with JobHistory file parsing for job history files which are generated after MAPREDUCE-1016
* (hadoop 1.x (0.21 and later) and hadoop 2.x)
*/
public class JobHistoryFileParserHadoop2 extends JobHistoryFileParserBase {
// Key of the job currently being parsed; set at the start of parse().
private JobKey jobKey;
/** Job ID, minus the leading "job_" */
private String jobNumber = "";
private byte[] jobKeyBytes;
private JobDetails jobDetails = null;
// Accumulated HBase puts for job-level and task-level rows.
private List<Put> jobPuts = new LinkedList<Put>();
private List<Put> taskPuts = new LinkedList<Put>();
boolean uberized = false;
/**
 * Stores the terminal status of the job
 *
 * Since this history file is placed hdfs at mapreduce.jobhistory.done-dir
 * only upon job termination, we ensure that we store the status seen
 * only in one of the terminal state events in the file like
 * JobFinished(JOB_FINISHED) or JobUnsuccessfulCompletion(JOB_FAILED, JOB_KILLED)
 *
 * Ideally, each terminal state event like JOB_FINISHED, JOB_FAILED, JOB_KILLED
 * should contain the jobStatus field and we wouldn't need this extra processing
 * But presently, in history files, only JOB_FAILED, JOB_KILLED events
 * contain the jobStatus field whereas the JOB_FINISHED event does not,
 * hence this extra processing
 */
private String jobStatus = "";
/** hadoop2 JobState enum:
 * NEW, INITED, RUNNING, SUCCEEDED, FAILED, KILL_WAIT, KILLED, ERROR
 */
public static final String JOB_STATUS_SUCCEEDED = "SUCCEEDED";
// initialize mb millis to NOTFOUND_VALUE
// since older versions of hadoop2 did not have this counter
private long mapMbMillis = Constants.NOTFOUND_VALUE;
private long reduceMbMillis = Constants.NOTFOUND_VALUE;
private static final String LAUNCH_TIME_KEY_STR = JobHistoryKeys.LAUNCH_TIME.toString();
private static final String FINISH_TIME_KEY_STR = JobHistoryKeys.FINISH_TIME.toString();
private JobKeyConverter jobKeyConv = new JobKeyConverter();
private TaskKeyConverter taskKeyConv = new TaskKeyConverter();
// Prefixes distinguishing AM attempts, tasks and task attempts in event ids.
private static final String AM_ATTEMPT_PREFIX = "AM_";
private static final String TASK_PREFIX = "task_";
private static final String TASK_ATTEMPT_PREFIX = "attempt_";
private static final Log LOG = LogFactory.getLog(JobHistoryFileParserHadoop2.class);
// Avro machinery: schema is read from line 2 of the file; each further line
// is one JSON-encoded event record decoded with reader/decoder.
private Schema schema;
private Decoder decoder;
private DatumReader<GenericRecord> reader;
// JSON field names used by the .jhist event records.
private static final String TYPE = "type";
private static final String EVENT = "event";
private static final String NAME = "name";
private static final String FIELDS = "fields";
private static final String COUNTS = "counts";
private static final String GROUPS = "groups";
private static final String VALUE = "value";
private static final String TASKID = "taskid";
private static final String APPLICATION_ATTEMPTID = "applicationAttemptId";
private static final String ATTEMPTID = "attemptId";
// Avro type names as they appear in the schema, matched in processAllTypes().
private static final String TYPE_INT = "int";
private static final String TYPE_BOOLEAN = "boolean";
private static final String TYPE_LONG = "long";
private static final String TYPE_STRING = "String";
/** only acls in the job history file seem to be of this type: map of strings */
private static final String TYPE_MAP_STRINGS = "{\"type\":\"map\",\"values\":\"string\"}";
/**
 * vMemKbytes, clockSplit, physMemKbytes, cpuUsages are arrays of ints See MAPREDUCE-5432
 */
private static final String TYPE_ARRAY_INTS = "{\"type\":\"array\",\"items\":\"int\"}";
/** this is part of {@link org.apache.hadoop.mapreduce.jobhistory.TaskFailedEvent.java} */
private static final String NULL_STRING = "[\"null\",\"string\"]";
/**
 * Maps each record name appearing in the .jhist file (e.g. "JOB_SUBMITTED")
 * to the Avro record type that describes it (e.g. JobSubmitted). The enum
 * constant name must match the record name in the Avro schema, since
 * understandSchema() looks constants up via valueOf(); the string arguments
 * are the event names that resolve to that record type (see
 * EVENT_RECORD_NAMES, populated in the static initializer).
 */
public static enum Hadoop2RecordType {
    JobFinished("JOB_FINISHED"),
    JobInfoChange("JOB_INFO_CHANGED"),
    JobInited("JOB_INITED"),
    AMStarted("AM_STARTED"),
    JobPriorityChange("JOB_PRIORITY_CHANGED"),
    JobStatusChanged("JOB_STATUS_CHANGED"),
    JobQueueChange("JOB_QUEUE_CHANGED"),
    JobSubmitted("JOB_SUBMITTED"),
    JobUnsuccessfulCompletion("JOB_KILLED","JOB_FAILED"),
    MapAttemptFinished("MAP_ATTEMPT_FINISHED"),
    ReduceAttemptFinished("REDUCE_ATTEMPT_FINISHED"),
    TaskAttemptFinished("CLEANUP_ATTEMPT_FINISHED"),
    TaskAttemptStarted("CLEANUP_ATTEMPT_STARTED",
        "SETUP_ATTEMPT_STARTED",
        "REDUCE_ATTEMPT_STARTED",
        "MAP_ATTEMPT_STARTED"),
    TaskAttemptUnsuccessfulCompletion("CLEANUP_ATTEMPT_KILLED",
        "CLEANUP_ATTEMPT_FAILED",
        "SETUP_ATTEMPT_KILLED",
        "SETUP_ATTEMPT_FAILED",
        "REDUCE_ATTEMPT_KILLED",
        "REDUCE_ATTEMPT_FAILED",
        "MAP_ATTEMPT_KILLED",
        "MAP_ATTEMPT_FAILED"),
    TaskFailed("TASK_FAILED"),
    TaskFinished("TASK_FINISHED"),
    TaskStarted("TASK_STARTED"),
    TaskUpdated("TASK_UPDATED");

    // Event names in the history file that map to this record type.
    private final String[] recordNames;

    private Hadoop2RecordType(String... recordNames) {
        // Defensive: varargs is only null if a caller passes an explicit null array.
        if (recordNames != null) {
            this.recordNames = recordNames;
        } else {
            this.recordNames = new String[0];
        }
    }

    public String[] getRecordNames() {
        return recordNames;
    }
}
/** The four counter groups a history event may carry (see processAllTypes/processCounters). */
public static enum CounterTypes {
    counters, mapCounters, reduceCounters, totalCounters
}

// Event name (e.g. "JOB_SUBMITTED") -> record type; filled in the static block.
private static Map<String,Hadoop2RecordType> EVENT_RECORD_NAMES = Maps.newHashMap();
// Names of counter-typed fields; keys matching one of these are routed to processCounters().
private static final Set<String> COUNTER_NAMES = new HashSet<String>();
// Record type -> (field name -> Avro type string), built by understandSchema().
private Map<Hadoop2RecordType, Map<String, String>> fieldTypes =
    new HashMap<Hadoop2RecordType, Map<String, String>>();

/**
 * populates the COUNTER_NAMES hash set and EVENT_RECORD_NAMES hash map
 */
static {
    /**
     * populate the hash set for counter names
     */
    for (CounterTypes ct : CounterTypes.values()) {
        COUNTER_NAMES.add(ct.toString());
    }
    /**
     * populate the hash map of EVENT_RECORD_NAMES
     */
    for (Hadoop2RecordType t : Hadoop2RecordType.values()) {
        for (String name : t.getRecordNames()) {
            EVENT_RECORD_NAMES.put(name, t);
        }
    }
}
/**
 * Package-private: instances are expected to be created via the parser factory.
 *
 * @param conf hadoop configuration passed through to the base parser
 */
JobHistoryFileParserHadoop2(Configuration conf) {
    super(conf);
}
/**
 * {@inheritDoc}
 *
 * Parses a hadoop2 .jhist file: line 1 is the history file version (ignored),
 * line 2 is the Avro schema, and every remaining line is one Avro-JSON
 * encoded history event. Terminal job status and the hadoop version are
 * appended as puts after the whole file has been read.
 */
@Override
public void parse(byte[] historyFileContents, JobKey jobKey)
    throws ProcessingException {
    this.jobKey = jobKey;
    this.jobKeyBytes = jobKeyConv.toBytes(jobKey);
    this.jobDetails = new JobDetails(jobKey);
    initializeJobDetails();
    setJobId(jobKey.getJobId().getJobIdString());
    FSDataInputStream in = null;
    try {
        in = new FSDataInputStream(new ByteArrayWrapper(historyFileContents));
        /* first line is the version, ignore it */
        in.readLine();
        /* second line in file is the schema. Schema.parse is a *static* method:
         * call it via the class — the original invoked it through the still-null
         * instance field, which only compiled because the method is static. */
        this.schema = Schema.parse(in.readLine());
        /* now figure out the schema */
        understandSchema(schema.toString());
        /* now read the rest of the file */
        this.reader = new GenericDatumReader<GenericRecord>(schema);
        this.decoder = DecoderFactory.get().jsonDecoder(schema, in);
        GenericRecord record = null;
        Hadoop2RecordType recType = null;
        try {
            while ((record = reader.read(null, decoder)) != null) {
                if (record.get(TYPE) != null) {
                    recType = EVENT_RECORD_NAMES.get(record.get(TYPE).toString());
                } else {
                    throw new ProcessingException("expected one of "
                        + Arrays.asList(Hadoop2RecordType.values())
                        + " \n but not found, cannot process this record! " + jobKey);
                }
                if (recType == null) {
                    throw new ProcessingException("new record type has surfaced: "
                        + record.get(TYPE).toString() + " cannot process this record! " + jobKey);
                }
                // GenericRecord's get returns an Object
                Object eDetails = record.get(EVENT);
                // confirm that we got an "event" object
                if (eDetails != null) {
                    JSONObject eventDetails = new JSONObject(eDetails.toString());
                    processRecords(recType, eventDetails);
                } else {
                    throw new ProcessingException("expected event details but not found "
                        + record.get(TYPE).toString() + " cannot process this record! " + jobKey);
                }
            }
        } catch (EOFException eof) {
            // not an error, simply end of file
            LOG.info("Done parsing file, reached eof for " + jobKey);
        }
    } catch (IOException ioe) {
        throw new ProcessingException(" Unable to parse history file in function parse, "
            + "cannot process this record!" + jobKey + " error: ", ioe);
    } catch (JSONException jse) {
        throw new ProcessingException(" Unable to parse history file in function parse, "
            + "cannot process this record! " + jobKey + " error: ", jse);
    } catch (IllegalArgumentException iae) {
        throw new ProcessingException(" Unable to parse history file in function parse, "
            + "cannot process this record! " + jobKey + " error: ", iae);
    } finally {
        // Close the stream (the original leaked it). It wraps an in-memory
        // byte array, so a failure to close is not actionable.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ignored) {
                // nothing to recover from for an in-memory stream
            }
        }
    }
    /*
     * set the job status for this job once the entire file is parsed
     * this has to be done separately
     * since JOB_FINISHED event is missing the field jobStatus,
     * where as JOB_KILLED and JOB_FAILED
     * events are not so we need to look through the whole file to confirm
     * the job status and then generate the put
     */
    Put jobStatusPut = getJobStatusPut();
    this.jobPuts.add(jobStatusPut);
    // set the hadoop version for this record
    Put versionPut = getHadoopVersionPut(JobHistoryFileParserFactory.getHistoryFileVersion2(), this.jobKeyBytes);
    this.jobPuts.add(versionPut);
    LOG.info("For " + this.jobKey + " #jobPuts " + jobPuts.size() + " #taskPuts: "
        + taskPuts.size());
}
/**
 * set some initial values which help while later determining if
 * values were found or not in the history file
 * (NOTFOUND_VALUE sentinels are overwritten by processAllTypes when the
 * corresponding events are seen)
 */
private void initializeJobDetails() {
    this.jobDetails.setSubmitTime(Constants.NOTFOUND_VALUE);
    this.jobDetails.setFinishTime(Constants.NOTFOUND_VALUE);
}
/**
 * generates a put for job status
 * (called once at the end of parse(), since only terminal events carry the
 * status reliably — see the jobStatus field doc)
 * @return Put that contains Job Status
 */
private Put getJobStatusPut() {
    Put pStatus = new Put(jobKeyBytes);
    byte[] valueBytes = Bytes.toBytes(this.jobStatus);
    byte[] qualifier = Bytes.toBytes(JobHistoryKeys.JOB_STATUS.toString().toLowerCase());
    pStatus.add(Constants.INFO_FAM_BYTES, qualifier, valueBytes);
    return pStatus;
}
/**
 * understand the schema so that we can parse the rest of the file:
 * walks the schema's "fields" array, finds the union of event record types
 * under the "event" field, and fills {@code fieldTypes} with a
 * (field name -> Avro type string) map per record type.
 * @throws JSONException
 */
private void understandSchema(String schema) throws JSONException {
    JSONObject j1 = new JSONObject(schema);
    JSONArray fields = j1.getJSONArray(FIELDS);
    String fieldName;
    String fieldTypeValue;
    Object recName;
    for (int k = 0; k < fields.length(); k++) {
        if (fields.get(k) == null) {
            continue;
        }
        JSONObject allEvents = new JSONObject(fields.get(k).toString());
        Object name = allEvents.get(NAME);
        if (name != null) {
            if (name.toString().equalsIgnoreCase(EVENT)) {
                // the "event" field's type is a union: one entry per record type
                JSONArray allTypeDetails = allEvents.getJSONArray(TYPE);
                for (int i = 0; i < allTypeDetails.length(); i++) {
                    JSONObject actual = (JSONObject) allTypeDetails.get(i);
                    JSONArray types = actual.getJSONArray(FIELDS);
                    Map<String, String> typeDetails = new HashMap<String, String>();
                    for (int j = 0; j < types.length(); j++) {
                        // NOTE(review): JSONArray#getJSONObject throws rather than
                        // returning null, so this guard looks unreachable — confirm
                        // against the jettison version in use.
                        if (types.getJSONObject(j) == null ) {
                            continue;
                        }
                        fieldName = types.getJSONObject(j).getString(NAME);
                        fieldTypeValue = types.getJSONObject(j).getString(TYPE);
                        if ((fieldName != null) && (fieldTypeValue != null)) {
                            typeDetails.put(fieldName, fieldTypeValue);
                        }
                    }
                    recName = actual.get(NAME);
                    if (recName != null) {
                        /* the next statement may throw an IllegalArgumentException if
                         * it finds a new string that's not part of the Hadoop2RecordType enum
                         * that way we know what types of events we are parsing
                         */
                        fieldTypes.put(Hadoop2RecordType.valueOf(recName.toString()), typeDetails);
                    }
                }
            }
        }
    }
}
/**
 * Processes one counter-typed field of an event and folds every counter into
 * the given Put. The JSON layout is: an object with a "name" (meta group,
 * e.g. "MAP_COUNTERS") and a "groups" array; each group has its own "name"
 * (e.g. "org.apache.hadoop.mapreduce.FileSystemCounter") and a "counts" array
 * of {"name", "value"} entries.
 */
private void processCounters(Put p, JSONObject eventDetails, String key) {
    try {
        JSONObject countersJson = eventDetails.getJSONObject(key);
        String metaGroupName = countersJson.getString(NAME);
        JSONArray counterGroups = countersJson.getJSONArray(GROUPS);
        for (int g = 0; g < counterGroups.length(); g++) {
            JSONObject group = counterGroups.getJSONObject(g);
            String groupName = group.get(NAME).toString();
            JSONArray counts = group.getJSONArray(COUNTS);
            for (int c = 0; c < counts.length(); c++) {
                JSONObject count = counts.getJSONObject(c);
                populatePut(p, Constants.INFO_FAM_BYTES, metaGroupName, groupName,
                    count.get(NAME).toString(), count.getLong(VALUE));
            }
        }
    } catch (JSONException e) {
        throw new ProcessingException(" Caught json exception while processing counters ", e);
    }
}
/**
 * process the event details as per their data type from schema definition:
 * counters are delegated to processCounters(); every other field is read with
 * the JSON accessor matching its Avro type and written into the Put. As side
 * effects, terminal-event status is captured into {@code jobStatus}, and
 * submit/finish times and map/reduce totals are captured into
 * {@code jobDetails} for later megabytemillis calculations.
 * @throws JSONException
 */
private void
    processAllTypes(Put p, Hadoop2RecordType recType, JSONObject eventDetails, String key)
        throws JSONException {
    if (COUNTER_NAMES.contains(key)) {
        processCounters(p, eventDetails, key);
    } else {
        // hadoop1 name for this hadoop2 key (null when there is no mapping)
        String keyH2H1Mapping = JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.get(key);
        String type = fieldTypes.get(recType).get(key);
        if (type.equalsIgnoreCase(TYPE_STRING)) {
            // look for job status
            if (JobHistoryKeys.JOB_STATUS.toString().equals(
                JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.get(key))) {
                // store it only if it's one of the terminal state events
                if ((recType.equals(Hadoop2RecordType.JobFinished))
                    || (recType.equals(Hadoop2RecordType.JobUnsuccessfulCompletion))) {
                    this.jobStatus = eventDetails.getString(key);
                }
            } else {
                String value = eventDetails.getString(key);
                populatePut(p, Constants.INFO_FAM_BYTES, key, value);
            }
        } else if (type.equalsIgnoreCase(TYPE_LONG)) {
            long value = eventDetails.getLong(key);
            populatePut(p, Constants.INFO_FAM_BYTES, key, value);
            // populate start time of the job for megabytemillis calculations
            if ((recType.equals(Hadoop2RecordType.JobInited)) &&
                LAUNCH_TIME_KEY_STR.equals(JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.get(key))) {
                this.jobDetails.setSubmitTime(value);
            }
            // populate end time of the job for megabytemillis calculations
            if ((recType.equals(Hadoop2RecordType.JobFinished))
                || (recType.equals(Hadoop2RecordType.JobUnsuccessfulCompletion))) {
                if (FINISH_TIME_KEY_STR.equals(JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.get(key))) {
                    this.jobDetails.setFinishTime(value);
                }
            }
        } else if (type.equalsIgnoreCase(TYPE_INT)) {
            int value = eventDetails.getInt(key);
            populatePut(p, Constants.INFO_FAM_BYTES, key, value);
            // populate total maps, reduces of the job
            if (keyH2H1Mapping != null) {
                if (keyH2H1Mapping.equals(JobHistoryKeys.TOTAL_MAPS.toString())) {
                    this.jobDetails.setTotalMaps(value);
                }
                if (keyH2H1Mapping.equals(JobHistoryKeys.TOTAL_REDUCES.toString())) {
                    this.jobDetails.setTotalReduces(value);
                }
            }
        } else if (type.equalsIgnoreCase(TYPE_BOOLEAN)) {
            boolean value = eventDetails.getBoolean(key);
            populatePut(p, Constants.INFO_FAM_BYTES, key, Boolean.toString(value));
        } else if (type.equalsIgnoreCase(TYPE_ARRAY_INTS)) {
            // stored verbatim as the JSON array string (see MAPREDUCE-5432 fields)
            String value = eventDetails.getString(key);
            populatePut(p, Constants.INFO_FAM_BYTES, key, value);
        } else if (type.equalsIgnoreCase(NULL_STRING)) {
            // usually seen in FAILED tasks
            String value = eventDetails.getString(key);
            populatePut(p, Constants.INFO_FAM_BYTES, key, value);
        } else if (type.equalsIgnoreCase(TYPE_MAP_STRINGS)) {
            // acls: a map of strings, stored as its JSON representation
            JSONObject ms = new JSONObject(eventDetails.get(key).toString());
            populatePut(p, Constants.INFO_FAM_BYTES, key, ms.toString());
        } else {
            throw new ProcessingException("Encountered a new type " + type
                + " unable to complete processing " + this.jobKey);
        }
    }
}
/**
 * Walks every key present in the event details and dispatches each one to
 * {@code processAllTypes}, which appends the corresponding cell(s) to the Put.
 * @throws JSONException
 */
private void iterateAndPreparePuts(JSONObject eventDetails, Put p, Hadoop2RecordType recType)
    throws JSONException {
  for (Iterator<?> it = eventDetails.keys(); it.hasNext();) {
    processAllTypes(p, recType, eventDetails, (String) it.next());
  }
}
/**
 * Processes an individual history record: chooses the row key appropriate to the
 * record type (job, AM attempt, task, or task attempt), builds a Put from the event
 * details, and queues it on {@code jobPuts} or {@code taskPuts}.
 * @throws JSONException
 */
private void processRecords(Hadoop2RecordType recType, JSONObject eventDetails)
    throws JSONException {
  switch (recType) {
  case JobFinished:
    // this setting is needed since the job history file is missing
    // the jobStatus field in the JOB_FINISHED event
    this.jobStatus = JOB_STATUS_SUCCEEDED;
    // intentional fall-through: JobFinished is otherwise stored like the other job-level events
  case JobInfoChange:
  case JobInited:
  case JobPriorityChange:
  case JobStatusChanged:
  case JobQueueChange:
  case JobSubmitted:
  case JobUnsuccessfulCompletion:
    Put pJob = new Put(this.jobKeyBytes);
    iterateAndPreparePuts(eventDetails, pJob, recType);
    this.jobPuts.add(pJob);
    break;
  case AMStarted:
    byte[] amAttemptIdKeyBytes =
        getAMKey(AM_ATTEMPT_PREFIX, eventDetails.getString(APPLICATION_ATTEMPTID));
    // generate a new put per AM Attempt
    Put pAM = new Put(amAttemptIdKeyBytes);
    pAM.add(Constants.INFO_FAM_BYTES, Constants.RECORD_TYPE_COL_BYTES,
        Bytes.toBytes(RecordTypes.Task.toString()));
    iterateAndPreparePuts(eventDetails, pAM, recType);
    taskPuts.add(pAM);
    break;
  case MapAttemptFinished:
    byte[] taskMAttemptIdKeyBytes =
        getTaskKey(TASK_ATTEMPT_PREFIX, this.jobNumber, eventDetails.getString(ATTEMPTID));
    Put pMTaskAttempt = new Put(taskMAttemptIdKeyBytes);
    pMTaskAttempt.add(Constants.INFO_FAM_BYTES, Constants.RECORD_TYPE_COL_BYTES,
        Bytes.toBytes(RecordTypes.MapAttempt.toString()));
    iterateAndPreparePuts(eventDetails, pMTaskAttempt, recType);
    this.taskPuts.add(pMTaskAttempt);
    break;
  case ReduceAttemptFinished:
    byte[] taskRAttemptIdKeyBytes =
        getTaskKey(TASK_ATTEMPT_PREFIX, this.jobNumber, eventDetails.getString(ATTEMPTID));
    Put pRTaskAttempt = new Put(taskRAttemptIdKeyBytes);
    pRTaskAttempt.add(Constants.INFO_FAM_BYTES, Constants.RECORD_TYPE_COL_BYTES,
        Bytes.toBytes(RecordTypes.ReduceAttempt.toString()));
    iterateAndPreparePuts(eventDetails, pRTaskAttempt, recType);
    this.taskPuts.add(pRTaskAttempt);
    break;
  case TaskAttemptFinished:
  case TaskAttemptStarted:
  case TaskAttemptUnsuccessfulCompletion:
    byte[] taskAttemptIdKeyBytes =
        getTaskKey(TASK_ATTEMPT_PREFIX, this.jobNumber, eventDetails.getString(ATTEMPTID));
    Put pTaskAttempt = new Put(taskAttemptIdKeyBytes);
    pTaskAttempt.add(Constants.INFO_FAM_BYTES, Constants.RECORD_TYPE_COL_BYTES,
        Bytes.toBytes(RecordTypes.Task.toString()));
    iterateAndPreparePuts(eventDetails, pTaskAttempt, recType);
    taskPuts.add(pTaskAttempt);
    break;
  case TaskFailed:
  case TaskStarted:
  case TaskUpdated:
  case TaskFinished:
    byte[] taskIdKeyBytes =
        getTaskKey(TASK_PREFIX, this.jobNumber, eventDetails.getString(TASKID));
    Put pTask = new Put(taskIdKeyBytes);
    pTask.add(Constants.INFO_FAM_BYTES, Constants.RECORD_TYPE_COL_BYTES,
        Bytes.toBytes(RecordTypes.Task.toString()));
    iterateAndPreparePuts(eventDetails, pTask, recType);
    taskPuts.add(pTask);
    break;
  default:
    LOG.error("Check if recType was modified and has new members?");
    throw new ProcessingException("Check if recType was modified and has new members? " + recType);
  }
}
/**
 * Derives the job number from a full job ID by stripping the leading "job_"
 * prefix; IDs that are null or not of the expected form are ignored.
 * @param id full job ID, e.g. "job_1234_0001"
 */
private void setJobId(String id) {
  final String prefix = "job_";
  if (id == null || !id.startsWith(prefix) || id.length() <= prefix.length()) {
    return;
  }
  this.jobNumber = id.substring(prefix.length());
}
/**
 * Maintains compatibility between hadoop 1.0 keys and hadoop 2.0 keys: maps a
 * hadoop2 key to its hadoop1 equivalent when one exists, and validates that the
 * resulting key is a member of the {@link JobHistoryKeys} enum.
 * @throws IllegalArgumentException if the key is not a known JobHistoryKeys member
 */
private String getKey(String key) throws IllegalArgumentException {
  String checkKey = key;
  if (JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.containsKey(key)) {
    checkKey = JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.get(key);
  }
  return JobHistoryKeys.valueOf(checkKey).toString();
}
/**
 * Populates a put for a long value. Zero values are stored via the shared
 * pre-encoded constant to avoid re-encoding.
 * @param p put to populate
 * @param family column family bytes
 * @param key history key (lower-cased, hadoop1-mapped form becomes the qualifier)
 * @param value long value to store
 */
private void populatePut(Put p, byte[] family, String key, long value) {
  byte[] qualifier = Bytes.toBytes(getKey(key).toLowerCase());
  byte[] valueBytes = (value == 0L) ? Constants.ZERO_LONG_BYTES : Bytes.toBytes(value);
  p.add(family, qualifier, valueBytes);
}
/**
 * Gets the int values as ints or longs: some keys in 2.0 are now int where they
 * were long in 1.0, so this maintains compatibility between 1.0 and 2.0 by
 * widening those ints to long before encoding.
 *
 * keeping this function package level visible (unit testing)
 * @param key a JobHistoryKeys enum member name
 * @param value the raw int value read from the event
 * @return the encoded bytes, widened to long where the key's declared type is Long
 * @throws IllegalArgumentException if a new/unknown key is encountered
 */
byte[] getValue(String key, int value) {
  Class<?> clazz = JobHistoryKeys.KEY_TYPES.get(JobHistoryKeys.valueOf(key));
  if (clazz == null) {
    throw new IllegalArgumentException(" unknown key " + key + " encountered while parsing "
        + this.jobKey);
  }
  byte[] valueBytes;
  if (Long.class.equals(clazz)) {
    // widen via a primitive cast instead of the deprecated Long(long) constructor
    valueBytes = (value != 0) ? Bytes.toBytes((long) value) : Constants.ZERO_LONG_BYTES;
  } else {
    valueBytes = (value != 0) ? Bytes.toBytes(value) : Constants.ZERO_INT_BYTES;
  }
  return valueBytes;
}
/**
 * Populates a put for an int value. The key is first normalized through
 * {@code getKey}, then encoded as int or long depending on its declared type.
 * @param p put to populate
 * @param family column family bytes
 * @param key history key (lower-cased, hadoop1-mapped form becomes the qualifier)
 * @param value int value to store
 */
private void populatePut(Put p, byte[] family, String key, int value) {
  final String jobHistoryKey = getKey(key);
  p.add(family, Bytes.toBytes(jobHistoryKey.toLowerCase()), getValue(jobHistoryKey, value));
}
/**
 * Populates a put for a string value.
 * NOTE(review): a null value would NPE inside Bytes.toBytes — callers appear to
 * always pass non-null strings from the JSON event; confirm if that changes.
 * @param p put to populate
 * @param family column family bytes
 * @param key history key (lower-cased, hadoop1-mapped form becomes the qualifier)
 * @param value string value to store
 */
private void populatePut(Put p, byte[] family, String key, String value) {
  byte[] qualifier = Bytes.toBytes(getKey(key).toLowerCase());
  p.add(family, qualifier, Bytes.toBytes(value));
}
/**
 * Populates a put for {@link Counters}: selects the column prefix from the counter
 * key, builds the qualifier as prefix + separator + groupName + separator +
 * counterName, captures the map/reduce mb-millis values for later megabytemillis
 * calculations, and standardizes legacy slot-millis counter values before writing.
 * @param p put to populate
 * @param family column family bytes
 * @param key counter key (one of the *_COUNTERS members of JobHistoryKeys)
 * @param groupName counter group name
 * @param counterName counter name
 * @param counterValue counter value
 */
private void populatePut(Put p, byte[] family, String key, String groupName, String counterName,
    Long counterValue) {
  byte[] counterPrefix = null;
  try {
    switch (JobHistoryKeys.valueOf(JobHistoryKeys.class, key)) {
    case COUNTERS:
    case TOTAL_COUNTERS:
    case TASK_COUNTERS:
    case TASK_ATTEMPT_COUNTERS:
      counterPrefix = Bytes.add(Constants.COUNTER_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES);
      break;
    case MAP_COUNTERS:
      counterPrefix = Bytes.add(Constants.MAP_COUNTER_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES);
      break;
    case REDUCE_COUNTERS:
      counterPrefix =
          Bytes.add(Constants.REDUCE_COUNTER_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES);
      break;
    default:
      // key is already a String, so no toString() is needed here
      throw new IllegalArgumentException("Unknown counter type " + key);
    }
  } catch (IllegalArgumentException iae) {
    throw new ProcessingException("Unknown counter type " + key, iae);
  } catch (NullPointerException npe) {
    throw new ProcessingException("Null counter type " + key, npe);
  }
  byte[] groupPrefix = Bytes.add(counterPrefix, Bytes.toBytes(groupName), Constants.SEP_BYTES);
  byte[] qualifier = Bytes.add(groupPrefix, Bytes.toBytes(counterName));
  /*
   * store the map and reduce mb millis counter value
   */
  if (Constants.JOB_COUNTER_HADOOP2.equals(groupName)) {
    if (Constants.MB_MILLIS_MAPS.equals(counterName)) {
      this.mapMbMillis = counterValue;
    } else if (Constants.MB_MILLIS_REDUCES.equals(counterName)) {
      this.reduceMbMillis = counterValue;
    }
  }
  /*
   * correct and populate map and reduce slot millis
   */
  if ((Constants.SLOTS_MILLIS_MAPS.equals(counterName)) ||
      (Constants.SLOTS_MILLIS_REDUCES.equals(counterName))) {
    counterValue = getStandardizedCounterValue(counterName, counterValue);
  }
  p.add(family, qualifier, Bytes.toBytes(counterValue));
}
/**
 * Looks up the configured map or reduce container memory (in MB) from the job
 * configuration, falling back to the corresponding default.
 * @param key either the map or reduce memory-mb configuration key
 * @return the configured memory in MB (never 0)
 * @throws ProcessingException if the resolved memory is 0 or the key is unrecognized
 */
private long getMemoryMb(String key) {
  final long memoryMb;
  if (Constants.MAP_MEMORY_MB_CONF_KEY.equals(key)) {
    memoryMb = this.jobConf.getLong(key, Constants.DEFAULT_MAP_MEMORY_MB);
  } else if (Constants.REDUCE_MEMORY_MB_CONF_KEY.equals(key)) {
    memoryMb = this.jobConf.getLong(key, Constants.DEFAULT_REDUCE_MEMORY_MB);
  } else {
    memoryMb = 0L;
  }
  if (memoryMb == 0L) {
    throw new ProcessingException(
        "While correcting slot millis, " + key + " was found to be 0 ");
  }
  return memoryMb;
}
/**
 * Issue #51 in hraven on github
 * map and reduce slot millis in Hadoop 2.0 are not calculated properly.
 * They are aproximately 4X off by actual value.
 * calculate the correct map slot millis as
 * hadoop2ReportedMapSlotMillis * yarn.scheduler.minimum-allocation-mb
 * / mapreduce.mapreduce.memory.mb
 * similarly for reduce slot millis
 *
 * Marking method as deprecated as noted in Pull Request #132
 * (still invoked from the counters populatePut for SLOTS_MILLIS_* counters)
 * @param counterName
 * @param counterValue
 * @return corrected counter value
 */
@Deprecated
private Long getStandardizedCounterValue(String counterName, Long counterValue) {
  if (jobConf == null) {
    throw new ProcessingException("While correcting slot millis, jobConf is null");
  }
  long yarnSchedulerMinMB = this.jobConf.getLong(Constants.YARN_SCHEDULER_MIN_MB,
      Constants.DEFAULT_YARN_SCHEDULER_MIN_MB);
  long updatedCounterValue = 0L;
  long memoryMb = 0L;
  String key;
  if (Constants.SLOTS_MILLIS_MAPS.equals(counterName)) {
    key = Constants.MAP_MEMORY_MB_CONF_KEY;
    memoryMb = getMemoryMb(key);
    // integer division: the corrected value is truncated, not rounded
    updatedCounterValue = counterValue * yarnSchedulerMinMB / memoryMb;
    this.jobDetails.setMapSlotMillis(updatedCounterValue);
  } else {
    // anything that is not SLOTS_MILLIS_MAPS is treated as the reduce counter here
    key = Constants.REDUCE_MEMORY_MB_CONF_KEY;
    memoryMb = getMemoryMb(key);
    updatedCounterValue = counterValue * yarnSchedulerMinMB / memoryMb;
    this.jobDetails.setReduceSlotMillis(updatedCounterValue);
  }
  LOG.info("Updated " + counterName + " from " + counterValue + " to " + updatedCounterValue
      + " based on " + Constants.YARN_SCHEDULER_MIN_MB + ": " + yarnSchedulerMinMB
      + " and " + key + ": " + memoryMb);
  return updatedCounterValue;
}
/**
 * Returns the Task ID or Task Attempt ID, stripped of the leading job ID,
 * appended to the job row key. A null ID yields an empty task component; an ID
 * that does not carry the expected "prefix + jobNumber + _" lead-in is kept as-is.
 */
public byte[] getTaskKey(String prefix, String jobNumber, String fullId) {
  String taskComponent;
  if (fullId == null) {
    taskComponent = "";
  } else {
    taskComponent = fullId;
    final String expectedPrefix = prefix + jobNumber + "_";
    if (fullId.startsWith(expectedPrefix) && fullId.length() > expectedPrefix.length()) {
      taskComponent = fullId.substring(expectedPrefix.length());
    }
  }
  return taskKeyConv.toBytes(new TaskKey(this.jobKey, taskComponent));
}
/**
 * Returns the AM attempt id (prefix + full attempt id) appended to the job row key.
 */
public byte[] getAMKey(String prefix, String fullId) {
  return taskKeyConv.toBytes(new TaskKey(this.jobKey, prefix + fullId));
}
/**
 * {@inheritDoc}
 * Returns the accumulated job-level puts.
 */
@Override
public List<Put> getJobPuts() {
  return this.jobPuts;
}
/**
 * {@inheritDoc}
 * Returns the accumulated task/attempt-level puts.
 */
@Override
public List<Put> getTaskPuts() {
  return this.taskPuts;
}
/**
 * Utility function for printing all puts to stdout (debugging aid): first the
 * column-family names, then each row/qualifier/value triple.
 */
public void printAllPuts(List<Put> puts) {
  for (Put put : puts) {
    Map<byte[], List<KeyValue>> familyMap = put.getFamilyMap();
    for (byte[] family : familyMap.keySet()) {
      System.out.println(" k " + Bytes.toString(family));
    }
    for (List<KeyValue> kvs : familyMap.values()) {
      for (KeyValue kv : kvs) {
        System.out.println("\n row: " + taskKeyConv.fromBytes(kv.getRow())
            + "\n " + Bytes.toString(kv.getQualifier()) + ": " + Bytes.toString(kv.getValue()));
      }
    }
  }
}
/**
 * calculate mega byte millis puts as:
 * if not uberized:
 * map slot millis * mapreduce.map.memory.mb
 * + reduce slot millis * mapreduce.reduce.memory.mb
 * + yarn.app.mapreduce.am.resource.mb * job runtime
 * if uberized:
 * yarn.app.mapreduce.am.resource.mb * job run time
 *
 * NOTE(review): returns null when jobConf is absent — callers must handle the
 * nullable boxed Long to avoid an unboxing NPE.
 */
@Override
public Long getMegaByteMillis() {
  long endTime = this.jobDetails.getFinishTime();
  long startTime = this.jobDetails.getSubmitTime();
  // both timestamps must have been captured earlier (processAllTypes) to proceed
  if (endTime == Constants.NOTFOUND_VALUE || startTime == Constants.NOTFOUND_VALUE)
  {
    throw new ProcessingException("Cannot calculate megabytemillis for " + jobKey
        + " since one or more of endTime " + endTime + " startTime " + startTime
        + " not found!");
  }
  long jobRunTime = 0L;
  long amMb = 0L;
  long mapMb = 0L;
  long reduceMb = 0L;
  jobRunTime = endTime - startTime;
  if (jobConf == null) {
    LOG.error("job conf is null? for job key: " + jobKey.toString());
    return null;
  }
  // get am memory mb, map memory mb, reducer memory mb from job conf
  try {
    amMb = jobConf.getLong(Constants.AM_MEMORY_MB_CONF_KEY, Constants.NOTFOUND_VALUE);
    mapMb = jobConf.getLong(Constants.MAP_MEMORY_MB_CONF_KEY, Constants.NOTFOUND_VALUE);
    reduceMb = jobConf.getLong(Constants.REDUCE_MEMORY_MB_CONF_KEY, Constants.NOTFOUND_VALUE);
  } catch (ConversionException ce) {
    LOG.error(" Could not convert to long ", ce);
    throw new ProcessingException(
        " Can't calculate megabytemillis since conversion to long failed", ce);
  }
  // the AM memory setting is mandatory; map/reduce settings may legitimately be absent
  if (amMb == Constants.NOTFOUND_VALUE ) {
    throw new ProcessingException("Cannot calculate megabytemillis for " + jobKey
        + " since " + Constants.AM_MEMORY_MB_CONF_KEY + " not found!");
  }
  long mapSlotMillis = this.jobDetails.getMapSlotMillis();
  long reduceSlotMillis = this.jobDetails.getReduceSlotMillis();
  /* in case of older versions of hadoop2
   * the counter of mb millis is not available
   * then use slot millis counter value
   */
  if (this.mapMbMillis == Constants.NOTFOUND_VALUE) {
    this.mapMbMillis = (mapMb * mapSlotMillis);
  }
  if (this.reduceMbMillis == Constants.NOTFOUND_VALUE) {
    this.reduceMbMillis = (reduceMb * reduceSlotMillis);
  }
  long mbMillis = 0L;
  if (uberized) {
    // uberized jobs run entirely inside the AM container
    mbMillis = amMb * jobRunTime;
  } else {
    mbMillis = this.mapMbMillis + this.reduceMbMillis + (amMb * jobRunTime);
  }
  LOG.debug("For " + jobKey.toString() + "\n" + Constants.MEGABYTEMILLIS + " is " + mbMillis
      + " since \n uberized: " + uberized + " \n " + "mapMbMillis: " + mapMbMillis
      + " reduceMbMillis:" + reduceMbMillis + "mapMb: " + mapMb + " mapSlotMillis: "
      + mapSlotMillis + " \n " + " reduceMb: " + reduceMb + " reduceSlotMillis: "
      + reduceSlotMillis + " \n " + " amMb: " + amMb + " jobRunTime: " + jobRunTime
      + " start time: " + startTime + " endtime " + endTime);
  this.jobDetails.setMegabyteMillis(mbMillis);
  return mbMillis;
}
/** Returns the job details accumulated while parsing this history file. */
@Override
public JobDetails getJobDetails() {
  return jobDetails;
}
/** @return map-phase megabyte-millis (from the MB_MILLIS_MAPS counter, or derived later) */
public long getMapMbMillis() {
  return mapMbMillis;
}
public void setMapMbMillis(long mapMbMillis) {
  this.mapMbMillis = mapMbMillis;
}
/** @return reduce-phase megabyte-millis (from the MB_MILLIS_REDUCES counter, or derived later) */
public long getReduceMbMillis() {
  return reduceMbMillis;
}
public void setReduceMbMillis(long reduceMbMillis) {
  this.reduceMbMillis = reduceMbMillis;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test.replication;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import org.apache.accumulo.core.client.BatchScanner;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.impl.Table;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.user.WholeRowIterator;
import org.apache.accumulo.core.protobuf.ProtobufUtil;
import org.apache.accumulo.core.replication.ReplicationSchema.OrderSection;
import org.apache.accumulo.core.replication.ReplicationSchema.StatusSection;
import org.apache.accumulo.core.replication.ReplicationSchema.WorkSection;
import org.apache.accumulo.core.replication.ReplicationTable;
import org.apache.accumulo.core.replication.ReplicationTarget;
import org.apache.accumulo.core.security.TablePermission;
import org.apache.accumulo.master.replication.RemoveCompleteReplicationRecords;
import org.apache.accumulo.server.replication.StatusUtil;
import org.apache.accumulo.server.replication.proto.Replication.Status;
import org.apache.accumulo.test.functional.ConfigurableMacBase;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Iterables;
/**
 * Integration tests for {@link RemoveCompleteReplicationRecords}: verifies that only
 * rows whose every status/work entry is closed and fully replicated are removed from
 * the replication table, while open, partial, or unclosed rows are left intact.
 */
public class RemoveCompleteReplicationRecordsIT extends ConfigurableMacBase {
  private MockRemoveCompleteReplicationRecords rcrr;
  private Connector conn;

  /** Widens visibility of removeCompleteRecords so tests can invoke it directly. */
  private static class MockRemoveCompleteReplicationRecords extends RemoveCompleteReplicationRecords {
    public MockRemoveCompleteReplicationRecords(Connector conn) {
      super(conn);
    }

    @Override
    public long removeCompleteRecords(Connector conn, BatchScanner bs, BatchWriter bw) {
      return super.removeCompleteRecords(conn, bs, bw);
    }
  }

  @Before
  public void initialize() throws Exception {
    conn = getConnector();
    rcrr = new MockRemoveCompleteReplicationRecords(conn);
    // the test user needs read/write on the replication table, and the table must be online
    conn.securityOperations().grantTablePermission(conn.whoami(), ReplicationTable.NAME, TablePermission.READ);
    conn.securityOperations().grantTablePermission(conn.whoami(), ReplicationTable.NAME, TablePermission.WRITE);
    ReplicationTable.setOnline(conn);
  }

  /** Builds a table ID from an int (test helper). */
  private Table.ID createTableId(int i) {
    return Table.ID.of(Integer.toString(i));
  }

  @Test
  public void notYetReplicationRecordsIgnored() throws Exception {
    BatchWriter bw = ReplicationTable.getBatchWriter(conn);
    int numRecords = 3;
    // records whose status is still open (unknown length) must never be removed
    for (int i = 0; i < numRecords; i++) {
      String file = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
      Mutation m = new Mutation(file);
      StatusSection.add(m, createTableId(i), StatusUtil.openWithUnknownLengthValue());
      bw.addMutation(m);
    }
    bw.close();
    Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    try (BatchScanner bs = ReplicationTable.getBatchScanner(conn, 1)) {
      bs.setRanges(Collections.singleton(new Range()));
      IteratorSetting cfg = new IteratorSetting(50, WholeRowIterator.class);
      bs.addScanIterator(cfg);
      // nothing should be deleted, so an unconfigured mock BatchWriter suffices
      bw = EasyMock.createMock(BatchWriter.class);
      EasyMock.replay(bw);
      rcrr.removeCompleteRecords(conn, bs, bw);
      Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    }
  }

  @Test
  public void partiallyReplicatedRecordsIgnored() throws Exception {
    BatchWriter bw = ReplicationTable.getBatchWriter(conn);
    int numRecords = 3;
    Status.Builder builder = Status.newBuilder();
    builder.setClosed(false);
    builder.setEnd(10000);
    builder.setInfiniteEnd(false);
    // begin < end means replication is only partial for each record
    for (int i = 0; i < numRecords; i++) {
      String file = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
      Mutation m = new Mutation(file);
      StatusSection.add(m, createTableId(i), ProtobufUtil.toValue(builder.setBegin(1000 * (i + 1)).build()));
      bw.addMutation(m);
    }
    bw.close();
    Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    try (BatchScanner bs = ReplicationTable.getBatchScanner(conn, 1)) {
      bs.setRanges(Collections.singleton(new Range()));
      IteratorSetting cfg = new IteratorSetting(50, WholeRowIterator.class);
      bs.addScanIterator(cfg);
      bw = EasyMock.createMock(BatchWriter.class);
      EasyMock.replay(bw);
      // We don't remove any records, so we can just pass in a fake BW for both
      rcrr.removeCompleteRecords(conn, bs, bw);
      Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    }
  }

  @Test
  public void replicatedClosedWorkRecordsAreNotRemovedWithoutClosedStatusRecords() throws Exception {
    BatchWriter replBw = ReplicationTable.getBatchWriter(conn);
    int numRecords = 3;
    Status.Builder builder = Status.newBuilder();
    builder.setClosed(false);
    builder.setEnd(10000);
    builder.setInfiniteEnd(false);
    // Write out numRecords entries to both replication and metadata tables, none of which are fully replicated
    for (int i = 0; i < numRecords; i++) {
      String file = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
      Mutation m = new Mutation(file);
      StatusSection.add(m, createTableId(i), ProtobufUtil.toValue(builder.setBegin(1000 * (i + 1)).build()));
      replBw.addMutation(m);
    }
    // Add two records that we can delete
    String fileToRemove = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
    Mutation m = new Mutation(fileToRemove);
    StatusSection.add(m, Table.ID.of("5"), ProtobufUtil.toValue(builder.setBegin(10000).setEnd(10000).setClosed(false).build()));
    replBw.addMutation(m);
    numRecords++;
    fileToRemove = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
    m = new Mutation(fileToRemove);
    StatusSection.add(m, Table.ID.of("6"), ProtobufUtil.toValue(builder.setBegin(10000).setEnd(10000).setClosed(false).build()));
    replBw.addMutation(m);
    numRecords++;
    replBw.flush();
    // Make sure that we have the expected number of records in both tables
    Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    // We should not remove any records because they're missing closed status
    try (BatchScanner bs = ReplicationTable.getBatchScanner(conn, 1)) {
      bs.setRanges(Collections.singleton(new Range()));
      IteratorSetting cfg = new IteratorSetting(50, WholeRowIterator.class);
      bs.addScanIterator(cfg);
      try {
        Assert.assertEquals(0L, rcrr.removeCompleteRecords(conn, bs, replBw));
      } finally {
        replBw.close();
      }
    }
  }

  @Test
  public void replicatedClosedRowsAreRemoved() throws Exception {
    BatchWriter replBw = ReplicationTable.getBatchWriter(conn);
    int numRecords = 3;
    Status.Builder builder = Status.newBuilder();
    builder.setClosed(false);
    builder.setEnd(10000);
    builder.setInfiniteEnd(false);
    long time = System.currentTimeMillis();
    // Write out numRecords entries to both replication and metadata tables, none of which are fully replicated
    for (int i = 0; i < numRecords; i++) {
      builder.setCreatedTime(time++);
      String file = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
      Mutation m = new Mutation(file);
      Value v = ProtobufUtil.toValue(builder.setBegin(1000 * (i + 1)).build());
      StatusSection.add(m, createTableId(i), v);
      replBw.addMutation(m);
      m = OrderSection.createMutation(file, time);
      OrderSection.add(m, createTableId(i), v);
      replBw.addMutation(m);
    }
    Set<String> filesToRemove = new HashSet<>();
    // We created two mutations for each file
    numRecords *= 2;
    int finalNumRecords = numRecords;
    // Add two records that we can delete
    String fileToRemove = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
    filesToRemove.add(fileToRemove);
    Mutation m = new Mutation(fileToRemove);
    ReplicationTarget target = new ReplicationTarget("peer1", "5", Table.ID.of("5"));
    Value value = ProtobufUtil.toValue(builder.setBegin(10000).setEnd(10000).setClosed(true).setCreatedTime(time).build());
    StatusSection.add(m, Table.ID.of("5"), value);
    WorkSection.add(m, target.toText(), value);
    replBw.addMutation(m);
    m = OrderSection.createMutation(fileToRemove, time);
    OrderSection.add(m, Table.ID.of("5"), value);
    replBw.addMutation(m);
    time++;
    numRecords += 3;
    fileToRemove = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
    filesToRemove.add(fileToRemove);
    m = new Mutation(fileToRemove);
    value = ProtobufUtil.toValue(builder.setBegin(10000).setEnd(10000).setClosed(true).setCreatedTime(time).build());
    target = new ReplicationTarget("peer1", "6", Table.ID.of("6"));
    StatusSection.add(m, Table.ID.of("6"), value);
    WorkSection.add(m, target.toText(), value);
    replBw.addMutation(m);
    m = OrderSection.createMutation(fileToRemove, time);
    OrderSection.add(m, Table.ID.of("6"), value);
    replBw.addMutation(m);
    time++;
    numRecords += 3;
    replBw.flush();
    // Make sure that we have the expected number of records in both tables
    Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    // We should remove the two fully completed records we inserted
    try (BatchScanner bs = ReplicationTable.getBatchScanner(conn, 1)) {
      bs.setRanges(Collections.singleton(new Range()));
      StatusSection.limit(bs);
      WorkSection.limit(bs);
      IteratorSetting cfg = new IteratorSetting(50, WholeRowIterator.class);
      bs.addScanIterator(cfg);
      try {
        Assert.assertEquals(4L, rcrr.removeCompleteRecords(conn, bs, replBw));
      } finally {
        replBw.close();
      }
      int actualRecords = 0;
      for (Entry<Key,Value> entry : ReplicationTable.getScanner(conn)) {
        Assert.assertFalse(filesToRemove.contains(entry.getKey().getRow().toString()));
        actualRecords++;
      }
      Assert.assertEquals(finalNumRecords, actualRecords);
    }
  }

  @Test
  public void partiallyReplicatedEntriesPrecludeRowDeletion() throws Exception {
    BatchWriter replBw = ReplicationTable.getBatchWriter(conn);
    int numRecords = 3;
    Status.Builder builder = Status.newBuilder();
    builder.setClosed(false);
    builder.setEnd(10000);
    builder.setInfiniteEnd(false);
    // Write out numRecords entries to both replication and metadata tables, none of which are fully replicated
    for (int i = 0; i < numRecords; i++) {
      String file = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
      Mutation m = new Mutation(file);
      StatusSection.add(m, createTableId(i), ProtobufUtil.toValue(builder.setBegin(1000 * (i + 1)).build()));
      replBw.addMutation(m);
    }
    // Add two records that we can delete
    String fileToRemove = "/accumulo/wal/tserver+port/" + UUID.randomUUID();
    Mutation m = new Mutation(fileToRemove);
    ReplicationTarget target = new ReplicationTarget("peer1", "5", Table.ID.of("5"));
    Value value = ProtobufUtil.toValue(builder.setBegin(10000).setEnd(10000).setClosed(true).build());
    StatusSection.add(m, Table.ID.of("5"), value);
    WorkSection.add(m, target.toText(), value);
    target = new ReplicationTarget("peer2", "5", Table.ID.of("5"));
    WorkSection.add(m, target.toText(), value);
    // one of the three work entries is NOT closed, which must block deletion of the row
    target = new ReplicationTarget("peer3", "5", Table.ID.of("5"));
    WorkSection.add(m, target.toText(), ProtobufUtil.toValue(builder.setClosed(false).build()));
    replBw.addMutation(m);
    numRecords += 4;
    replBw.flush();
    // Make sure that we have the expected number of records in both tables
    Assert.assertEquals(numRecords, Iterables.size(ReplicationTable.getScanner(conn)));
    // We should remove the two fully completed records we inserted
    try (BatchScanner bs = ReplicationTable.getBatchScanner(conn, 1)) {
      bs.setRanges(Collections.singleton(new Range()));
      IteratorSetting cfg = new IteratorSetting(50, WholeRowIterator.class);
      bs.addScanIterator(cfg);
      try {
        Assert.assertEquals(0L, rcrr.removeCompleteRecords(conn, bs, replBw));
      } finally {
        replBw.close();
      }
    }
  }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.anonymousToInner;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.ui.NameSuggestionsField;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.ParameterTablePanel;
import com.intellij.refactoring.util.RefactoringMessageUtil;
import com.intellij.refactoring.util.VariableData;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.NonFocusableCheckBox;
import com.intellij.util.ui.FormBuilder;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.HashMap;
import java.util.Map;
class AnonymousToInnerDialog extends DialogWrapper{
private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.anonymousToInner.AnonymousToInnerDialog");
private final Project myProject;
private final PsiAnonymousClass myAnonClass;
private final boolean myShowCanBeStatic;
private NameSuggestionsField myNameField;
private final VariableData[] myVariableData;
private final Map<PsiVariable,VariableInfo> myVariableToInfoMap = new HashMap<>();
private JCheckBox myCbMakeStatic;
/**
 * Builds the "Convert Anonymous to Inner" dialog: indexes the captured variables,
 * derives parameter-style names for each, initializes the Swing components
 * (via init()), and then seeds the class-name field with suggestions built from
 * the base class name and its type parameters.
 */
public AnonymousToInnerDialog(Project project, PsiAnonymousClass anonClass, final VariableInfo[] variableInfos,
                              boolean showCanBeStatic) {
  super(project, true);
  myProject = project;
  myAnonClass = anonClass;
  myShowCanBeStatic = showCanBeStatic;
  setTitle(AnonymousToInnerHandler.REFACTORING_NAME);
  // index captured variables so getVariableInfos() can map edits back to them
  for (VariableInfo info : variableInfos) {
    myVariableToInfoMap.put(info.variable, info);
  }
  myVariableData = new VariableData[variableInfos.length];
  final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(myProject);
  for(int idx = 0; idx < variableInfos.length; idx++){
    VariableInfo info = variableInfos[idx];
    String name = info.variable.getName();
    VariableKind kind = codeStyleManager.getVariableKind(info.variable);
    // normalize the variable name through property form into PARAMETER naming style
    name = codeStyleManager.variableNameToPropertyName(name, kind);
    name = codeStyleManager.propertyNameToVariableName(name, VariableKind.PARAMETER);
    VariableData data = new VariableData(info.variable);
    data.name = name;
    data.passAsParameter = true;
    myVariableData[idx] = data;
  }
  // init() must run before the suggestion code below, since it creates myNameField
  init();
  final String[] names;
  String name = myAnonClass.getBaseClassReference().getReferenceName();
  PsiType[] typeParameters = myAnonClass.getBaseClassReference().getTypeParameters();
  // build e.g. "StringComparator" from "Comparator<String>"; erased/Object/null types contribute nothing
  final String typeParamsList = StringUtil.join(typeParameters, psiType -> {
    PsiType type = psiType;
    if (psiType instanceof PsiClassType) {
      type = TypeConversionUtil.erasure(psiType);
    }
    if (type == null || type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) return "";
    if (type instanceof PsiArrayType) {
      type = type.getDeepComponentType();
    }
    return StringUtil.getShortName(type.getPresentableText());
  }, "") + name;
  if (!typeParamsList.equals(name)) {
    names = new String[]{typeParamsList, "My" + name};
  } else {
    names = new String[]{"My" + name};
  }
  myNameField.setSuggestions(names);
  myNameField.selectNameWithoutExtension();
}
/** Dialog buttons: OK, Cancel and Help. */
@NotNull
protected Action[] createActions(){
    return new Action[]{
        getOKAction(),
        getCancelAction(),
        getHelpAction()
    };
}
/** Gives initial focus to the class-name input field. */
public JComponent getPreferredFocusedComponent() {
    final NameSuggestionsField nameField = myNameField;
    return nameField.getFocusableComponent();
}
/**
 * Returns whether the new inner class should be declared {@code static}.
 * The checkbox is only created in createNorthPanel() when
 * {@code myShowCanBeStatic} is false; when it was never built, this used to
 * throw a NullPointerException. Default to {@code true} in that case, which
 * matches the checkbox's initial selected state.
 */
public boolean isMakeStatic() {
    return myCbMakeStatic == null || myCbMakeStatic.isSelected();
}
/** Returns the inner class name currently entered by the user. */
public String getClassName() {
    final String enteredName = myNameField.getEnteredName();
    return enteredName;
}
/**
 * Transfers the user's table edits back into the original VariableInfo
 * objects: pass-as-parameter flag, parameter name, and a field name derived
 * from the parameter name via the project's code style settings.
 *
 * @return the updated infos, in table order
 */
public VariableInfo[] getVariableInfos() {
    JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(myProject);
    VariableInfo[] infos = new VariableInfo[myVariableData.length];
    for (int idx = 0; idx < myVariableData.length; idx++) {
        VariableData data = myVariableData[idx];
        VariableInfo info = myVariableToInfoMap.get(data.variable);
        info.passAsParameter = data.passAsParameter;
        // (was assigned twice in the original — duplicate removed)
        info.parameterName = data.name;
        // Derive the backing field name from the parameter name per code style.
        String propertyName = codeStyleManager.variableNameToPropertyName(data.name, VariableKind.PARAMETER);
        info.fieldName = codeStyleManager.propertyNameToVariableName(propertyName, VariableKind.FIELD);
        infos[idx] = info;
    }
    return infos;
}
/**
 * Validates the entered class name before closing the dialog: it must be
 * non-empty, a valid Java identifier, and must not clash with an existing
 * inner class of the target container. On failure, shows an error and keeps
 * the dialog open.
 */
protected void doOKAction(){
    String errorString = null;
    final String innerClassName = getClassName();
    final PsiManager manager = PsiManager.getInstance(myProject);
    if ("".equals(innerClassName)) {
        errorString = RefactoringBundle.message("anonymousToInner.no.inner.class.name");
    }
    else {
        if (!PsiNameHelper.getInstance(manager.getProject()).isIdentifier(innerClassName)) {
            errorString = RefactoringMessageUtil.getIncorrectIdentifierMessage(innerClassName);
        }
        else{
            PsiElement targetContainer = AnonymousToInnerHandler.findTargetContainer(myAnonClass);
            if (targetContainer instanceof PsiClass) {
                PsiClass targetClass = (PsiClass)targetContainer;
                PsiClass[] innerClasses = targetClass.getInnerClasses();
                // Reject a name already used by an inner class of the target.
                for (PsiClass innerClass : innerClasses) {
                    if (innerClassName.equals(innerClass.getName())) {
                        errorString = RefactoringBundle.message("inner.class.exists", innerClassName, targetClass.getName());
                        break;
                    }
                }
            }
            else {
                // findTargetContainer() is expected to return a class here.
                LOG.assertTrue(false);
            }
        }
    }
    if (errorString != null) {
        CommonRefactoringUtil.showErrorMessage(
            AnonymousToInnerHandler.REFACTORING_NAME,
            errorString,
            HelpID.ANONYMOUS_TO_INNER,
            myProject);
        myNameField.requestFocusInWindow();
        return;
    }
    super.doOKAction();
    myNameField.requestFocusInWindow();
}
/**
 * Builds the top area of the dialog: the class-name input field and,
 * when applicable, the "make static" checkbox.
 */
protected JComponent createNorthPanel() {
    myNameField = new NameSuggestionsField(myProject);
    FormBuilder formBuilder = FormBuilder.createFormBuilder()
        .addLabeledComponent(RefactoringBundle.message("anonymousToInner.class.name.label.text"), myNameField);
    // NOTE(review): the checkbox is built only when myShowCanBeStatic is FALSE —
    // confirm this inversion is intended; isMakeStatic() reads this checkbox.
    if(!myShowCanBeStatic) {
        myCbMakeStatic = new NonFocusableCheckBox(RefactoringBundle.message("anonymousToInner.make.class.static.checkbox.text"));
        myCbMakeStatic.setSelected(true); // default: make the new inner class static
        formBuilder.addComponent(myCbMakeStatic);
    }
    return formBuilder.getPanel();
}
/**
 * Builds the titled table panel where the user renames the captured
 * variables and chooses which ones are passed as constructor parameters.
 */
private JComponent createParametersPanel() {
    JPanel panel = new ParameterTablePanel(myProject, myVariableData, myAnonClass) {
        protected void updateSignature() {
            // No live signature preview in this dialog.
        }
        protected void doEnterAction() {
            // Enter in the table triggers the dialog's default (OK) button.
            clickDefaultButton();
        }
        protected void doCancelAction() {
            AnonymousToInnerDialog.this.doCancelAction();
        }
    };
    panel.setBorder(IdeBorderFactory.createTitledBorder(
        RefactoringBundle.message("anonymousToInner.parameters.panel.border.title"), false));
    return panel;
}
/**
 * Builds the central dialog area, which consists solely of the
 * captured-variables parameter table.
 */
protected JComponent createCenterPanel() {
    final JPanel centerPanel = new JPanel(new BorderLayout());
    centerPanel.add(createParametersPanel(), BorderLayout.CENTER);
    return centerPanel;
}
/** Opens the help topic for the "Anonymous to Inner" refactoring. */
protected void doHelpAction() {
    final HelpManager helpManager = HelpManager.getInstance();
    helpManager.invokeHelp(HelpID.ANONYMOUS_TO_INNER);
}
}
| |
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-gwt' project.
// Copyright 2019 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.gwt.client.ui;
import de.esoco.data.element.DataElement;
import de.esoco.data.element.DataElementList;
import de.esoco.ewt.UserInterfaceContext;
import de.esoco.ewt.build.ContainerBuilder;
import de.esoco.ewt.component.View;
import de.esoco.ewt.event.EventType;
import de.esoco.ewt.event.EwtEvent;
import de.esoco.ewt.style.StyleData;
import de.esoco.ewt.style.ViewStyle;
import de.esoco.gwt.client.res.EsocoGwtResources;
import de.esoco.lib.property.Alignment;
import de.esoco.lib.property.InteractionEventType;
import de.esoco.lib.property.StandardProperties;
import de.esoco.lib.property.ViewDisplayType;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import static de.esoco.lib.property.LayoutProperties.VERTICAL_ALIGN;
import static de.esoco.lib.property.LayoutProperties.VIEW_DISPLAY_TYPE;
import static de.esoco.lib.property.StyleProperties.AUTO_HIDE;
/********************************************************************
 * A class that handles the display of a data element list in a separate GEWT
 * view (a dialog or child view, optionally modal, depending on the element's
 * VIEW_DISPLAY_TYPE property).
 *
 * @author eso
 */
public class DataElementListView
{
    //~ Static fields/initializers ---------------------------------------------

    // View style flags applied to every newly created view; can be replaced
    // globally through setDefaultViewFlags().
    private static Set<ViewStyle.Flag> aDefaultViewFlags =
        EnumSet.noneOf(ViewStyle.Flag.class);

    //~ Instance fields --------------------------------------------------------

    // UI for the displayed data element list; created in the constructor.
    private DataElementListUI aViewUI;
    // The actual GEWT view; created lazily in show().
    private View aView;

    //~ Constructors -----------------------------------------------------------

    /***************************************
     * Creates a new instance.
     *
     * @param rParent      The parent data element panel manager
     * @param rViewElement The data element list to be displayed in a view
     */
    public DataElementListView(
        DataElementPanelManager rParent,
        DataElementList rViewElement)
    {
        aViewUI =
            (DataElementListUI) DataElementUIFactory.create(
                rParent,
                rViewElement);
    }

    //~ Static methods ---------------------------------------------------------

    /***************************************
     * Sets the default view style flags for new views.
     *
     * @param rDefaultViewFlags The default view style flags
     */
    public static final void setDefaultViewFlags(
        Set<ViewStyle.Flag> rDefaultViewFlags)
    {
        // Defensive copy so later changes to the argument have no effect.
        aDefaultViewFlags = EnumSet.copyOf(rDefaultViewFlags);
    }

    //~ Methods ----------------------------------------------------------------

    /***************************************
     * Collects the modified data element UIs that received user input.
     *
     * @param rModifiedElements A list to add modified data elements to
     */
    public void collectInput(List<DataElement<?>> rModifiedElements)
    {
        aViewUI.collectInput(rModifiedElements);
    }

    /***************************************
     * Invokes {@link DataElementListUI#enableInteraction(boolean)}.
     *
     * @param bEnable TRUE to enable interactions, FALSE to disable
     */
    public void enableInteraction(boolean bEnable)
    {
        aViewUI.enableInteraction(bEnable);
    }

    /***************************************
     * Returns the data element UI of this view.
     *
     * @return The view's data elementUI
     */
    public final DataElementListUI getViewUI()
    {
        return aViewUI;
    }

    /***************************************
     * Hides this view. Must only be called after {@link #show()}, which
     * creates the view.
     */
    public void hide()
    {
        aView.setVisible(false);
    }

    /***************************************
     * Returns the visibility of this view.
     *
     * @return TRUE if the view is visible, FALSE if it hidden
     */
    public boolean isVisible()
    {
        return aView.isVisible();
    }

    /***************************************
     * Shows this view: creates the view, builds the data element UI into it,
     * checks selection dependencies, and displays the view centered.
     */
    public void show()
    {
        // View type defaults to a modal dialog if the element has no property.
        ViewDisplayType eViewType =
            aViewUI.getDataElement()
            .getProperty(
                VIEW_DISPLAY_TYPE,
                ViewDisplayType.MODAL_DIALOG);
        ContainerBuilder<View> rBuilder =
            createView(aViewUI.getParent().getContainer().getView(), eViewType);
        StyleData rStyle =
            PanelManager.addStyles(
                StyleData.DEFAULT,
                aViewUI.getElementStyleName());
        aViewUI.buildUserInterface(rBuilder, rStyle);
        DataElementPanelManager rViewManager = aViewUI.getPanelManager();
        Collection<DataElement<?>> rElements =
            Arrays.asList(rViewManager.getDataElementList());
        rViewManager.checkSelectionDependencies(rViewManager, rElements);
        aView = rBuilder.getContainer();
        // Size the view to its content before displaying it centered.
        aView.pack();
        aView.getContext().displayViewCentered(aView);
    }

    /***************************************
     * Updates the data element of this view.
     *
     * @see DataElementUI#updateDataElement(DataElement, boolean)
     */
    public void updateDataElement(
        DataElementList rNewElement,
        boolean bUpdateUI)
    {
        aViewUI.clearError();
        aViewUI.updateDataElement(rNewElement, bUpdateUI);
    }

    /***************************************
     * Creates a view of a certain type to display the list element UIs.
     *
     * @param rParentView The parent view
     * @param eViewType   The view type
     *
     * @return A container builder wrapping the new view
     */
    private ContainerBuilder<View> createView(
        View rParentView,
        ViewDisplayType eViewType)
    {
        DataElementList rDataElementList = aViewUI.getDataElement();
        UserInterfaceContext rContext = rParentView.getContext();
        ViewStyle rViewStyle = ViewStyle.DEFAULT;
        ContainerBuilder<View> aViewBuilder = null;
        View aPanelView = null;
        // Start from the global defaults; element properties adjust them below.
        Set<ViewStyle.Flag> aViewFlags = EnumSet.copyOf(aDefaultViewFlags);
        String sDialogStyle =
            EsocoGwtResources.INSTANCE.css().gfDataElementListDialog();
        if (eViewType == ViewDisplayType.MODAL_VIEW ||
            eViewType == ViewDisplayType.MODAL_DIALOG)
        {
            rViewStyle = ViewStyle.MODAL;
        }
        if (rDataElementList.hasProperty(VERTICAL_ALIGN))
        {
            Alignment eAlignment =
                rDataElementList.getProperty(VERTICAL_ALIGN, null);
            // END alignment anchors the view to the bottom of the screen.
            if (eAlignment == Alignment.END)
            {
                aViewFlags.add(ViewStyle.Flag.BOTTOM);
            }
            else
            {
                aViewFlags.remove(ViewStyle.Flag.BOTTOM);
            }
            // FILL alignment makes the view occupy the full available size.
            if (eAlignment == Alignment.FILL)
            {
                aViewFlags.add(ViewStyle.Flag.FULL_SIZE);
            }
            else
            {
                aViewFlags.remove(ViewStyle.Flag.FULL_SIZE);
            }
        }
        if (rDataElementList.hasFlag(AUTO_HIDE))
        {
            aViewFlags.add(ViewStyle.Flag.AUTO_HIDE);
        }
        else
        {
            aViewFlags.remove(ViewStyle.Flag.AUTO_HIDE);
        }
        if (!aViewFlags.isEmpty())
        {
            rViewStyle = rViewStyle.withFlags(aViewFlags);
        }
        switch (eViewType)
        {
            case DIALOG:
            case MODAL_DIALOG:
                aPanelView = rContext.createDialog(rParentView, rViewStyle);
                break;
            case VIEW:
            case MODAL_VIEW:
                aPanelView = rContext.createChildView(rParentView, rViewStyle);
                break;
        }
        // Default title is a resource key derived from the element's ID;
        // an explicit TITLE property overrides it.
        String sViewTitle = "$ti" +
            rDataElementList.getResourceId();
        sViewTitle =
            rDataElementList.getProperty(StandardProperties.TITLE, sViewTitle);
        aPanelView.addEventListener(
            EventType.VIEW_CLOSING,
            this::handleViewClosing);
        aViewBuilder = new ContainerBuilder<View>(aPanelView);
        aPanelView.setTitle(sViewTitle);
        aPanelView.applyStyle(
            StyleData.DEFAULT.set(
                StyleData.WEB_ADDITIONAL_STYLES,
                sDialogStyle));
        return aViewBuilder;
    }

    /***************************************
     * Handles the view closing event by forwarding an UPDATE interaction for
     * the displayed data element to the panel manager.
     *
     * @param rEvent The event
     */
    private void handleViewClosing(EwtEvent rEvent)
    {
        aViewUI.getPanelManager()
        .handleInteractiveInput(
            aViewUI.getDataElement(),
            InteractionEventType.UPDATE);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.knative.http;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpServer;
import io.vertx.core.http.HttpServerRequest;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.BodyHandler;
import org.apache.camel.CamelContext;
import org.apache.camel.component.platform.http.PlatformHttpConstants;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.test.AvailablePortFinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A Vert.x-backed HTTP server used in Knative component tests. It records
 * every incoming request in a queue (retrievable via {@link #poll}) and then
 * delegates to a configurable handler (default: respond 200 with empty body).
 * Start/stop follow the Camel {@code ServiceSupport} lifecycle.
 */
public class KnativeHttpServer extends ServiceSupport {
    private static final Logger LOGGER = LoggerFactory.getLogger(KnativeHttpServer.class);

    private final CamelContext context;
    private final String host;
    private final int port;
    private final String path;
    // Captures incoming requests so tests can inspect them via poll().
    private final BlockingQueue<HttpServerRequest> requests;
    private final Handler<RoutingContext> handler;
    private Vertx vertx;
    private Router router;
    private ExecutorService executor;
    private HttpServer server;

    public KnativeHttpServer(CamelContext context) {
        this(context, "localhost", AvailablePortFinder.getNextAvailable(), "/", null);
    }

    public KnativeHttpServer(CamelContext context, int port) {
        this(context, "localhost", port, "/", null);
    }

    public KnativeHttpServer(CamelContext context, int port, Handler<RoutingContext> handler) {
        this(context, "localhost", port, "/", handler);
    }

    public KnativeHttpServer(CamelContext context, Handler<RoutingContext> handler) {
        this(context, "localhost", AvailablePortFinder.getNextAvailable(), "/", handler);
    }

    public KnativeHttpServer(CamelContext context, String host, int port, String path) {
        this(context, host, port, path, null);
    }

    public KnativeHttpServer(CamelContext context, String host, String path) {
        this(context, host, AvailablePortFinder.getNextAvailable(), path, null);
    }

    public KnativeHttpServer(CamelContext context, String host, String path, Handler<RoutingContext> handler) {
        this(context, host, AvailablePortFinder.getNextAvailable(), path, handler);
    }

    /**
     * Master constructor; all other constructors delegate here.
     * A null handler is replaced by one that replies 200 with an empty body.
     */
    public KnativeHttpServer(CamelContext context, String host, int port, String path, Handler<RoutingContext> handler) {
        this.context = context;
        this.host = host;
        this.port = port;
        this.path = path;
        this.requests = new LinkedBlockingQueue<>();
        this.handler = handler != null
            ? handler
            : event -> {
                event.response().setStatusCode(200);
                event.response().end();
            };
    }

    public String getHost() {
        return host;
    }

    public int getPort() {
        return port;
    }

    public String getPath() {
        return path;
    }

    /**
     * Retrieves (and removes) the next recorded request, waiting up to the
     * given timeout; returns null if none arrives in time.
     */
    public HttpServerRequest poll(int timeout, TimeUnit unit) throws InterruptedException {
        return requests.poll(timeout, unit);
    }

    @Override
    protected void doStart() {
        this.executor = context.getExecutorServiceManager().newSingleThreadExecutor(this, "knative-http-server");
        this.vertx = Vertx.vertx();
        this.server = vertx.createHttpServer();
        this.router = Router.router(vertx);
        // Handler chain: resume+body handling, then request capture, then the
        // user-supplied (or default) responder.
        this.router.route(path)
            .handler(event -> {
                event.request().resume();
                BodyHandler.create().handle(event);
            })
            .handler(event -> {
                this.requests.offer(event.request());
                event.next();
            })
            .handler(handler);
        // Start the server on the dedicated executor and block until the
        // async listen() callback has completed (latch), so doStart() returns
        // only once the server is actually up (or has failed).
        CompletableFuture.runAsync(
            () -> {
                CountDownLatch latch = new CountDownLatch(1);
                server.requestHandler(router).listen(port, host, result -> {
                    try {
                        if (result.failed()) {
                            LOGGER.warn("Failed to start Vert.x HttpServer on {}:{}, reason: {}",
                                host,
                                port,
                                result.cause().getMessage());
                            throw new RuntimeException(result.cause());
                        }
                        LOGGER.info("Vert.x HttpServer started on {}:{}", host, port);
                    } finally {
                        latch.countDown();
                    }
                });
                try {
                    latch.await();
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            },
            executor).toCompletableFuture().join();
    }

    @Override
    protected void doStop() throws Exception {
        // Tear down in order: HTTP server, then Vert.x instance, then executor.
        // Each async close is awaited via a latch so shutdown is synchronous.
        try {
            if (server != null) {
                CompletableFuture.runAsync(
                    () -> {
                        CountDownLatch latch = new CountDownLatch(1);
                        // remove the platform-http component
                        context.removeComponent(PlatformHttpConstants.PLATFORM_HTTP_COMPONENT_NAME);
                        server.close(result -> {
                            try {
                                if (result.failed()) {
                                    LOGGER.warn("Failed to close Vert.x HttpServer reason: {}",
                                        result.cause().getMessage());
                                    throw new RuntimeException(result.cause());
                                }
                                LOGGER.info("Vert.x HttpServer stopped");
                            } finally {
                                latch.countDown();
                            }
                        });
                        try {
                            latch.await();
                        } catch (InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    },
                    executor).toCompletableFuture().join();
            }
        } finally {
            // Always clear the reference, even if close failed.
            this.server = null;
        }
        if (vertx != null) {
            Future<?> future = executor.submit(
                () -> {
                    CountDownLatch latch = new CountDownLatch(1);
                    vertx.close(result -> {
                        try {
                            if (result.failed()) {
                                LOGGER.warn("Failed to close Vert.x reason: {}",
                                    result.cause().getMessage());
                                throw new RuntimeException(result.cause());
                            }
                            LOGGER.info("Vert.x stopped");
                        } finally {
                            latch.countDown();
                        }
                    });
                    try {
                        latch.await();
                    } catch (InterruptedException e) {
                        throw new RuntimeException(e);
                    }
                });
            try {
                future.get();
            } finally {
                vertx = null;
            }
        }
        if (executor != null) {
            context.getExecutorServiceManager().shutdown(executor);
            executor = null;
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) Intel Corporation
* Copyright (c) 2017
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.osc.core.broker.service.tasks.conformance.openstack.deploymentspec;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import javax.persistence.EntityManager;
import org.osc.core.broker.job.lock.LockObjectReference;
import org.osc.core.broker.model.entities.appliance.ApplianceSoftwareVersion;
import org.osc.core.broker.model.entities.appliance.DistributedApplianceInstance;
import org.osc.core.broker.model.entities.appliance.VirtualSystem;
import org.osc.core.broker.model.entities.virtualization.VirtualizationConnector;
import org.osc.core.broker.model.entities.virtualization.openstack.DeploymentSpec;
import org.osc.core.broker.model.entities.virtualization.openstack.OsFlavorReference;
import org.osc.core.broker.model.entities.virtualization.openstack.OsImageReference;
import org.osc.core.broker.model.entities.virtualization.openstack.OsSecurityGroupReference;
import org.osc.core.broker.model.plugin.ApiFactoryService;
import org.osc.core.broker.rest.client.openstack.openstack4j.Endpoint;
import org.osc.core.broker.rest.client.openstack.openstack4j.Openstack4JNova;
import org.osc.core.broker.rest.client.openstack.openstack4j.Openstack4JNova.CreatedServerDetails;
import org.osc.core.broker.rest.client.openstack.vmidc.notification.runner.RabbitMQRunner;
import org.osc.core.broker.service.persistence.DistributedApplianceInstanceEntityMgr;
import org.osc.core.broker.service.persistence.OSCEntityManager;
import org.osc.core.broker.service.tasks.TransactionalTask;
import org.slf4j.LoggerFactory;
import org.osc.sdk.controller.DefaultInspectionPort;
import org.osc.sdk.controller.DefaultNetworkPort;
import org.osc.sdk.controller.api.SdnRedirectionApi;
import org.osc.sdk.controller.element.Element;
import org.osc.sdk.manager.api.ManagerDeviceApi;
import org.osc.sdk.manager.element.ApplianceBootstrapInformationElement;
import org.osc.sdk.manager.element.BootStrapInfoProviderElement;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
import com.google.common.collect.ImmutableMap;
/**
 * Creates SVA (security virtual appliance) server for the given DAI: boots an
 * OpenStack server via Nova, records its identifiers on the DAI, and — when an
 * SDN controller is defined — registers the server's inspection ports.
 */
@Component(service = OsSvaServerCreateTask.class)
public class OsSvaServerCreateTask extends TransactionalTask {

    // Adapter exposing an appliance name plus bootstrap properties to the
    // manager device API.
    private static class ApplianceBootStrap implements BootStrapInfoProviderElement {
        private String applianceName;
        private Map<String, String> bootStrapProperties;

        public ApplianceBootStrap(String applianceName, Map<String, String> bootStrapProperties) {
            this.applianceName = applianceName;
            this.bootStrapProperties = bootStrapProperties;
        }

        @Override
        public String getName() {
            return this.applianceName;
        }

        @Override
        public Map<String, String> getBootStrapProperties() {
            return this.bootStrapProperties;
        }
    }

    private final Logger log = LoggerFactory.getLogger(OsSvaServerCreateTask.class);

    @Reference
    private ApiFactoryService apiFactoryService;

    // target ensures this only binds to active runner published by Server
    @Reference(target = "(active=true)")
    private RabbitMQRunner activeRunner;

    private DistributedApplianceInstance dai;
    private String availabilityZone;
    private String hypervisorHostName;

    /**
     * Factory method used instead of a constructor so the OSGi-injected
     * references (apiFactoryService, activeRunner, ...) are carried over to
     * the new task instance.
     */
    public OsSvaServerCreateTask create(DistributedApplianceInstance dai, String hypervisorName,
        String availabilityZone) {
        OsSvaServerCreateTask task = new OsSvaServerCreateTask();
        task.apiFactoryService = this.apiFactoryService;
        task.activeRunner = this.activeRunner;
        task.dai = dai;
        task.availabilityZone = availabilityZone;
        task.hypervisorHostName = hypervisorName;
        task.dbConnectionManager = this.dbConnectionManager;
        task.txBroadcastUtil = this.txBroadcastUtil;
        return task;
    }

    @Override
    public void executeTransaction(EntityManager em) throws Exception {
        // Re-load the DAI within this transaction's persistence context.
        this.dai = DistributedApplianceInstanceEntityMgr.findById(em, this.dai.getId());
        DeploymentSpec ds = this.dai.getDeploymentSpec();
        VirtualSystem vs = ds.getVirtualSystem();
        VirtualizationConnector vc = vs.getVirtualizationConnector();
        Endpoint endPoint = new Endpoint(vc, ds.getProjectName());
        this.dai = DistributedApplianceInstanceEntityMgr.findById(em, this.dai.getId());
        String applianceName = this.dai.getName();
        String imageRefId = getImageRefIdByRegion(vs, ds.getRegion());
        String flavorRef = getFlavorRefIdByRegion(vs, ds.getRegion());
        OsSecurityGroupReference sgReference = ds.getOsSecurityGroupReference();
        // Just some name for the file. We dont use file injection, this name gets populated within the
        // meta_data.json file within openstack. We hardcode and look for content within 0000 file
        // "zone:host" format pins the server to a specific hypervisor host.
        String availabilityZone = this.availabilityZone.concat(":").concat(this.hypervisorHostName);
        ApplianceSoftwareVersion applianceSoftwareVersion = this.dai.getVirtualSystem().getApplianceSoftwareVersion();
        CreatedServerDetails createdServer;
        // TODO: sjallapx - Hack to workaround issue SimpleDateFormat parse errors due to JCloud on some partner environments.
        // When the controller supports port groups, skip the OpenStack security
        // group on server creation (sgRefName == null).
        boolean createServerWithNoOSTSecurityGroup = this.dai.getVirtualSystem().getVirtualizationConnector()
            .isControllerDefined() ? this.apiFactoryService.supportsPortGroup(this.dai.getVirtualSystem()) : false;
        String sgRefName = createServerWithNoOSTSecurityGroup ? null : sgReference.getSgRefName();
        try (Openstack4JNova nova = new Openstack4JNova(endPoint)) {
            createdServer = nova.createServer(ds.getRegion(), availabilityZone, applianceName, imageRefId, flavorRef,
                generateBootstrapInfo(vs, applianceName), ds.getManagementNetworkId(), ds.getInspectionNetworkId(),
                applianceSoftwareVersion.hasAdditionalNicForInspection(), sgRefName);
        }
        // Persist the new server's IDs and inspection-port details on the DAI.
        this.dai.updateDaiOpenstackSvaInfo(createdServer.getServerId(),
            createdServer.getIngressInspectionMacAddr(),
            createdServer.getIngressInspectionPortId(),
            createdServer.getEgressInspectionMacAddr(),
            createdServer.getEgressInspectionPortId());
        // Add new server ID to VM notification listener for this DS
        this.activeRunner.getOsDeploymentSpecNotificationRunner()
            .addSVAIdToListener(this.dai.getDeploymentSpec().getId(), createdServer.getServerId());
        if (vc.isControllerDefined()) {
            try (SdnRedirectionApi controller = this.apiFactoryService.createNetworkRedirectionApi(this.dai);) {
                DefaultNetworkPort ingressPort = new DefaultNetworkPort(createdServer.getIngressInspectionPortId(),
                    createdServer.getIngressInspectionMacAddr());
                DefaultNetworkPort egressPort = new DefaultNetworkPort(createdServer.getEgressInspectionPortId(),
                    createdServer.getEgressInspectionMacAddr());
                if (this.apiFactoryService.supportsNeutronSFC(this.dai.getVirtualSystem())) {
                    // In case of neutron SFC, port group id needs to be used when registering and updated in the DS
                    String portGroupId = ds.getPortGroupId();
                    boolean pgAlreadyCreatedByOther = (portGroupId != null);
                    Element element = controller
                        .registerInspectionPort(new DefaultInspectionPort(ingressPort, egressPort, null, portGroupId));
                    portGroupId = element.getParentId();
                    this.log.info(String.format("Setting port_group_id to %s on DAI %s (id %d) for Deployment Spec %s (id: %d)",
                        portGroupId, this.dai.getName(), this.dai.getId(), ds.getName(), ds.getId()));
                    // Only the first registrant stores the port group id on the DS.
                    if (!pgAlreadyCreatedByOther) {
                        ds = em.find(DeploymentSpec.class, ds.getId());
                        ds.setPortGroupId(portGroupId);
                        OSCEntityManager.update(em, ds, this.txBroadcastUtil);
                    }
                } else if (this.apiFactoryService.supportsPortGroup(this.dai.getVirtualSystem())) {
                    String domainId = OpenstackUtil.extractDomainId(ds.getProjectId(),
                        ds.getProjectName(),
                        ds.getVirtualSystem().getVirtualizationConnector(),
                        Arrays.asList(ingressPort));
                    ingressPort.setParentId(domainId);
                    egressPort.setParentId(domainId);
                    //Element object in DefaultInspectionport is not used at this point, hence null
                    controller.registerInspectionPort(
                        new DefaultInspectionPort(ingressPort, egressPort, null));
                } else {
                    controller.registerInspectionPort(
                        new DefaultInspectionPort(ingressPort, egressPort, null));
                }
            }
        }
        this.log.info("Dai: " + this.dai + " Server Id set to: " + this.dai.getExternalId());
        OSCEntityManager.update(em, this.dai, this.txBroadcastUtil);
    }

    // Returns the image ref for the given region, or null if none matches.
    private String getImageRefIdByRegion(VirtualSystem vs, String region) {
        for (OsImageReference imageRef : vs.getOsImageReference()) {
            if (imageRef.getRegion().equals(region)) {
                return imageRef.getImageRefId();
            }
        }
        return null;
    }

    // Returns the flavor ref for the given region, or null if none matches.
    private String getFlavorRefIdByRegion(VirtualSystem vs, String region) {
        for (OsFlavorReference flavorRef : vs.getOsFlavorReference()) {
            if (flavorRef.getRegion().equals(region)) {
                return flavorRef.getFlavorRefId();
            }
        }
        return null;
    }

    /**
     * Obtains the appliance bootstrap information from the manager device API
     * using the software version's configuration properties.
     */
    private ApplianceBootstrapInformationElement generateBootstrapInfo(final VirtualSystem vs,
        final String applianceName) throws Exception {
        try (ManagerDeviceApi deviceApi = this.apiFactoryService.createManagerDeviceApi(vs)) {
            Map<String, String> bootstrapProperties = vs.getApplianceSoftwareVersion().getConfigProperties();
            return deviceApi
                .getBootstrapinfo(new ApplianceBootStrap(applianceName, ImmutableMap.copyOf(bootstrapProperties)));
        }
    }

    @Override
    public String getName() {
        return String.format("Deploying SVA for Distributed Appliance Instance '%s'", this.dai.getName());
    }

    @Override
    public Set<LockObjectReference> getObjects() {
        return LockObjectReference.getObjectReferences(this.dai);
    }
}
| |
package com.dianping.cat.plugin;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.regex.Pattern;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.unidal.helper.Files;
import org.unidal.helper.Files.AutoClose;
import com.dianping.cat.plugin.PropertyProviders.IValidator;
/**
* @goal install
* @aggregator true
*/
public class InstallMojo extends AbstractMojo {
private String m_path = "/data/appdatas/cat";
private String m_logPath = "/data/applogs/cat";
private String m_clientPath = m_path + File.separator + "client.xml";
private String m_serverPath = m_path + File.separator + "server.xml";
private String m_datasourcePath = m_path + File.separator + "datasources.xml";
/**
* @parameter property="jdbc.url"
* @readonly
*/
private String m_jdbcUrl;
/**
* @parameter property="jdbc.user"
* @readonly
*/
private String m_user;
/**
* @parameter property="jdbc.password"
* @readonly
*/
private String m_password;
/**
* @parameter property="verbose"
* @readonly
*/
private boolean m_verbose = false;
private void createDatabase(Statement stmt) throws SQLException {
ResultSet result = null;
try {
result = stmt.executeQuery("SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = 'cat'");
if (!result.next()) {
stmt.executeUpdate("create database cat");
} else {
getLog().info("Database 'cat' already exists, drop it first...");
stmt.executeUpdate("drop database cat");
getLog().info("Database 'cat' has dropped");
stmt.executeUpdate("create database cat");
}
} catch (SQLException e) {
if (e.getErrorCode() == 1007) {
getLog().info("Database 'cat' already exists, drop it first...");
stmt.executeUpdate("drop database cat");
getLog().info("Database 'cat' has dropped");
stmt.executeUpdate("create database cat");
} else {
throw e;
}
} finally {
result.close();
}
}
private void createTables(Statement stmt) throws IOException, SQLException {
String sqlTable = Files.forIO().readFrom(getClass().getResourceAsStream("Cat.sql"), "utf-8");
String[] tables = sqlTable.split(";");
for (String table : tables) {
if (table != null && table.trim().length() > 0) {
stmt.execute(table.trim() + ";");
}
}
}
private void debug(String info) {
if (m_verbose) {
getLog().debug(info);
}
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Preparing Cat environment...");
validate();
if (setupDatabase() && setupConfigurationFiles()) {
getLog().info("Preparing Cat environment ... DONE");
getLog().info("Use following command line to start local Cat server:");
getLog().info(" cd cat-home; mvn jetty:run");
getLog().info("Please open http://localhost:2281/cat in your browser");
}
}
private Connection getConnection(String jdbcUrl) throws ClassNotFoundException, SQLException {
Class.forName("com.mysql.jdbc.Driver");
Connection conn = DriverManager.getConnection(jdbcUrl, m_user, m_password);
return conn;
}
private boolean setupConfigurationFiles() throws MojoFailureException {
File path = new File(m_path);
File logPath = new File(m_logPath);
File temp = null;
try {
temp = File.createTempFile("test", "test");
} catch (IOException e1) {
getLog()
.error(
"Don't have privilege to read/write temp dir, please manually promote read/write privileges to this directory.");
throw new MojoFailureException("Don't have privilege to read/write temp dir");
}
if (!path.exists()) {
boolean result = logPath.mkdirs() && path.mkdirs();
if (!result || temp.exists()) {
getLog().error("Don't have privilege to read/write " + m_path + ", please manually make this directory");
throw new MojoFailureException("Don't have privilege to read/write " + m_path);
}
}
getLog().info("Generating the configuration files to " + m_path + " ...");
boolean isSuccess = false;
try {
debug("Generating client.xml ...");
Files.forIO().copy(getClass().getResourceAsStream("client.xml"), new FileOutputStream(m_clientPath),
AutoClose.INPUT_OUTPUT);
debug("Generating server.xml ...");
Files.forIO().copy(getClass().getResourceAsStream("server.xml"), new FileOutputStream(m_serverPath),
AutoClose.INPUT_OUTPUT);
debug("Generating datasources.xml .");
String datasources = Files.forIO().readFrom(getClass().getResourceAsStream("datasources.xml"), "utf-8");
datasources = datasources.replaceAll(Pattern.quote("${jdbc.url}"), m_jdbcUrl + "/cat");
datasources = datasources.replaceAll(Pattern.quote("${jdbc.user}"), m_user);
datasources = datasources.replaceAll(Pattern.quote("${jdbc.password}"), m_password);
Files.forIO().writeTo(new File(m_datasourcePath), datasources);
getLog().info("Configuration files are generated successfully");
isSuccess = true;
} catch (Exception e) {
getLog().error(e);
}
return isSuccess;
}
/**
 * Creates the "cat" database and its tables on the configured MySQL instance.
 *
 * @return true when both the database and the tables were created
 * @throws MojoFailureException if any step of the database setup fails
 */
private boolean setupDatabase() throws MojoFailureException {
    Connection connection = null;
    Statement statement = null;
    boolean succeeded = false;
    try {
        getLog().info("Connecting to database(" + m_jdbcUrl + ") ...");
        connection = getConnection(m_jdbcUrl);
        getLog().info("Connected to database(" + m_jdbcUrl + ")");
        getLog().info("Creating database(cat) ...");
        statement = connection.createStatement();
        createDatabase(statement);
        getLog().info("Database(cat) is created successfully");
        getLog().info("Creating tables ...");
        createTables(statement);
        getLog().info("Tables are created successfully");
        succeeded = true;
    } catch (Exception e) {
        getLog().error(e);
        throw new MojoFailureException(e.getMessage());
    } finally {
        // Close quietly: a failure while releasing JDBC resources must not mask the outcome.
        try {
            if (statement != null) {
                statement.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (Exception ignored) {
            // best-effort cleanup only
        }
    }
    return succeeded;
}
/**
 * Resolves the JDBC connection settings, preferring the environment variables
 * (mysql_jdbcUrl / mysql_username / mysql_password) and falling back to
 * interactive console prompts when any of them is missing.
 */
private void validate() {
    m_jdbcUrl = System.getenv("mysql_jdbcUrl");
    m_user = System.getenv("mysql_username");
    m_password = System.getenv("mysql_password");
    // Prompt for everything if any env var is absent (same behavior as before, without
    // the empty "accept" branch).
    if (m_jdbcUrl == null || m_user == null || m_password == null) {
        m_jdbcUrl = PropertyProviders.fromConsole().forString("jdbc.url", "Please input jdbc url:", null,
              "jdbc:mysql://127.0.0.1:3306", new IValidator<String>() {
                  @Override
                  public boolean validate(String url) {
                      return url.startsWith("jdbc:mysql://");
                  }
              });
        m_user = PropertyProviders.fromConsole().forString("jdbc.user", "Please input username:", null, null, null);
        m_password = PropertyProviders.fromConsole().forString("jdbc.password", "Please input password:", null, "",
              null);
    }
    getLog().info("jdbc.url: " + m_jdbcUrl);
    getLog().info("jdbc.user: " + m_user);
    // Security fix: never log credentials in clear text; only reveal whether one was provided.
    getLog().info("jdbc.password: " + (m_password == null || m_password.isEmpty() ? "(empty)" : "******"));
}
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.nullToEmpty;
import static java.util.Comparator.comparingInt;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ListMultimap;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.rhino.Node;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import javax.annotation.Nullable;
/**
* RenameVars renames all the variables names into short names, to reduce code
* size and also to obfuscate the code.
*/
final class RenameVars implements CompilerPass {
/**
* Limit on number of locals in a scope for temporary local renaming
* when {@code preferStableNames} is true.
*/
private static final int MAX_LOCALS_IN_SCOPE_TO_TEMP_RENAME = 1000;
private final AbstractCompiler compiler;
/** List of global NAME nodes */
private final ArrayList<Node> globalNameNodes = new ArrayList<>();
/** List of local NAME nodes */
private final ArrayList<Node> localNameNodes = new ArrayList<>();
/** Mapping of original names for change detection */
private final Map<Node, String> originalNameByNode = new HashMap<>();
/**
* Maps a name node to its pseudo name, null if we are not generating so
* there will be no overhead unless we are debugging.
*/
private final Map<Node, String> pseudoNameMap;
/** Set of extern variable names */
private Set<String> externNames;
/** Set of reserved variable names */
private final Set<String> reservedNames;
/** The renaming map */
private final Map<String, String> renameMap = new HashMap<>();
/** The previously used rename map. */
private final VariableMap prevUsedRenameMap;
/** The global name prefix */
private final String prefix;
/** Counter for each assignment */
private int assignmentCount = 0;
// Logic for bleeding functions, where the name leaks into the outer
// scope on IE but not on other browsers.
private final Set<Var> localBleedingFunctions = new HashSet<>();
private final ListMultimap<Scope, Var> localBleedingFunctionsPerScope =
ArrayListMultimap.create();
/** Tracks one original name: its reference count, source order, and eventual new name. */
class Assignment {
final boolean isLocal;
final String oldName;
final int orderOfOccurrence;
String newName;
int count; // Number of times this is referenced
Assignment(String name) {
// Locals were temporarily renamed with LOCAL_VAR_PREFIX during traversal,
// so the prefix tells local and global assignments apart.
this.isLocal = name.startsWith(LOCAL_VAR_PREFIX);
this.oldName = name;
this.newName = null;
this.count = 0;
// Represents the order at which a symbol appears in the source.
this.orderOfOccurrence = assignmentCount++;
}
/**
* Assigns the new name.
*/
void setNewName(String newName) {
// A name may only be finalized once.
checkState(this.newName == null);
this.newName = newName;
}
}
/** Maps an old name to a new name assignment */
private final Map<String, Assignment> assignments =
new HashMap<>();
/** Whether renaming should apply to local variables only. */
private final boolean localRenamingOnly;
private final boolean preferStableNames;
/** Characters that shouldn't be used in variable names. */
private final char[] reservedCharacters;
/** A prefix to distinguish temporary local names from global names */
private static final String LOCAL_VAR_PREFIX = "L ";
// Shared name generator
private final NameGenerator nameGenerator;
/*
* nameGenerator is a shared NameGenerator that this instance can use;
* the instance may reset or reconfigure it, so the caller should
* not expect any state to be preserved.
*/
RenameVars(
AbstractCompiler compiler,
String prefix,
boolean localRenamingOnly,
boolean generatePseudoNames,
boolean preferStableNames,
VariableMap prevUsedRenameMap,
@Nullable char[] reservedCharacters,
@Nullable Set<String> reservedNames,
NameGenerator nameGenerator) {
this.compiler = compiler;
this.prefix = nullToEmpty(prefix);
this.localRenamingOnly = localRenamingOnly;
// Only pay for the pseudo-name map when debugging output was requested.
if (generatePseudoNames) {
this.pseudoNameMap = new HashMap<>();
} else {
this.pseudoNameMap = null;
}
this.prevUsedRenameMap = prevUsedRenameMap;
this.reservedCharacters = reservedCharacters;
this.preferStableNames = preferStableNames;
// Defensive copy: reservedNames is mutated during processing.
if (reservedNames == null) {
this.reservedNames = new HashSet<>();
} else {
this.reservedNames = new HashSet<>(reservedNames);
}
this.nameGenerator = nameGenerator;
}
/**
* Iterate through the nodes, collect all the NAME nodes that need to be
* renamed, and count how many times each variable name is referenced.
*
* Keep track of all name references in globalNameNodes, and localNameNodes.
*
* To get shorter local variable renaming, we rename local variables to a
* temporary name "LOCAL_VAR_PREFIX + index" where index is the index of the
* variable declared in the local scope stack.
* e.g.
* Foo(fa, fb) {
* var c = function(d, e) { return fa; }
* }
* The indexes are: fa:0, fb:1, c:2, d:3, e:4
*
* In that way, local variable names are reused in each global function.
* e.g. the final code might look like
* function x(a,b) { ... }
* function y(a,b,c) { ... }
*/
class ProcessVars extends AbstractPostOrderCallback implements ScopedCallback {
@Override
public void enterScope(NodeTraversal t) {
if (t.inGlobalHoistScope() || !shouldTemporarilyRenameLocalsInScope(t.getScope())) {
return;
}
Scope scope = t.getScope();
// Record bleeding function names, keyed by the scope they bleed into, so
// getLocalVarIndex() can account for them when computing temporary indices.
for (Var current : scope.getVarIterable()) {
if (current.isBleedingFunction()) {
localBleedingFunctions.add(current);
localBleedingFunctionsPerScope.put(
scope.getParent(), current);
}
}
}
@Override
public void exitScope(NodeTraversal t) {}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (!(n.isName() || n.isImportStar())) {
return;
}
String name = n.getString();
// Ignore anonymous functions and classes.
if (name.isEmpty()) {
return;
}
// "import {x as y} from 'm';"
// Skip x because it's not a variable in this scope.
if (parent.isImportSpec() && parent.hasTwoChildren() && parent.getFirstChild() == n) {
return;
}
// Is this local or Global?
// Bleeding functions should be treated as part of their outer
// scope, because IE has bugs in how it handles bleeding
// functions.
Var var = t.getScope().getVar(name);
boolean local =
var != null
&& var.isLocal()
&& (var.getScope().getParent().isLocal() || !var.isBleedingFunction());
// Never rename references to the arguments array
if (var != null && var.isArguments()) {
reservedNames.add(name);
return;
}
// Are we renaming global variables?
if (!local && localRenamingOnly) {
reservedNames.add(name);
return;
}
// Check if we can rename this.
if (!okToRenameVar(name, local)) {
if (local) {
// Blindly de-uniquify for the Prototype library for
// http://blickly.github.io/closure-compiler-issues/#103
String newName = MakeDeclaredNamesUnique.ContextualRenameInverter.getOriginalName(name);
if (!newName.equals(name)) {
n.setString(newName);
}
}
return;
}
if (pseudoNameMap != null) {
recordPseudoName(n);
}
if (local && shouldTemporarilyRenameLocalsInScope(var.getScope())) {
// Give local variables a temporary name based on the
// variable's index in the scope to enable name reuse across
// locals in independent scopes.
String tempName = LOCAL_VAR_PREFIX + getLocalVarIndex(var);
incCount(tempName);
localNameNodes.add(n);
// Remember the original string in a name before it's temporarily filled with an "L".
originalNameByNode.put(n, n.getString());
n.setString(tempName);
} else if (var != null) { // Not an extern
// If it's global, increment global count
incCount(name);
globalNameNodes.add(n);
}
}
// Increment count of an assignment
void incCount(String name) {
Assignment s = assignments.computeIfAbsent(name, Assignment::new);
s.count++;
}
}
/**
* Sorts Assignment objects by their count, breaking ties by their order of
* occurrence in the source to ensure a deterministic total ordering.
*/
private static final Comparator<Assignment> FREQUENCY_COMPARATOR =
new Comparator<Assignment>() {
@Override
public int compare(Assignment a1, Assignment a2) {
// Higher counts sort first (descending frequency).
if (a1.count != a2.count) {
return a2.count - a1.count;
}
// Break a tie using the order in which the variable first appears in
// the source.
return ORDER_OF_OCCURRENCE_COMPARATOR.compare(a1, a2);
}
};
/** Sorts Assignment objects by the order the variable name first appears in the source. */
private static final Comparator<Assignment> ORDER_OF_OCCURRENCE_COMPARATOR =
comparingInt((Assignment arg) -> arg.orderOfOccurrence);
@Override
public void process(Node externs, Node root) {
this.externNames = NodeUtil.collectExternVariableNames(this.compiler, externs);
// Reset change-detection state in case this pass instance is reused.
originalNameByNode.clear();
// Do variable reference counting.
NodeTraversal.traverse(compiler, root, new ProcessVars());
// Make sure that new names don't overlap with extern names.
reservedNames.addAll(externNames);
// Rename vars, sorted by frequency of occurrence to minimize code size.
SortedSet<Assignment> varsByFrequency = new TreeSet<>(FREQUENCY_COMPARATOR);
varsByFrequency.addAll(assignments.values());
// First try to reuse names from an earlier compilation.
if (prevUsedRenameMap != null) {
reusePreviouslyUsedVariableMap(varsByFrequency);
}
// Assign names, sorted by descending frequency to minimize code size.
assignNames(varsByFrequency);
// Rename the globals!
for (Node n : globalNameNodes) {
setNameAndReport(n, getNewGlobalName(n));
}
// Rename the locals!
for (Node n : localNameNodes) {
setNameAndReport(n, getNewLocalName(n));
}
}
/** Applies the final name to a node and reports the change to the compiler if real. */
private void setNameAndReport(Node n, @Nullable String newName) {
// A null newName, indicates it should not be renamed.
if (newName != null && !newName.equals(n.getString())) {
n.setString(newName);
// Only mark changes if the final name change is different than it was original before being
// filled with the "L" temporary name.
if (!newName.equals(originalNameByNode.get(n))) {
compiler.reportChangeToEnclosingScope(n);
Node parent = n.getParent();
if (parent.isFunction() && NodeUtil.isFunctionDeclaration(parent)) {
// If we are renaming a function declaration, make sure the containing scope
// has the opportunity to act on the change.
compiler.reportChangeToEnclosingScope(parent);
}
}
}
}
/**
* Returns the assigned replacement for a global name node (or its pseudo name when
* debugging), or null when the name should be left unchanged.
*/
@Nullable
private String getNewGlobalName(Node n) {
String oldName = n.getString();
Assignment a = assignments.get(oldName);
if (a.newName != null && !a.newName.equals(oldName)) {
if (pseudoNameMap != null) {
return pseudoNameMap.get(n);
}
return a.newName;
} else {
return null;
}
}
/**
* Returns the assigned replacement for a temporarily-renamed local name node (or its
* pseudo name when debugging), or null when the name should be left unchanged.
*/
@Nullable
private String getNewLocalName(Node n) {
String oldTempName = n.getString();
Assignment a = assignments.get(oldTempName);
if (!a.newName.equals(oldTempName)) {
if (pseudoNameMap != null) {
return pseudoNameMap.get(n);
}
return a.newName;
}
return null;
}
private void recordPseudoName(Node n) {
// Variable names should be in a different name space than
// property pseudo names.
pseudoNameMap.put(n, '$' + n.getString() + "$$");
}
/**
* Runs through the assignments and reuses as many names as possible from the previously used
* variable map. Updates reservedNames with the set of names that were reused.
*/
private void reusePreviouslyUsedVariableMap(SortedSet<Assignment> varsToRename) {
// If prevUsedRenameMap had duplicate values then this pass would be
// non-deterministic.
// In such a case, the following will throw an IllegalArgumentException.
checkNotNull(prevUsedRenameMap.getNewNameToOriginalNameMap());
for (Assignment a : varsToRename) {
String prevNewName = prevUsedRenameMap.lookupNewName(a.oldName);
if (prevNewName == null || reservedNames.contains(prevNewName)) {
continue;
}
// Globals are only reused when they carry the configured prefix and are not externs.
if (a.isLocal
|| (!externNames.contains(a.oldName)
&& prevNewName.startsWith(prefix))) {
reservedNames.add(prevNewName);
finalizeNameAssignment(a, prevNewName);
}
}
}
/**
* Determines which new names to substitute for the original names.
*/
private void assignNames(SortedSet<Assignment> varsToRename) {
nameGenerator.reset(reservedNames, prefix, reservedCharacters);
NameGenerator globalNameGenerator = nameGenerator;
// Local variables never need a prefix.
// Also, we need to avoid conflicts between global and local variable
// names; we do this by having using the same generator (not two
// instances). The case where global variables have a prefix (and
// therefore we use two different generators) but a local variable name
// might nevertheless conflict with a global one is not handled.
NameGenerator localNameGenerator =
prefix.isEmpty()
? globalNameGenerator
: nameGenerator.clone(reservedNames, "", reservedCharacters);
// Generated names and the assignments for non-local vars.
List<Assignment> pendingAssignments = new ArrayList<>();
List<String> generatedNamesForAssignments = new ArrayList<>();
for (Assignment a : varsToRename) {
// Skip names already finalized (e.g. reused from the previous rename map).
if (a.newName != null) {
continue;
}
if (externNames.contains(a.oldName)) {
continue;
}
String newName;
if (a.isLocal) {
// For local variable, we make the assignment right away.
newName = localNameGenerator.generateNextName();
finalizeNameAssignment(a, newName);
} else {
// For non-local variable, delay finalizing the name assignment
// until we know how many new names we'll have of length 2, 3, etc.
newName = globalNameGenerator.generateNextName();
pendingAssignments.add(a);
generatedNamesForAssignments.add(newName);
}
reservedNames.add(newName);
}
// Now that we have a list of generated names, and a list of variable
// Assignment objects, we assign the generated names to the vars as
// follows:
// 1) The most frequent vars get the shorter names.
// 2) If N number of vars are going to be assigned names of the same
// length, we assign the N names based on the order at which the vars
// first appear in the source. This makes the output somewhat less
// random, because symbols declared close together are assigned names
// that are quite similar. With this heuristic, the output is more
// compressible.
// For instance, the output may look like:
// var da = "..", ea = "..";
// function fa() { .. } function ga() { .. }
int numPendingAssignments = generatedNamesForAssignments.size();
for (int i = 0; i < numPendingAssignments;) {
SortedSet<Assignment> varsByOrderOfOccurrence =
new TreeSet<>(ORDER_OF_OCCURRENCE_COMPARATOR);
// Add k number of Assignment to the set, where k is the number of
// generated names of the same length.
int len = generatedNamesForAssignments.get(i).length();
for (int j = i; j < numPendingAssignments
&& generatedNamesForAssignments.get(j).length() == len; j++) {
varsByOrderOfOccurrence.add(pendingAssignments.get(j));
}
// Now, make the assignments
for (Assignment a : varsByOrderOfOccurrence) {
finalizeNameAssignment(a, generatedNamesForAssignments.get(i));
++i;
}
}
}
/**
* Makes a final name assignment.
*/
private void finalizeNameAssignment(Assignment a, String newName) {
a.setNewName(newName);
// Keep track of the mapping
renameMap.put(a.oldName, newName);
}
/**
* Gets the variable map.
*/
VariableMap getVariableMap() {
return new VariableMap(ImmutableMap.copyOf(renameMap));
}
/**
* Determines whether a variable name is okay to rename.
*/
private boolean okToRenameVar(String name, boolean isLocal) {
return !compiler.getCodingConvention().isExported(name, isLocal);
}
/**
* Returns the index within the scope stack.
* e.g. function Foo(a) { var b; function c(d) { } }
* a = 0, b = 1, c = 2, d = 3
*/
private int getLocalVarIndex(Var v) {
int num = v.getIndex();
Scope s = v.getScope().getParent();
if (s == null) {
throw new IllegalArgumentException("Var is not local");
}
boolean isBleedingIntoScope = s.getParent() != null && localBleedingFunctions.contains(v);
while (s.getParent() != null) {
if (isBleedingIntoScope) {
// Offset by this function's position among the bleeding functions recorded for
// the scope it bleeds into; this is applied once, at the innermost ancestor.
num += localBleedingFunctionsPerScope.get(s).indexOf(v) + 1;
isBleedingIntoScope = false;
} else {
// Reserve index slots for all bleeding functions recorded for this ancestor scope.
num += localBleedingFunctionsPerScope.get(s).size();
}
if (shouldTemporarilyRenameLocalsInScope(s)) {
num += s.getVarCount();
}
s = s.getParent();
}
return num;
}
/**
* Returns true if the local variables in a scope should be given
* temporary names (eg, 'L 123') prior to renaming to allow reuse of
* names across scopes. With {@code preferStableNames}, temporary
* renaming is disabled if the number of locals in the scope is
* above a heuristic threshold to allow effective reuse of rename
* maps (see {@code prevUsedRenameMap}). In scopes with many
* variables the temporary name given to a variable is unlikely to
* be the same temporary name used when the rename map was created.
*/
private boolean shouldTemporarilyRenameLocalsInScope(Scope s) {
return (!preferStableNames || s.getVarCount() <= MAX_LOCALS_IN_SCOPE_TO_TEMP_RENAME);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.qjournal;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.qjournal.server.JournalNode;
import org.apache.hadoop.net.NetUtils;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
/**
 * An in-process mini cluster of {@link JournalNode}s for tests. Nodes bind to
 * ephemeral localhost ports and store their edits under a per-node directory
 * beneath the configured base directory.
 */
public class MiniJournalCluster {
/** Fluent builder for {@link MiniJournalCluster}. */
public static class Builder {
private String baseDir;
private int numJournalNodes = 3;
private boolean format = true;
private final Configuration conf;
public Builder(Configuration conf) {
this.conf = conf;
}
public Builder baseDir(String d) {
this.baseDir = d;
return this;
}
public Builder numJournalNodes(int n) {
this.numJournalNodes = n;
return this;
}
public Builder format(boolean f) {
this.format = f;
return this;
}
public MiniJournalCluster build() throws IOException {
return new MiniJournalCluster(this);
}
}
/**
 * Bookkeeping for one JournalNode: the running node plus the addresses it was
 * originally bound to. The addresses are final so a restarted node can be
 * re-bound to the same ports (see {@link #restartJournalNode(int)}).
 */
private static final class JNInfo {
private JournalNode node;
private final InetSocketAddress ipcAddr;
private final String httpServerURI;
private JNInfo(JournalNode node) {
this.node = node;
this.ipcAddr = node.getBoundIpcAddress();
this.httpServerURI = node.getHttpServerURI();
}
}
private static final Log LOG = LogFactory.getLog(MiniJournalCluster.class);
private final File baseDir;
private final JNInfo[] nodes;
// Creates, configures and starts all JournalNodes; optionally wipes each
// node's storage directory first when the builder requested formatting.
private MiniJournalCluster(Builder b) throws IOException {
LOG.info("Starting MiniJournalCluster with " +
b.numJournalNodes + " journal nodes");
if (b.baseDir != null) {
this.baseDir = new File(b.baseDir);
} else {
this.baseDir = new File(MiniDFSCluster.getBaseDirectory());
}
nodes = new JNInfo[b.numJournalNodes];
for (int i = 0; i < b.numJournalNodes; i++) {
if (b.format) {
File dir = getStorageDir(i);
LOG.debug("Fully deleting JN directory " + dir);
FileUtil.fullyDelete(dir);
}
JournalNode jn = new JournalNode();
jn.setConf(createConfForNode(b, i));
jn.start();
nodes[i] = new JNInfo(jn);
}
}
/**
 * Set up the given Configuration object to point to the set of JournalNodes
 * in this cluster.
 */
public URI getQuorumJournalURI(String jid) {
List<String> addrs = Lists.newArrayList();
for (JNInfo info : nodes) {
addrs.add("127.0.0.1:" + info.ipcAddr.getPort());
}
String addrsVal = Joiner.on(";").join(addrs);
LOG.debug("Setting logger addresses to: " + addrsVal);
try {
return new URI("qjournal://" + addrsVal + "/" + jid);
} catch (URISyntaxException e) {
// Cannot happen: the URI is built from valid host:port pairs and the journal id.
throw new AssertionError(e);
}
}
/**
 * Start the JournalNodes in the cluster.
 */
public void start() throws IOException {
for (JNInfo info : nodes) {
info.node.start();
}
}
/**
 * Shutdown all of the JournalNodes in the cluster.
 * @throws IOException if one or more nodes failed to stop
 */
public void shutdown() throws IOException {
boolean failed = false;
// Attempt to stop every node even if an earlier one fails; report at the end.
for (JNInfo info : nodes) {
try {
info.node.stopAndJoin(0);
} catch (Exception e) {
failed = true;
LOG.warn("Unable to stop journal node " + info.node, e);
}
}
if (failed) {
throw new IOException("Unable to shut down. Check log for details");
}
}
// Builds the per-node Configuration: a private edits dir and ephemeral
// (port 0) RPC/HTTP addresses so parallel test runs don't collide.
private Configuration createConfForNode(Builder b, int idx) {
Configuration conf = new Configuration(b.conf);
File logDir = getStorageDir(idx);
conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY, logDir.toString());
conf.set(DFSConfigKeys.DFS_JOURNALNODE_RPC_ADDRESS_KEY, "localhost:0");
conf.set(DFSConfigKeys.DFS_JOURNALNODE_HTTP_ADDRESS_KEY, "localhost:0");
return conf;
}
/** Returns the storage directory for node {@code idx}. */
public File getStorageDir(int idx) {
return new File(baseDir, "journalnode-" + idx).getAbsoluteFile();
}
public File getJournalDir(int idx, String jid) {
return new File(getStorageDir(idx), jid);
}
public File getCurrentDir(int idx, String jid) {
return new File(getJournalDir(idx, jid), "current");
}
public File getPreviousDir(int idx, String jid) {
return new File(getJournalDir(idx, jid), "previous");
}
public JournalNode getJournalNode(int i) {
return nodes[i].node;
}
/**
 * Stops node {@code i} (if running) and starts a replacement bound to the
 * same RPC and HTTP addresses the original node was using.
 */
public void restartJournalNode(int i) throws InterruptedException, IOException {
JNInfo info = nodes[i];
JournalNode jn = info.node;
Configuration conf = new Configuration(jn.getConf());
if (jn.isStarted()) {
jn.stopAndJoin(0);
}
// Pin the replacement to the original IPC address instead of an ephemeral port.
conf.set(DFSConfigKeys.DFS_JOURNALNODE_RPC_ADDRESS_KEY,
NetUtils.getHostPortString(info.ipcAddr));
final String uri = info.httpServerURI;
if (uri.startsWith("http://")) {
conf.set(DFSConfigKeys.DFS_JOURNALNODE_HTTP_ADDRESS_KEY,
uri.substring(("http://".length())));
} else if (info.httpServerURI.startsWith("https://")) {
conf.set(DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_KEY,
uri.substring(("https://".length())));
}
JournalNode newJN = new JournalNode();
newJN.setConf(conf);
newJN.start();
info.node = newJN;
}
/** Returns the majority size for this cluster (floor(n/2) + 1). */
public int getQuorumSize() {
return nodes.length / 2 + 1;
}
public int getNumNodes() {
return nodes.length;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.curator.framework.recipes.leader;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.utils.CloseableExecutorService;
import org.apache.curator.utils.PathUtils;
import org.apache.curator.utils.ThreadUtils;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.AbstractExecutorService;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
* <p>
* Abstraction to select a "leader" amongst multiple contenders in a group of JMVs connected
* to a Zookeeper cluster. If a group of N thread/processes contends for leadership, one will
* be assigned leader until it releases leadership at which time another one from the group will
* be chosen.
* </p>
* <p>
* Note that this class uses an underlying {@link InterProcessMutex} and as a result leader
* election is "fair" - each user will become leader in the order originally requested
* (from ZK's point of view).
* </p>
*/
public class LeaderSelector implements Closeable
{
private final Logger log = LoggerFactory.getLogger(getClass());
private final CuratorFramework client;
private final LeaderSelectorListener listener;
private final CloseableExecutorService executorService;
private final InterProcessMutex mutex;
private final AtomicReference<State> state = new AtomicReference<State>(State.LATENT);
private final AtomicBoolean autoRequeue = new AtomicBoolean(false);
private final AtomicReference<Future<?>> ourTask = new AtomicReference<Future<?>>(null);
private volatile boolean hasLeadership;
private volatile String id = "";
@VisibleForTesting
volatile CountDownLatch debugLeadershipLatch = null;
volatile CountDownLatch debugLeadershipWaitLatch = null;
private enum State
{
LATENT,
STARTED,
CLOSED
}
// guarded by synchronization
private boolean isQueued = false;
private static final ThreadFactory defaultThreadFactory = ThreadUtils.newThreadFactory("LeaderSelector");
/**
* @param client the client
* @param leaderPath the path for this leadership group
* @param listener listener
*/
public LeaderSelector(CuratorFramework client, String leaderPath, LeaderSelectorListener listener)
{
// Default executor: a dedicated single thread that is owned (and closed) by this selector.
this(client, leaderPath, new CloseableExecutorService(Executors.newSingleThreadExecutor(defaultThreadFactory), true), listener);
}
/**
* @param client the client
* @param leaderPath the path for this leadership group
* @param threadFactory factory to use for making internal threads
* @param executor the executor to run in
* @param listener listener
* @deprecated This constructor was poorly thought out. Custom executor is useless. Use this version instead: {@link #LeaderSelector(CuratorFramework, String, ExecutorService, LeaderSelectorListener)}
*/
@SuppressWarnings("UnusedParameters")
@Deprecated
public LeaderSelector(CuratorFramework client, String leaderPath, ThreadFactory threadFactory, Executor executor, LeaderSelectorListener listener)
{
// threadFactory is intentionally ignored (see the deprecation note); the executor is
// wrapped in an owned CloseableExecutorService that is shut down when this selector closes.
this(client, leaderPath, new CloseableExecutorService(wrapExecutor(executor), true), listener);
}
/**
* @param client the client
* @param leaderPath the path for this leadership group
* @param executorService thread pool to use
* @param listener listener
*/
public LeaderSelector(CuratorFramework client, String leaderPath, ExecutorService executorService, LeaderSelectorListener listener)
{
// NOTE(review): wrapped without the shutdown-on-close flag used by the other constructors,
// so presumably the caller keeps shutdown responsibility - confirm CloseableExecutorService's default.
this(client, leaderPath, new CloseableExecutorService(executorService), listener);
}
/**
* @param client the client
* @param leaderPath the path for this leadership group
* @param executorService thread pool to use
* @param listener listener
*/
public LeaderSelector(CuratorFramework client, String leaderPath, CloseableExecutorService executorService, LeaderSelectorListener listener)
{
// Fail fast on bad arguments before any state is initialized.
Preconditions.checkNotNull(client, "client cannot be null");
PathUtils.validatePath(leaderPath);
Preconditions.checkNotNull(listener, "listener cannot be null");
this.client = client;
// Wrap the user listener so this class can add its own handling around callbacks.
this.listener = new WrappedListener(this, listener);
hasLeadership = false;
this.executorService = executorService;
// The lock node carries this selector's id (when set) so participants can be reported by id.
mutex = new InterProcessMutex(client, leaderPath)
{
@Override
protected byte[] getLockNodeBytes()
{
return (id.length() > 0) ? getIdBytes(id) : null;
}
};
}
/**
 * Encodes the participant id as UTF-8 for storage in the lock node.
 *
 * @param id participant id
 * @return the UTF-8 bytes of the id
 */
static byte[] getIdBytes(String id)
{
    // StandardCharsets.UTF_8 is guaranteed present, so the old checked
    // UnsupportedEncodingException (and the Error it was wrapped in) cannot occur.
    return id.getBytes(StandardCharsets.UTF_8);
}
/**
* By default, when {@link LeaderSelectorListener#takeLeadership(CuratorFramework)} returns, this
* instance is not requeued. Calling this method puts the leader selector into a mode where it
* will always requeue itself.
*/
public void autoRequeue()
{
// One-way switch: once enabled, every completed leadership session re-queues itself
// (see the finally block in internalRequeue's work task).
autoRequeue.set(true);
}
/**
* Sets the ID to store for this leader. Will be the value returned
* when {@link #getParticipants()} is called. IMPORTANT: must be called
* prior to {@link #start()} to have effect.
*
* @param id ID
*/
public void setId(String id)
{
Preconditions.checkNotNull(id, "id cannot be null");
// Volatile write; only affects the lock node contents if done before start().
this.id = id;
}
/**
* Return the ID that was set via {@link #setId(String)}
*
* @return id
*/
public String getId()
{
// Volatile read of the id set via setId(); defaults to "".
return id;
}
/**
 * Attempt leadership. This attempt is done in the background - i.e. this method returns
 * immediately.<br><br>
 * <b>IMPORTANT: </b> previous versions allowed this method to be called multiple times. This
 * is no longer supported. Use {@link #requeue()} for this purpose.
 */
public void start()
{
    // LATENT -> STARTED may only happen once per instance
    Preconditions.checkState(state.compareAndSet(State.LATENT, State.STARTED), "Cannot be started more than once");
    Preconditions.checkState(!executorService.isShutdown(), "Already started");
    Preconditions.checkState(!hasLeadership, "Already has leadership");

    // Watch connection state so leadership can be cancelled on loss/expiry.
    client.getConnectionStateListenable().addListener(listener);
    requeue();
}
/**
 * Re-queue an attempt for leadership. If this instance is already queued, nothing
 * happens and false is returned. If the instance was not queued, it is re-queued and true
 * is returned
 *
 * @return true if re-queue is successful
 */
public boolean requeue()
{
    Preconditions.checkState(state.get() == State.STARTED, "close() has already been called");
    return internalRequeue();
}
// Submits the leadership work loop unless one is already queued. synchronized
// so the isQueued check-then-set is atomic with respect to clearIsQueued().
private synchronized boolean internalRequeue()
{
    if ( !isQueued && (state.get() == State.STARTED) )
    {
        isQueued = true;
        Future<Void> task = executorService.submit(new Callable<Void>()
        {
            @Override
            public Void call() throws Exception
            {
                try
                {
                    doWorkLoop();
                }
                finally
                {
                    clearIsQueued();
                    // In autoRequeue mode, immediately queue another attempt.
                    if ( autoRequeue.get() )
                    {
                        internalRequeue();
                    }
                }
                return null;
            }
        });
        // Remember the task so interruptLeadership() can cancel it.
        ourTask.set(task);
        return true;
    }
    return false;
}
/**
 * Shutdown this selector and remove yourself from the leadership group
 */
public synchronized void close()
{
    // STARTED -> CLOSED may only happen once, and only after start().
    Preconditions.checkState(state.compareAndSet(State.STARTED, State.CLOSED), "Already closed or has not been started");

    client.getConnectionStateListenable().removeListener(listener);
    executorService.close();
    ourTask.set(null);
}
/**
 * <p>
 * Returns the set of current participants in the leader selection
 * </p>
 * <p>
 * <p>
 * <B>NOTE</B> - this method polls the ZK server. Therefore it can possibly
 * return a value that does not match {@link #hasLeadership()} as hasLeadership
 * uses a local field of the class.
 * </p>
 *
 * @return participants
 * @throws Exception ZK errors, interruptions, etc.
 */
public Collection<Participant> getParticipants() throws Exception
{
    Collection<String> participantNodes = mutex.getParticipantNodes();
    return getParticipants(client, participantNodes);
}
/**
 * Resolve each participant node path into a {@link Participant}, marking the
 * first resolvable node as the leader (participant nodes are ordered).
 */
static Collection<Participant> getParticipants(CuratorFramework client, Collection<String> participantNodes) throws Exception
{
    ImmutableList.Builder<Participant> participants = ImmutableList.builder();
    boolean leaderPending = true;
    for ( String node : participantNodes )
    {
        Participant resolved = participantForPath(client, node, leaderPending);
        if ( resolved == null )
        {
            continue;   // node disappeared between listing and read - skip it
        }
        participants.add(resolved);
        leaderPending = false;  // by definition the first node is the leader
    }
    return participants.build();
}
/**
 * <p>
 * Return the id for the current leader. If for some reason there is no
 * current leader, a dummy participant is returned.
 * </p>
 * <p>
 * <p>
 * <B>NOTE</B> - this method polls the ZK server. Therefore it can possibly
 * return a value that does not match {@link #hasLeadership()} as hasLeadership
 * uses a local field of the class.
 * </p>
 *
 * @return leader
 * @throws Exception ZK errors, interruptions, etc.
 */
public Participant getLeader() throws Exception
{
    Collection<String> participantNodes = mutex.getParticipantNodes();
    return getLeader(client, participantNodes);
}
/**
 * Return the first resolvable participant, marked as leader; when no node
 * can be resolved (or the collection is empty), return a dummy
 * {@link Participant}.
 */
static Participant getLeader(CuratorFramework client, Collection<String> participantNodes) throws Exception
{
    for ( String node : participantNodes )
    {
        Participant candidate = participantForPath(client, node, true);
        if ( candidate != null )
        {
            return candidate;
        }
    }
    return new Participant();
}
/**
 * Return true if leadership is currently held by this instance
 *
 * @return true/false
 */
public boolean hasLeadership()
{
    // Local field only - no ZK round trip (see note on getParticipants()).
    return hasLeadership;
}
/**
 * Attempt to cancel and interrupt the current leadership if this instance has leadership
 */
public synchronized void interruptLeadership()
{
    // Cancel with interruption so a listener blocked in takeLeadership() wakes up.
    Future<?> task = ourTask.get();
    if ( task != null )
    {
        task.cancel(true);
    }
}
/**
 * Read a participant node and build a {@link Participant} from its id bytes.
 *
 * @return the participant, or {@code null} if the node no longer exists
 */
private static Participant participantForPath(CuratorFramework client, String path, boolean markAsLeader) throws Exception
{
    try
    {
        byte[] bytes = client.getData().forPath(path);
        // Charset constant avoids the String-name overload's checked (and
        // impossible) UnsupportedEncodingException.
        String thisId = new String(bytes, java.nio.charset.StandardCharsets.UTF_8);
        return new Participant(thisId, markAsLeader);
    }
    catch ( KeeperException.NoNodeException e )
    {
        // Node deleted between listing and read - callers treat null as "skip".
        return null;
    }
}
// Test hook: when non-null, counts mutex.release() failures in doWork()'s cleanup.
@VisibleForTesting
volatile AtomicInteger failedMutexReleaseCount = null;
// One leadership attempt: acquire the mutex, hand control to the listener,
// then release the mutex no matter how the listener exits. Interrupts are
// re-asserted and propagated; other listener throwables are swallowed after
// an interrupt check.
@VisibleForTesting
void doWork() throws Exception
{
    hasLeadership = false;
    try
    {
        // Blocks until this instance holds the leadership mutex.
        mutex.acquire();

        hasLeadership = true;
        try
        {
            // Debug latches - presumably set only by tests (TODO confirm).
            if ( debugLeadershipLatch != null )
            {
                debugLeadershipLatch.countDown();
            }
            if ( debugLeadershipWaitLatch != null )
            {
                debugLeadershipWaitLatch.await();
            }
            // Leadership lasts exactly as long as this call runs.
            listener.takeLeadership(client);
        }
        catch ( InterruptedException e )
        {
            Thread.currentThread().interrupt();
            throw e;
        }
        catch ( Throwable e )
        {
            ThreadUtils.checkInterrupted(e);
        }
        finally
        {
            clearIsQueued();
        }
    }
    catch ( InterruptedException e )
    {
        Thread.currentThread().interrupt();
        throw e;
    }
    finally
    {
        if ( hasLeadership )
        {
            hasLeadership = false;
            boolean wasInterrupted = Thread.interrupted(); // clear any interrupted status so that mutex.release() works immediately
            try
            {
                mutex.release();
            }
            catch ( Exception e )
            {
                if ( failedMutexReleaseCount != null )
                {
                    failedMutexReleaseCount.incrementAndGet();
                }
                ThreadUtils.checkInterrupted(e);
                log.error("The leader threw an exception", e);
                // ignore errors - this is just a safety
            }
            finally
            {
                // Restore the interrupt flag cleared above, if it was set.
                if ( wasInterrupted )
                {
                    Thread.currentThread().interrupt();
                }
            }
        }
    }
}
/**
 * Run one leadership attempt, treating connection loss and session expiry as
 * recoverable: in autoRequeue mode they are swallowed (the caller requeues),
 * otherwise they are rethrown to the submitted task.
 */
private void doWorkLoop() throws Exception
{
    KeeperException recoverableError = null;
    try
    {
        doWork();
    }
    catch ( KeeperException.ConnectionLossException e )
    {
        recoverableError = e;
    }
    catch ( KeeperException.SessionExpiredException e )
    {
        recoverableError = e;
    }
    catch ( InterruptedException ignore )
    {
        Thread.currentThread().interrupt();
    }

    if ( recoverableError == null )
    {
        return;
    }
    // autoRequeue should ignore connection loss or session expired and just keep trying
    if ( !autoRequeue.get() )
    {
        throw recoverableError;
    }
}
// synchronized to pair with internalRequeue()'s check-then-set of isQueued.
private synchronized void clearIsQueued()
{
    isQueued = false;
}
// temporary wrapper for deprecated constructor
// Adapts a bare Executor to the ExecutorService API. It is effectively
// single-use: execute() marks the wrapper shutdown/terminated after running
// one command, and awaitTermination() is unsupported.
private static ExecutorService wrapExecutor(final Executor executor)
{
    return new AbstractExecutorService()
    {
        private volatile boolean isShutdown = false;
        private volatile boolean isTerminated = false;

        @Override
        public void shutdown()
        {
            isShutdown = true;
        }

        @Override
        public List<Runnable> shutdownNow()
        {
            // Nothing is queued here; the underlying executor owns pending work.
            return Lists.newArrayList();
        }

        @Override
        public boolean isShutdown()
        {
            return isShutdown;
        }

        @Override
        public boolean isTerminated()
        {
            return isTerminated;
        }

        @Override
        public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public void execute(Runnable command)
        {
            try
            {
                executor.execute(command);
            }
            finally
            {
                // Mark the wrapper as done even when execute() throws.
                isShutdown = true;
                isTerminated = true;
            }
        }
    };
}
// Decorates the user-supplied listener: leadership calls pass straight
// through, while a CancelLeadershipException thrown from stateChanged()
// interrupts the selector's leadership task.
private static class WrappedListener implements LeaderSelectorListener
{
    private final LeaderSelector leaderSelector;
    private final LeaderSelectorListener listener;

    public WrappedListener(LeaderSelector leaderSelector, LeaderSelectorListener listener)
    {
        this.leaderSelector = leaderSelector;
        this.listener = listener;
    }

    @Override
    public void takeLeadership(CuratorFramework client) throws Exception
    {
        listener.takeLeadership(client);
    }

    @Override
    public void stateChanged(CuratorFramework client, ConnectionState newState)
    {
        try
        {
            listener.stateChanged(client, newState);
        }
        catch ( CancelLeadershipException dummy )
        {
            // The listener asked to abandon leadership on this state change.
            leaderSelector.interruptLeadership();
        }
    }
}
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.heneryh.aquanotes.ui;
import java.sql.Date;
import java.text.SimpleDateFormat;
import com.heneryh.aquanotes.R;
import com.heneryh.aquanotes.provider.AquaNotesDbContract;
import com.heneryh.aquanotes.provider.AquaNotesDbContract.Data;
import com.heneryh.aquanotes.provider.AquaNotesDbContract.Probes;
import com.heneryh.aquanotes.util.ActivityHelper;
import com.heneryh.aquanotes.util.AnalyticsUtils;
import com.heneryh.aquanotes.util.NotifyingAsyncQueryHandler;
import com.heneryh.aquanotes.util.UIUtils;
import android.content.Context;
import android.content.Intent;
import android.database.ContentObserver;
import android.database.Cursor;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.provider.BaseColumns;
import android.support.v4.app.ListFragment;
import android.text.Spannable;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CursorAdapter;
import android.widget.ListView;
import android.widget.TextView;
import static com.heneryh.aquanotes.util.UIUtils.buildStyledSnippet;
import static com.heneryh.aquanotes.util.UIUtils.formatSessionSubtitle;
/**
* A {@link ListFragment} showing a list of sessions.
*/
public class DbMaintDataFragment extends ListFragment implements
        NotifyingAsyncQueryHandler.AsyncQueryListener {

    // Instance number supplied via newInstance(); defaults to 1 in onCreate().
    int mNum;

    public static final String EXTRA_SCHEDULE_TIME_STRING =
            "com.google.android.iosched.extra.SCHEDULE_TIME_STRING";

    private static final String STATE_CHECKED_POSITION = "checkedPosition";

    private Uri mTrackUri;                       // optional track hint forwarded to detail views
    private Cursor mCursor;                      // currently managed list cursor
    private CursorAdapter mAdapter;
    private int mCheckedPosition = -1;           // checked list row; -1 when nothing is checked
    private boolean mHasSetEmptyText = false;
    private NotifyingAsyncQueryHandler mHandler; // runs provider queries off the UI thread
    private Handler mMessageQueueHandler = new Handler();

    /**
     * Create a new instance of CountingFragment, providing "num"
     * as an argument.
     */
    static DbMaintDataFragment newInstance(int num) {
        DbMaintDataFragment f = new DbMaintDataFragment();

        // Supply num input as an argument.
        Bundle args = new Bundle();
        args.putInt("num", num);
        f.setArguments(args);

        return f;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mNum = getArguments() != null ? getArguments().getInt("num") : 1;
        mHandler = new NotifyingAsyncQueryHandler(getActivity().getContentResolver(), this);
        reloadFromArguments(getArguments());
    }

    // /**
    //  * The Fragment's UI is xxx.
    //  */
    // @Override
    // public View onCreateView(LayoutInflater inflater, ViewGroup container,
    //         Bundle savedInstanceState) {
    //     View v = inflater.inflate(R.id.fragment_data, container, false);
    //     View tv = v.findViewById(R.id.text);
    //     ((TextView)tv).setText("Fragment #" + mNum);
    //     tv.setBackgroundDrawable(getResources().getDrawable(android.R.drawable.gallery_thumb));
    //     return v;
    // }

    /**
     * Tear down any previous query/cursor state and start a fresh background
     * query for probe data, plus an optional track-info query.
     */
    public void reloadFromArguments(Bundle arguments) {
        // Teardown from previous arguments
        if (mCursor != null) {
            getActivity().stopManagingCursor(mCursor);
            mCursor = null;
        }
        mCheckedPosition = -1;
        setListAdapter(null);

        // Cancel any in-flight queries from a previous reload.
        mHandler.cancelOperation(SearchQuery._TOKEN);
        mHandler.cancelOperation(PDataQuery._TOKEN);
        mHandler.cancelOperation(TracksQuery._TOKEN);

        // Load new arguments
        final Intent intent = BaseActivity.fragmentArgumentsToIntent(arguments);
        //final Uri dataUri = intent.getData();
        final int dataQueryToken;
        // NOTE(review): the intent's data URI is ignored and the probe-data
        // content URI is hard-coded - confirm this is intentional.
        final Uri dataUri = Data.CONTENT_URI; // not getting from intent
        if (dataUri == null) {
            return;
        }

        String[] projection;
        // if (!AquaNotesDbContract.Sessions.isSearchUri(dataUri)) {
        mAdapter = new DataAdapter(getActivity());
        projection = PDataQuery.PROJECTION;
        dataQueryToken = PDataQuery._TOKEN;
        // } else {
        //     mAdapter = new SearchAdapter(getActivity());
        //     projection = SearchQuery.PROJECTION;
        //     sessionQueryToken = SearchQuery._TOKEN;
        // }
        setListAdapter(mAdapter);

        // Start background query to load sessions
        mHandler.startQuery(dataQueryToken, null, dataUri, projection, null, null,
                AquaNotesDbContract.Data.DEFAULT_SORT);

        // If caller launched us with specific track hint, pass it along when
        // launching session details. Also start a query to load the track info.
        mTrackUri = intent.getParcelableExtra(SessionDetailFragment.EXTRA_TRACK);
        if (mTrackUri != null) {
            mHandler.startQuery(TracksQuery._TOKEN, mTrackUri, TracksQuery.PROJECTION);
        }
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        getListView().setChoiceMode(ListView.CHOICE_MODE_SINGLE);
        if (savedInstanceState != null) {
            // Restore the previously checked row after a configuration change.
            mCheckedPosition = savedInstanceState.getInt(STATE_CHECKED_POSITION, -1);
        }

        if (!mHasSetEmptyText) {
            // Could be a bug, but calling this twice makes it become visible when it shouldn't
            // be visible.
            setEmptyText(getString(R.string.empty_sessions));
            mHasSetEmptyText = true;
        }
    }

    /** {@inheritDoc} */
    public void onQueryComplete(int token, Object cookie, Cursor cursor) {
        if (getActivity() == null) {
            // Fragment was detached before the async query finished.
            return;
        }

        // Dispatch on the token used when the query was started.
        if (token == PDataQuery._TOKEN || token == SearchQuery._TOKEN) {
            onDataOrSearchQueryComplete(cursor);
        } else if (token == TracksQuery._TOKEN) {
            onTrackQueryComplete(cursor);
        } else {
            Log.d("DbMaintDataFragment/onQueryComplete", "Query complete, Not Actionable: " + token);
            cursor.close();
        }
    }

    /**
     * Handle {@link SessionsQuery} {@link Cursor}.
     */
    private void onDataOrSearchQueryComplete(Cursor cursor) {
        if (mCursor != null) {
            // In case cancelOperation() doesn't work and we end up with consecutive calls to this
            // callback.
            getActivity().stopManagingCursor(mCursor);
            mCursor = null;
        }

        // Hand the new cursor to the activity's lifecycle management and the adapter.
        mCursor = cursor;
        getActivity().startManagingCursor(mCursor);
        mAdapter.changeCursor(mCursor);
        if (mCheckedPosition >= 0 && getView() != null) {
            getListView().setItemChecked(mCheckedPosition, true);
        }
    }

    /**
     * Handle {@link TracksQuery} {@link Cursor}.
     */
    private void onTrackQueryComplete(Cursor cursor) {
        try {
            if (!cursor.moveToFirst()) {
                return;
            }

            // Use found track to build title-bar
            ActivityHelper activityHelper = ((BaseActivity) getActivity()).getActivityHelper();
            String trackName = cursor.getString(TracksQuery.TRACK_NAME);
            activityHelper.setActionBarTitle(trackName);
            activityHelper.setActionBarColor(cursor.getInt(TracksQuery.TRACK_COLOR));

            AnalyticsUtils.getInstance(getActivity()).trackPageView("/Tracks/" + trackName);
        } finally {
            // This cursor is single-use; always close it.
            cursor.close();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        mMessageQueueHandler.post(mRefreshSessionsRunnable);
        // NOTE(review): the observer watches the Sessions URI while the list
        // shows probe Data rows - verify this is the intended URI.
        getActivity().getContentResolver().registerContentObserver(
                AquaNotesDbContract.Sessions.CONTENT_URI, true, mSessionChangesObserver);
        if (mCursor != null) {
            mCursor.requery();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        mMessageQueueHandler.removeCallbacks(mRefreshSessionsRunnable);
        getActivity().getContentResolver().unregisterContentObserver(mSessionChangesObserver);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_CHECKED_POSITION, mCheckedPosition);
    }

    /** {@inheritDoc} */
    @Override
    public void onListItemClick(ListView l, View v, int position, long id) {
        // Launch viewer for specific session, passing along any track knowledge
        // that should influence the title-bar.
        // NOTE(review): this cursor is backed by PDataQuery.PROJECTION, which
        // does not appear to include Sessions.SESSION_ID - getColumnIndex()
        // would return -1 here; confirm this path is ever exercised.
        final Cursor cursor = (Cursor)mAdapter.getItem(position);
        final String sessionId = cursor.getString(cursor.getColumnIndex(
                AquaNotesDbContract.Sessions.SESSION_ID));
        final Uri sessionUri = AquaNotesDbContract.Sessions.buildSessionUri(sessionId);
        final Intent intent = new Intent(Intent.ACTION_VIEW, sessionUri);
        intent.putExtra(SessionDetailFragment.EXTRA_TRACK, mTrackUri);
        ((BaseActivity) getActivity()).openActivityOrFragment(intent);

        getListView().setItemChecked(position, true);
        mCheckedPosition = position;
    }

    public void clearCheckedPosition() {
        if (mCheckedPosition >= 0) {
            getListView().setItemChecked(mCheckedPosition, false);
            mCheckedPosition = -1;
        }
    }

    /**
     * {@link CursorAdapter} that renders a {@link SessionsQuery}.
     */
    private class DataAdapter extends CursorAdapter {
        public DataAdapter(Context context) {
            super(context, null);
        }

        /** {@inheritDoc} */
        @Override
        public View newView(Context context, Cursor cursor, ViewGroup parent) {
            return getActivity().getLayoutInflater().inflate(R.layout.list_item_datum, parent,
                    false);
        }

        /** {@inheritDoc} */
        @Override
        public void bindView(View view, Context context, Cursor cursor) {
            final TextView titleView = (TextView) view.findViewById(R.id.datum_name);
            final TextView subtitleView = (TextView) view.findViewById(R.id.datum_timstamp);

            // Line 1: "name = value" for the probe reading.
            String name = cursor.getString(PDataQuery.NAME);
            String value = cursor.getString(PDataQuery.VALUE);
            long timestampL = cursor.getLong(PDataQuery.TIMESTAMP);
            String line1 = name + " = " + value;
            titleView.setText(line1);

            // Format time block this session occupies
            // final long blockStart = cursor.getLong(SessionsQuery.BLOCK_START);
            // final long blockEnd = cursor.getLong(SessionsQuery.BLOCK_END);
            // final String roomName = cursor.getString(SessionsQuery.ROOM_NAME);
            // final String subtitle = formatSessionSubtitle(timestamp, timestamp, " ", context);

            // Line 2: reading timestamp, e.g. "3/14/12 2:05 PM".
            // NOTE(review): SimpleDateFormat is created per row; consider
            // caching - but note it is not thread-safe if shared.
            Date timestampD = new Date(timestampL);
            SimpleDateFormat formatter = new SimpleDateFormat("M/d/yy h:mm a");
            String timestampS = formatter.format(timestampD);
            subtitleView.setText(timestampS);

            // Star is currently always hidden for data rows.
            final boolean starred = false; //cursor.getInt(SessionsQuery.STARRED) != 0;
            view.findViewById(R.id.star_button).setVisibility(
                    starred ? View.VISIBLE : View.INVISIBLE);

            // Possibly indicate that the session has occurred in the past.
            // UIUtils.setSessionTitleColor(blockStart, blockEnd, titleView, subtitleView);
        }
    }

    /**
     * {@link CursorAdapter} that renders a {@link SearchQuery}.
     */
    private class SearchAdapter extends CursorAdapter {
        public SearchAdapter(Context context) {
            super(context, null);
        }

        /** {@inheritDoc} */
        @Override
        public View newView(Context context, Cursor cursor, ViewGroup parent) {
            return getActivity().getLayoutInflater().inflate(R.layout.list_item_datum_oneline, parent,
                    false);
        }

        /** {@inheritDoc} */
        @Override
        public void bindView(View view, Context context, Cursor cursor) {
            ((TextView) view.findViewById(R.id.session_title)).setText(cursor
                    .getString(SearchQuery.TITLE));

            // Render the provider-supplied snippet with highlight styling.
            final String snippet = cursor.getString(SearchQuery.SEARCH_SNIPPET);
            final Spannable styledSnippet = buildStyledSnippet(snippet);
            ((TextView) view.findViewById(R.id.session_subtitle)).setText(styledSnippet);

            final boolean starred = cursor.getInt(SearchQuery.STARRED) != 0;
            view.findViewById(R.id.star_button).setVisibility(
                    starred ? View.VISIBLE : View.INVISIBLE);
        }
    }

    // Requeries the managed cursor whenever the observed content URI changes.
    private ContentObserver mSessionChangesObserver = new ContentObserver(new Handler()) {
        @Override
        public void onChange(boolean selfChange) {
            if (mCursor != null) {
                mCursor.requery();
            }
        }
    };

    // Periodic UI refresh; reschedules itself on each run.
    private Runnable mRefreshSessionsRunnable = new Runnable() {
        public void run() {
            if (mAdapter != null) {
                // This is used to refresh session title colors.
                mAdapter.notifyDataSetChanged();
            }

            // Check again on the next quarter hour, with some padding to account for network
            // time differences.
            long nextQuarterHour = (SystemClock.uptimeMillis() / 900000 + 1) * 900000 + 5000;
            mMessageQueueHandler.postAtTime(mRefreshSessionsRunnable, nextQuarterHour);
        }
    };

    /**
     * {@link com.heneryh.aquanotes.provider.AquaNotesDbContract.Sessions} query parameters.
     */
    private interface SessionsQuery {
        int _TOKEN = 0x1;

        String[] PROJECTION = {
                BaseColumns._ID,
                AquaNotesDbContract.Sessions.SESSION_ID,
                AquaNotesDbContract.Sessions.SESSION_TITLE,
                AquaNotesDbContract.Sessions.SESSION_STARRED,
                AquaNotesDbContract.Blocks.BLOCK_START,
                AquaNotesDbContract.Blocks.BLOCK_END,
                AquaNotesDbContract.Rooms.ROOM_NAME,
        };

        // Column indexes, matching PROJECTION order above.
        int _ID = 0;
        int SESSION_ID = 1;
        int TITLE = 2;
        int STARRED = 3;
        int BLOCK_START = 4;
        int BLOCK_END = 5;
        int ROOM_NAME = 6;
    }

    /**
     * {@link com.heneryh.aquanotes.provider.AquaNotesDbContract.Tracks} query parameters.
     */
    private interface TracksQuery {
        int _TOKEN = 0x2;

        String[] PROJECTION = {
                AquaNotesDbContract.Tracks.TRACK_NAME,
                AquaNotesDbContract.Tracks.TRACK_COLOR,
        };

        // Column indexes, matching PROJECTION order above.
        int TRACK_NAME = 0;
        int TRACK_COLOR = 1;
    }

    /** {@link com.heneryh.aquanotes.provider.AquaNotesDbContract.Sessions} search query
     * parameters. */
    private interface SearchQuery {
        int _TOKEN = 0x3;

        String[] PROJECTION = {
                BaseColumns._ID,
                AquaNotesDbContract.Sessions.SESSION_ID,
                AquaNotesDbContract.Sessions.SESSION_TITLE,
                AquaNotesDbContract.Sessions.SEARCH_SNIPPET,
                AquaNotesDbContract.Sessions.SESSION_STARRED,
        };

        // Column indexes, matching PROJECTION order above.
        int _ID = 0;
        int SESSION_ID = 1;
        int TITLE = 2;
        int SEARCH_SNIPPET = 3;
        int STARRED = 4;
    }

    // Probe-data query parameters (the list's primary data source).
    private interface PDataQuery {
        int _TOKEN = 0x4;

        String[] PROJECTION = {
                BaseColumns._ID,
                AquaNotesDbContract.ViewProbeData.TYPE,
                AquaNotesDbContract.ViewProbeData.VALUE,
                AquaNotesDbContract.ViewProbeData.TIMESTAMP,
                AquaNotesDbContract.ViewProbeData.PARENT_ID,
                AquaNotesDbContract.ViewProbeData.NAME,
                AquaNotesDbContract.ViewProbeData.RESOURCE_ID,
                AquaNotesDbContract.ViewProbeData.CONTROLLER_ID,
        };

        // Column indexes, matching PROJECTION order above.
        int _ID = 0;
        int TYPE = 1;
        int VALUE = 2;
        int TIMESTAMP = 3;
        int PARENT_ID = 4;
        int NAME = 5;
        int RESOURCE_ID = 6;
        int CONTROLLER_ID = 7;
    }

    // Outlet-data query parameters (currently unused in this fragment).
    private interface ODataQuery {
        int _TOKEN = 0x5;

        String[] PROJECTION = {
                BaseColumns._ID,
                AquaNotesDbContract.ViewOutletData.TYPE,
                AquaNotesDbContract.ViewOutletData.VALUE,
                AquaNotesDbContract.ViewOutletData.TIMESTAMP,
                AquaNotesDbContract.ViewOutletData.PARENT_ID,
                AquaNotesDbContract.ViewOutletData.NAME,
                AquaNotesDbContract.ViewOutletData.DEVICE_ID,
                AquaNotesDbContract.ViewOutletData.RESOURCE_ID,
                AquaNotesDbContract.ViewOutletData.CONTROLLER_ID,
        };

        // Column indexes, matching PROJECTION order above.
        int _ID = 0;
        int TYPE = 1;
        int VALUE = 2;
        int TIMESTAMP = 3;
        int PARENT_ID = 4;
        int NAME = 5;
        int DEVICE_ID = 6;
        int RESOURCE_ID = 7;
        int CONTROLLER_ID = 8;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.hive.benchmark;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slices;
import io.trino.hadoop.HadoopNative;
import io.trino.plugin.hive.HiveCompressionCodec;
import io.trino.spi.Page;
import io.trino.spi.PageBuilder;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.connector.ConnectorPageSource;
import io.trino.spi.type.ArrayType;
import io.trino.spi.type.Type;
import io.trino.tpch.OrderColumn;
import it.unimi.dsi.fastutil.ints.IntArrays;
import org.openjdk.jmh.annotations.AuxCounters;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.results.RunResult;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.jmh.Benchmarks.benchmark;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static io.trino.plugin.hive.HiveTestUtils.SESSION;
import static io.trino.plugin.hive.HiveTestUtils.mapType;
import static io.trino.plugin.hive.benchmark.BenchmarkFileFormatsUtils.MIN_DATA_SIZE;
import static io.trino.plugin.hive.benchmark.BenchmarkFileFormatsUtils.createTempDir;
import static io.trino.plugin.hive.benchmark.BenchmarkFileFormatsUtils.createTpchDataSet;
import static io.trino.plugin.hive.benchmark.BenchmarkFileFormatsUtils.nextRandomBetween;
import static io.trino.plugin.hive.benchmark.BenchmarkFileFormatsUtils.printResults;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
import static io.trino.tpch.TpchTable.LINE_ITEM;
import static io.trino.tpch.TpchTable.ORDERS;
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.SECONDS)
@Measurement(iterations = 50)
@Warmup(iterations = 20)
@Fork(3)
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "UnstableApiUsage", "ResultOfMethodCallIgnored"})
public class BenchmarkHiveFileFormat
{
static {
    // Load Hadoop's native compression codecs once, before any benchmark runs.
    HadoopNative.requireHadoopNative();
}
// Data shape to benchmark; JMH injects each value in turn.
@Param({
        "LINEITEM",
        "BIGINT_SEQUENTIAL",
        "BIGINT_RANDOM",
        "VARCHAR_SMALL",
        "VARCHAR_LARGE",
        "VARCHAR_DICTIONARY",
        "MAP_VARCHAR_DOUBLE",
        "LARGE_MAP_VARCHAR_DOUBLE",
        "MAP_INT_DOUBLE",
        "LARGE_MAP_INT_DOUBLE",
        "LARGE_ARRAY_VARCHAR"})
private DataSet dataSet;

// Compression codec applied when writing the data file.
@Param({
        "NONE",
        "SNAPPY",
        "GZIP"})
private HiveCompressionCodec compression;

// Reader/writer implementation under test (Trino-native vs Hive serde).
@Param({
        "TRINO_RCBINARY",
        "TRINO_RCTEXT",
        "TRINO_ORC",
        "TRINO_PARQUET",
        "HIVE_RCBINARY",
        "HIVE_RCTEXT",
        "HIVE_ORC",
        "HIVE_PARQUET"})
private BenchmarkFileFormat benchmarkFileFormat;

// Derived per-trial state, populated in setup().
private FileFormat fileFormat;
private TestData data;
private File dataFile;

// Scratch directory for generated files; removed in tearDown().
private final File targetDir = createTempDir("trino-benchmark");
// JMH requires a public no-arg constructor; @Param fields are injected by the harness.
public BenchmarkHiveFileFormat()
{
}
// Convenience constructor for invoking the benchmark programmatically
// (outside the JMH harness) with explicit parameter values.
public BenchmarkHiveFileFormat(DataSet dataSet, HiveCompressionCodec compression, BenchmarkFileFormat fileFormat)
{
    this.dataSet = dataSet;
    this.compression = compression;
    this.benchmarkFileFormat = fileFormat;
}
// Generate the test data set and write it to a fresh file once per trial,
// so read() benchmarks a pre-existing file.
@Setup
public void setup()
        throws IOException
{
    fileFormat = benchmarkFileFormat.getFormat();
    data = dataSet.createTestData(fileFormat);

    targetDir.mkdirs();
    dataFile = new File(targetDir, UUID.randomUUID().toString());
    writeData(dataFile);
}
// Remove the scratch directory (including files produced by write() iterations).
@TearDown
public void tearDown()
        throws IOException
{
    deleteRecursively(targetDir.toPath(), ALLOW_INSECURE);
}
// JMH auxiliary counters: uncompressed input size vs on-disk output size,
// reported alongside throughput so compression ratio can be derived.
@SuppressWarnings("PublicField")
@AuxCounters
@State(Scope.Thread)
public static class CompressionCounter
{
    public long inputSize;
    public long outputSize;
}
/**
 * Benchmark: read the pre-written data file back through the selected format
 * reader, materializing every page.
 */
@Benchmark
public List<Page> read(CompressionCounter counter)
        throws IOException
{
    if (!fileFormat.supports(data)) {
        throw new RuntimeException(fileFormat + " does not support data set " + dataSet);
    }

    List<Page> pages = new ArrayList<>(100);
    try (ConnectorPageSource pageSource = fileFormat.createFileFormatReader(
            SESSION,
            HDFS_ENVIRONMENT,
            dataFile,
            data.getColumnNames(),
            data.getColumnTypes())) {
        while (!pageSource.isFinished()) {
            Page page = pageSource.getNextPage();
            if (page != null) {
                // Force lazy blocks to load so decode cost is measured here.
                pages.add(page.getLoadedPage());
            }
        }
    }
    counter.inputSize += data.getSize();
    counter.outputSize += dataFile.length();
    return pages;
}
/**
 * Benchmark: write the test data set to a new file with the selected format
 * and compression codec. Files accumulate in targetDir until tearDown().
 */
@Benchmark
public File write(CompressionCounter counter)
        throws IOException
{
    File targetFile = new File(targetDir, UUID.randomUUID().toString());
    writeData(targetFile);
    counter.inputSize += data.getSize();
    counter.outputSize += targetFile.length();
    return targetFile;
}
/**
 * Write every page of the current test data set to {@code targetFile} using
 * the configured file format and compression codec.
 */
private void writeData(File targetFile)
        throws IOException
{
    try (FormatWriter writer = fileFormat.createFileFormatWriter(
            SESSION,
            targetFile,
            data.getColumnNames(),
            data.getColumnTypes(),
            compression)) {
        for (Page page : data.getPages()) {
            writer.writePage(page);
        }
    }
}
public enum DataSet
{
LINEITEM {
    @Override
    public TestData createTestData(FileFormat format)
    {
        // Full TPC-H lineitem table: all columns, the broadest type coverage.
        return createTpchDataSet(format, LINE_ITEM, LINE_ITEM.getColumns());
    }
},
BIGINT_SEQUENTIAL {
    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single bigint column with mostly increasing values (orderkey).
        return createTpchDataSet(format, ORDERS, OrderColumn.ORDER_KEY);
    }
},
BIGINT_RANDOM {
    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single bigint column with non-sequential values (custkey).
        return createTpchDataSet(format, ORDERS, OrderColumn.CUSTOMER_KEY);
    }
},
VARCHAR_SMALL {
    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single short varchar column (clerk).
        return createTpchDataSet(format, ORDERS, OrderColumn.CLERK);
    }
},
VARCHAR_LARGE {
    @Override
    public TestData createTestData(FileFormat format)
    {
        // NOTE(review): identical to VARCHAR_SMALL (clerk). A longer varchar
        // column (e.g. the orders comment) was presumably intended - confirm.
        return createTpchDataSet(format, ORDERS, OrderColumn.CLERK);
    }
},
VARCHAR_DICTIONARY {
    @Override
    public TestData createTestData(FileFormat format)
    {
        // Low-cardinality varchar column (order priority) - exercises
        // dictionary encoding in columnar formats.
        return createTpchDataSet(format, ORDERS, OrderColumn.ORDER_PRIORITY);
    }
},
MAP_VARCHAR_DOUBLE {
    private static final int MIN_ENTRIES = 1;
    private static final int MAX_ENTRIES = 5;

    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single map<varchar, double> column with 1-5 entries per row, keys
        // drawn from a fixed five-key dictionary in random order.
        Type type = mapType(createUnboundedVarcharType(), DOUBLE);
        Random random = new Random(1234);   // fixed seed => reproducible data
        PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        int[] keys = {1, 2, 3, 4, 5};
        long dataSize = 0;
        while (dataSize < MIN_DATA_SIZE) {
            pageBuilder.declarePosition();
            BlockBuilder builder = pageBuilder.getBlockBuilder(0);
            BlockBuilder mapBuilder = builder.beginBlockEntry();
            int entries = nextRandomBetween(random, MIN_ENTRIES, MAX_ENTRIES);
            IntArrays.shuffle(keys, random);
            for (int entryId = 0; entryId < entries; entryId++) {
                createUnboundedVarcharType().writeSlice(mapBuilder, Slices.utf8Slice("key" + keys[entryId]));
                DOUBLE.writeDouble(mapBuilder, random.nextDouble());
            }
            builder.closeEntry();

            // Flush completed pages until enough data has been generated;
            // a trailing partial page is intentionally discarded.
            if (pageBuilder.isFull()) {
                Page page = pageBuilder.build();
                pages.add(page);
                pageBuilder.reset();
                dataSize += page.getSizeInBytes();
            }
        }
        return new TestData(ImmutableList.of("map"), ImmutableList.of(type), pages.build());
    }
},
LARGE_MAP_VARCHAR_DOUBLE {
    private static final int MIN_ENTRIES = 5_000;
    private static final int MAX_ENTRIES = 15_000;

    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single map<varchar, double> column with 5k-15k entries per row and
        // high-cardinality random keys.
        Type type = mapType(createUnboundedVarcharType(), DOUBLE);
        Random random = new Random(1234);   // fixed seed => reproducible data
        PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        long dataSize = 0;
        while (dataSize < MIN_DATA_SIZE) {
            pageBuilder.declarePosition();
            BlockBuilder builder = pageBuilder.getBlockBuilder(0);
            BlockBuilder mapBuilder = builder.beginBlockEntry();
            int entries = nextRandomBetween(random, MIN_ENTRIES, MAX_ENTRIES);
            for (int entryId = 0; entryId < entries; entryId++) {
                createUnboundedVarcharType().writeSlice(mapBuilder, Slices.utf8Slice("key" + random.nextInt(10_000_000)));
                DOUBLE.writeDouble(mapBuilder, random.nextDouble());
            }
            builder.closeEntry();

            // Flush completed pages until enough data has been generated.
            if (pageBuilder.isFull()) {
                Page page = pageBuilder.build();
                pages.add(page);
                pageBuilder.reset();
                dataSize += page.getSizeInBytes();
            }
        }
        return new TestData(ImmutableList.of("map"), ImmutableList.of(type), pages.build());
    }
},
MAP_INT_DOUBLE {
    private static final int MIN_ENTRIES = 1;
    private static final int MAX_ENTRIES = 5;

    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single map<int, double> column with 1-5 entries per row, keys drawn
        // from a fixed five-key set in random order.
        Type type = mapType(INTEGER, DOUBLE);
        Random random = new Random(1234);   // fixed seed => reproducible data
        PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        int[] keys = {1, 2, 3, 4, 5};
        long dataSize = 0;
        while (dataSize < MIN_DATA_SIZE) {
            pageBuilder.declarePosition();
            BlockBuilder builder = pageBuilder.getBlockBuilder(0);
            BlockBuilder mapBuilder = builder.beginBlockEntry();
            int entries = nextRandomBetween(random, MIN_ENTRIES, MAX_ENTRIES);
            IntArrays.shuffle(keys, random);
            for (int entryId = 0; entryId < entries; entryId++) {
                INTEGER.writeLong(mapBuilder, keys[entryId]);
                DOUBLE.writeDouble(mapBuilder, random.nextDouble());
            }
            builder.closeEntry();

            // Flush completed pages until enough data has been generated.
            if (pageBuilder.isFull()) {
                Page page = pageBuilder.build();
                pages.add(page);
                pageBuilder.reset();
                dataSize += page.getSizeInBytes();
            }
        }
        return new TestData(ImmutableList.of("map"), ImmutableList.of(type), pages.build());
    }
},
LARGE_MAP_INT_DOUBLE {
    private static final int MIN_ENTRIES = 5_000;
    // Was 15_0000 (i.e. 150,000) - a misplaced-underscore typo; the sibling
    // LARGE_MAP_VARCHAR_DOUBLE data set uses the 5_000..15_000 range.
    private static final int MAX_ENTRIES = 15_000;

    @Override
    public TestData createTestData(FileFormat format)
    {
        // Single map<int, double> column with 5k-15k random-keyed entries per row.
        Type type = mapType(INTEGER, DOUBLE);
        Random random = new Random(1234);   // fixed seed => reproducible data
        PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        long dataSize = 0;
        while (dataSize < MIN_DATA_SIZE) {
            pageBuilder.declarePosition();
            BlockBuilder builder = pageBuilder.getBlockBuilder(0);
            BlockBuilder mapBuilder = builder.beginBlockEntry();
            int entries = nextRandomBetween(random, MIN_ENTRIES, MAX_ENTRIES);
            for (int entryId = 0; entryId < entries; entryId++) {
                INTEGER.writeLong(mapBuilder, random.nextInt(10_000_000));
                DOUBLE.writeDouble(mapBuilder, random.nextDouble());
            }
            builder.closeEntry();

            // Flush completed pages until enough data has been generated.
            if (pageBuilder.isFull()) {
                Page page = pageBuilder.build();
                pages.add(page);
                pageBuilder.reset();
                dataSize += page.getSizeInBytes();
            }
        }
        return new TestData(ImmutableList.of("map"), ImmutableList.of(type), pages.build());
    }
},
// Test dataset: a single ARRAY(VARCHAR) column with very large arrays of
// short "key<digits>" strings.
LARGE_ARRAY_VARCHAR {
    private static final int MIN_ENTRIES = 5_000;
    // BUG FIX: was 15_0000 (== 150_000). Java permits underscores anywhere
    // between digits, so the literal compiled but made the upper bound 10x
    // the 15_000 implied by MIN_ENTRIES = 5_000 and the "large" 5k-15k range
    // also used by LARGE_MAP_INT_DOUBLE.
    private static final int MAX_ENTRIES = 15_000;

    @Override
    public TestData createTestData(FileFormat format)
    {
        Type type = new ArrayType(createUnboundedVarcharType());
        // Fixed seed keeps the generated data identical across benchmark runs.
        Random random = new Random(1234);
        PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        long dataSize = 0;
        while (dataSize < MIN_DATA_SIZE) {
            pageBuilder.declarePosition();
            BlockBuilder builder = pageBuilder.getBlockBuilder(0);
            // Renamed from "mapBuilder" (copy-paste from the map datasets):
            // this builds array elements, not map entries.
            BlockBuilder elementBuilder = builder.beginBlockEntry();
            int entries = nextRandomBetween(random, MIN_ENTRIES, MAX_ENTRIES);
            for (int entryId = 0; entryId < entries; entryId++) {
                createUnboundedVarcharType().writeSlice(elementBuilder, Slices.utf8Slice("key" + random.nextInt(10_000_000)));
            }
            builder.closeEntry();
            // Flush a full page and count its bytes toward the size target.
            if (pageBuilder.isFull()) {
                Page page = pageBuilder.build();
                pages.add(page);
                pageBuilder.reset();
                dataSize += page.getSizeInBytes();
            }
        }
        // NOTE(review): the column is still named "map" (copy-paste from the
        // map datasets); kept as-is to avoid changing output schemas, but
        // "array" would be clearer — confirm nothing keys off the name.
        return new TestData(ImmutableList.of("map"), ImmutableList.of(type), pages.build());
    }
};
public abstract TestData createTestData(FileFormat format);
}
/**
 * Entry point: runs every benchmark in this class (presumably via the JMH
 * harness behind {@code benchmark()} — confirm) with a fixed 4 GB heap and
 * the G1 collector, then prints the aggregated results.
 */
public static void main(String[] args)
        throws Exception
{
    printResults(
            benchmark(BenchmarkHiveFileFormat.class)
                    .withOptions(opts -> opts.jvmArgsAppend("-Xmx4g", "-Xms4g", "-XX:+UseG1GC"))
                    .run());
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.sanity;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.prestosql.connector.CatalogName;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.metadata.Metadata;
import io.prestosql.metadata.TableHandle;
import io.prestosql.plugin.tpch.TpchColumnHandle;
import io.prestosql.plugin.tpch.TpchTableHandle;
import io.prestosql.spi.type.TypeOperators;
import io.prestosql.sql.parser.SqlParser;
import io.prestosql.sql.planner.PlanNodeIdAllocator;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.TypeAnalyzer;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.assertions.BasePlanTest;
import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.TableScanNode;
import io.prestosql.testing.TestingTransactionHandle;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Optional;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.sql.planner.plan.AggregationNode.Step.FINAL;
import static io.prestosql.sql.planner.plan.AggregationNode.Step.PARTIAL;
import static io.prestosql.sql.planner.plan.AggregationNode.groupingSets;
import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.LOCAL;
import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static io.prestosql.sql.planner.plan.ExchangeNode.Type.REPARTITION;
import static io.prestosql.sql.planner.plan.JoinNode.Type.INNER;
/**
 * Tests for {@code ValidateAggregationsWithDefaultValues}: a FINAL aggregation
 * whose grouping sets produce a default value must be separated from its
 * PARTIAL aggregation by a hash exchange — REMOTE when the plan is
 * distributed, LOCAL when forced to a single node — otherwise the validator
 * throws {@link IllegalArgumentException}.
 */
public class TestValidateAggregationsWithDefaultValues
        extends BasePlanTest
{
    private Metadata metadata;
    private TypeOperators typeOperators = new TypeOperators();
    private PlanBuilder builder;
    // Shared fixture: the "nationkey" symbol and a scan of tpch nation.
    private Symbol symbol;
    private TableScanNode tableScanNode;

    @BeforeClass
    public void setup()
    {
        metadata = getQueryRunner().getMetadata();
        builder = new PlanBuilder(new PlanNodeIdAllocator(), metadata);
        CatalogName catalogName = getCurrentConnectorId();
        TableHandle nationTableHandle = new TableHandle(
                catalogName,
                new TpchTableHandle("nation", 1.0),
                TestingTransactionHandle.create(),
                Optional.empty());
        TpchColumnHandle nationkeyColumnHandle = new TpchColumnHandle("nationkey", BIGINT);
        symbol = new Symbol("nationkey");
        tableScanNode = builder.tableScan(nationTableHandle, ImmutableList.of(symbol), ImmutableMap.of(symbol, nationkeyColumnHandle));
    }

    // FINAL directly over PARTIAL with no exchange: invalid in distributed mode.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Final aggregation with default value not separated from partial aggregation by remote hash exchange")
    public void testGloballyDistributedFinalAggregationInTheSameStageAsPartialAggregation()
    {
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.aggregation(ap -> ap
                                .step(PARTIAL)
                                .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                .source(tableScanNode))));
        validatePlan(root, false);
    }

    // Same shape, single-node mode: still invalid, but a LOCAL exchange is expected.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Final aggregation with default value not separated from partial aggregation by local hash exchange")
    public void testSingleNodeFinalAggregationInTheSameStageAsPartialAggregation()
    {
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.aggregation(ap -> ap
                                .step(PARTIAL)
                                .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                .source(tableScanNode))));
        validatePlan(root, true);
    }

    // A values() source makes the subplan single-threaded, so no exchange is required.
    @Test
    public void testSingleThreadFinalAggregationInTheSameStageAsPartialAggregation()
    {
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.aggregation(ap -> ap
                                .step(PARTIAL)
                                .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                .source(builder.values()))));
        validatePlan(root, true);
    }

    // Valid distributed plan: REMOTE repartitioning exchange between PARTIAL and FINAL.
    @Test
    public void testGloballyDistributedFinalAggregationSeparatedFromPartialAggregationByRemoteHashExchange()
    {
        Symbol symbol = new Symbol("symbol");
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.exchange(e -> e
                                .type(REPARTITION)
                                .scope(REMOTE)
                                .fixedHashDistributionParitioningScheme(ImmutableList.of(symbol), ImmutableList.of(symbol))
                                .addInputsSet(symbol)
                                .addSource(builder.aggregation(ap -> ap
                                        .step(PARTIAL)
                                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                        .source(tableScanNode))))));
        validatePlan(root, false);
    }

    // Valid single-node plan: LOCAL repartitioning exchange between PARTIAL and FINAL.
    @Test
    public void testSingleNodeFinalAggregationSeparatedFromPartialAggregationByLocalHashExchange()
    {
        Symbol symbol = new Symbol("symbol");
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.exchange(e -> e
                                .type(REPARTITION)
                                .scope(LOCAL)
                                .fixedHashDistributionParitioningScheme(ImmutableList.of(symbol), ImmutableList.of(symbol))
                                .addInputsSet(symbol)
                                .addSource(builder.aggregation(ap -> ap
                                        .step(PARTIAL)
                                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                        .source(tableScanNode))))));
        validatePlan(root, true);
    }

    // The separating exchange may sit below an intervening join.
    @Test
    public void testWithPartialAggregationBelowJoin()
    {
        Symbol symbol = new Symbol("symbol");
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.join(
                                INNER,
                                builder.exchange(e -> e
                                        .type(REPARTITION)
                                        .scope(LOCAL)
                                        .fixedHashDistributionParitioningScheme(ImmutableList.of(symbol), ImmutableList.of(symbol))
                                        .addInputsSet(symbol)
                                        .addSource(builder.aggregation(ap -> ap
                                                .step(PARTIAL)
                                                .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                                .source(tableScanNode)))),
                                builder.values())));
        validatePlan(root, true);
    }

    // A join alone does not satisfy the requirement — the exchange must be present.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Final aggregation with default value not separated from partial aggregation by local hash exchange")
    public void testWithPartialAggregationBelowJoinWithoutSeparatingExchange()
    {
        Symbol symbol = new Symbol("symbol");
        PlanNode root = builder.aggregation(
                af -> af.step(FINAL)
                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                        .source(builder.join(
                                INNER,
                                builder.aggregation(ap -> ap
                                        .step(PARTIAL)
                                        .groupingSets(groupingSets(ImmutableList.of(symbol), 2, ImmutableSet.of(0)))
                                        .source(tableScanNode)),
                                builder.values())));
        validatePlan(root, true);
    }

    // Runs the validator against the given root inside a transaction.
    private void validatePlan(PlanNode root, boolean forceSingleNode)
    {
        getQueryRunner().inTransaction(session -> {
            // metadata.getCatalogHandle() registers the catalog for the transaction
            session.getCatalog().ifPresent(catalog -> metadata.getCatalogHandle(session, catalog));
            new ValidateAggregationsWithDefaultValues(forceSingleNode).validate(
                    root,
                    session,
                    metadata,
                    typeOperators,
                    new TypeAnalyzer(new SqlParser(), metadata),
                    TypeProvider.empty(),
                    WarningCollector.NOOP);
            return null;
        });
    }
}
| |
// ========================================================================
// Copyright (c) 1999-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.jndi;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.naming.Binding;
import javax.naming.CompoundName;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.LinkRef;
import javax.naming.Name;
import javax.naming.NameAlreadyBoundException;
import javax.naming.NameClassPair;
import javax.naming.NameNotFoundException;
import javax.naming.NameParser;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.NotContextException;
import javax.naming.OperationNotSupportedException;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.spi.NamingManager;
import org.eclipse.jetty.util.component.Dumpable;
import org.eclipse.jetty.util.log.Logger;
/*------------------------------------------------*/
/**
 * NamingContext
 *
 * <p>Implementation of the JNDI {@link Context} interface. All names handled
 * by this context are expected to be {@link CompoundName}s, not composite
 * names.</p>
 */
public class NamingContext implements Context, Cloneable, Dumpable
{
private final static Logger __log=NamingUtil.__log;
private final static List<Binding> __empty = Collections.emptyList();
public static final String LOCK_PROPERTY = "org.eclipse.jndi.lock";
public static final String UNLOCK_PROPERTY = "org.eclipse.jndi.unlock";
protected final Hashtable<String,Object> _env = new Hashtable<String,Object>();
protected Map<String,Binding> _bindings = new HashMap<String,Binding>();
protected NamingContext _parent = null;
protected String _name = null;
protected NameParser _parser = null;
private Collection<Listener> _listeners;
/*------------------------------------------------*/
/**
 * Naming Context Listener.
 *
 * <p>Receives callbacks when bindings are added to or removed from a
 * {@link NamingContext}, and may veto or replace an added binding.</p>
 */
public interface Listener
{
    /**
     * Called by {@link NamingContext#addBinding(Name, Object)} when adding
     * a binding.
     * @param ctx The context to add to.
     * @param binding The binding to add.
     * @return The binding to bind, or null if the binding should be ignored.
     */
    Binding bind(NamingContext ctx, Binding binding);

    /**
     * Called when a binding is removed.
     * @param ctx The context to unbind from
     * @param binding The binding that was unbound.
     */
    void unbind(NamingContext ctx, Binding binding);
}
/*------------------------------------------------*/
/**
 * NameEnumeration
 *
 * <p>Implementation of the {@link NamingEnumeration} interface, used to
 * return the results of {@link Context#list(Name)}.</p>
 */
public class NameEnumeration implements NamingEnumeration<NameClassPair>
{
    Iterator<Binding> _delegate;

    /**
     * @param e iterator over the bindings to expose as name/class pairs
     */
    public NameEnumeration(Iterator<Binding> e)
    {
        _delegate = e;
    }

    /** Nothing to release for an in-memory iterator. */
    public void close()
        throws NamingException
    {
    }

    public boolean hasMore()
        throws NamingException
    {
        return _delegate.hasNext();
    }

    public NameClassPair next()
        throws NamingException
    {
        Binding current = _delegate.next();
        return new NameClassPair(current.getName(), current.getClassName(), true);
    }

    public boolean hasMoreElements()
    {
        return _delegate.hasNext();
    }

    public NameClassPair nextElement()
    {
        Binding current = _delegate.next();
        return new NameClassPair(current.getName(), current.getClassName(), true);
    }
}
/*------------------------------------------------*/
/**
 * BindingEnumeration
 *
 * <p>Implementation of the {@link NamingEnumeration} interface, used to
 * return the results of {@link Context#listBindings(Name)}.</p>
 */
public class BindingEnumeration implements NamingEnumeration<Binding>
{
    Iterator<Binding> _delegate;

    /**
     * @param e iterator over the bindings to enumerate
     */
    public BindingEnumeration(Iterator<Binding> e)
    {
        _delegate = e;
    }

    /** Nothing to release for an in-memory iterator. */
    public void close()
        throws NamingException
    {
    }

    public boolean hasMore()
        throws NamingException
    {
        return _delegate.hasNext();
    }

    public Binding next()
        throws NamingException
    {
        // Return a fresh Binding (isRelative=true) rather than the stored one.
        Binding stored = _delegate.next();
        return new Binding(stored.getName(), stored.getClassName(), stored.getObject(), true);
    }

    public boolean hasMoreElements()
    {
        return _delegate.hasNext();
    }

    public Binding nextElement()
    {
        Binding stored = _delegate.next();
        return new Binding(stored.getName(), stored.getClassName(), stored.getObject(), true);
    }
}
/*------------------------------------------------*/
/**
 * Constructor.
 *
 * @param env environment properties copied into this context's environment (may be null)
 * @param name relative name of this context
 * @param parent immediate ancestor Context (can be null)
 * @param parser NameParser for this Context
 */
public NamingContext(Hashtable<String,Object> env,
                     String name,
                     NamingContext parent,
                     NameParser parser)
{
    _name = name;
    _parent = parent;
    _parser = parser;
    if (env != null)
    {
        _env.putAll(env);
    }
}
/*------------------------------------------------*/
/**
 * Creates a new <code>NamingContext</code> instance with no name, parent
 * or parser.
 *
 * @param env a <code>Hashtable</code> whose entries (if any) seed the environment
 */
public NamingContext(Hashtable<String,Object> env)
{
    this(env, null, null, null);
}
/*------------------------------------------------*/
/**
 * Constructor: creates an empty, unnamed context with no parent or parser.
 */
public NamingContext ()
{
}
/*------------------------------------------------*/
/**
 * Clone this NamingContext.
 *
 * <p>{@link Object#clone()} makes a shallow copy, so the clone initially
 * shares this context's binding map; we replace it with an independent copy
 * so that bindings added to the clone do not leak into the original. The
 * environment table cannot be replaced (the {@code _env} field is final), so
 * the clone continues to share {@code _env} with the original — as did the
 * previous implementation.</p>
 *
 * @return copy of this NamingContext
 * @exception CloneNotSupportedException if an error occurs
 */
public Object clone ()
    throws CloneNotSupportedException
{
    NamingContext ctx = (NamingContext)super.clone();
    // BUG FIX: previously this called ctx._bindings.putAll(_bindings) and
    // ctx._env.putAll(_env), but after a shallow clone those fields reference
    // the SAME maps as the original, so both calls were self-no-ops and the
    // clone shared (and could corrupt) the original's binding map.
    ctx._bindings = new HashMap<String,Binding>(_bindings);
    return ctx;
}
/*------------------------------------------------*/
/**
 * Getter for _name.
 *
 * @return name of this Context (relative to its parent, not absolute)
 */
public String getName ()
{
    return _name;
}

/*------------------------------------------------*/
/**
 * Getter for _parent.
 *
 * @return parent Context, or null if this is a root context
 */
public Context getParent()
{
    return _parent;
}

/*------------------------------------------------*/
/**
 * Setter for _parser.
 *
 * @param parser the NameParser used to parse String names in this Context
 */
public void setNameParser (NameParser parser)
{
    _parser = parser;
}
/*------------------------------------------------*/
/**
 * Bind a name to an object.
 *
 * <p>If the canonicalized name has a single component, the object is bound
 * directly in this context (after state-factory processing and Referenceable
 * conversion); otherwise the first component is resolved to a subcontext and
 * the remainder of the name is bound recursively.</p>
 *
 * @param name Name of the object
 * @param obj object to bind
 * @exception NamingException if this context is locked, the name is null or
 *            empty, an intermediate context is missing, or an intermediate
 *            binding is not a Context
 */
public void bind(Name name, Object obj)
    throws NamingException
{
    if (isLocked())
        throw new NamingException ("This context is immutable");
    Name cname = toCanonicalName(name);
    if (cname == null)
        throw new NamingException ("Name is null");
    if (cname.size() == 0)
        throw new NamingException ("Name is empty");
    //if no subcontexts, just bind it
    if (cname.size() == 1)
    {
        //get the object to be bound
        Object objToBind = NamingManager.getStateToBind(obj, name,this, _env);
        // Check for Referenceable
        if (objToBind instanceof Referenceable)
        {
            objToBind = ((Referenceable)objToBind).getReference();
        }
        //anything else we should be able to bind directly
        addBinding (cname, objToBind);
    }
    else
    {
        if(__log.isDebugEnabled())__log.debug("Checking for existing binding for name="+cname+" for first element of name="+cname.get(0));
        //walk down the subcontext hierarchy
        //need to ignore trailing empty "" name components
        String firstComponent = cname.get(0);
        Object ctx = null;
        // A leading empty component refers to this context itself.
        if (firstComponent.equals(""))
            ctx = this;
        else
        {
            Binding binding = getBinding (firstComponent);
            if (binding == null)
                throw new NameNotFoundException (firstComponent+ " is not bound");
            ctx = binding.getObject();
            if (ctx instanceof Reference)
            {
                //deference the object
                try
                {
                    ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
                }
                catch (NamingException e)
                {
                    throw e;
                }
                catch (Exception e)
                {
                    __log.warn("",e);
                    throw new NamingException (e.getMessage());
                }
            }
        }
        if (ctx instanceof Context)
        {
            ((Context)ctx).bind (cname.getSuffix(1), obj);
        }
        else
            throw new NotContextException ("Object bound at "+firstComponent +" is not a Context");
    }
}
/*------------------------------------------------*/
/**
 * Bind a name, supplied as a String, to an object.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #bind(Name, Object)}.</p>
 *
 * @param name a <code>String</code> value
 * @param obj an <code>Object</code> value
 * @exception NamingException if an error occurs
 */
public void bind(String name, Object obj)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    bind(parsed, obj);
}
/*------------------------------------------------*/
/**
 * Create a context as a child of this one.
 *
 * <p>For a single-component name the subcontext is created directly (failing
 * if anything is already bound there); for a multi-part name all intermediate
 * subcontexts must already exist and creation recurses into the first one.</p>
 *
 * @param name a <code>Name</code> value
 * @return a <code>Context</code> value
 * @exception NamingException if this context is locked, the name is null,
 *            empty or already bound, or an intermediate context is missing
 *            or not a Context
 */
public Context createSubcontext (Name name)
    throws NamingException
{
    if (isLocked())
    {
        NamingException ne = new NamingException ("This context is immutable");
        ne.setRemainingName(name);
        throw ne;
    }
    Name cname = toCanonicalName (name);
    if (cname == null)
        throw new NamingException ("Name is null");
    if (cname.size() == 0)
        throw new NamingException ("Name is empty");
    if (cname.size() == 1)
    {
        //not permitted to bind if something already bound at that name
        Binding binding = getBinding (cname);
        if (binding != null)
            throw new NameAlreadyBoundException (cname.toString());
        // The child gets a snapshot of this context's environment.
        Context ctx = new NamingContext ((Hashtable)_env.clone(), cname.get(0), this, _parser);
        addBinding (cname, ctx);
        return ctx;
    }
    //If the name has multiple subcontexts, walk the hierarchy by
    //fetching the first one. All intermediate subcontexts in the
    //name must already exist.
    String firstComponent = cname.get(0);
    Object ctx = null;
    // A leading empty component refers to this context itself.
    if (firstComponent.equals(""))
        ctx = this;
    else
    {
        Binding binding = getBinding (firstComponent);
        if (binding == null)
            throw new NameNotFoundException (firstComponent + " is not bound");
        ctx = binding.getObject();
        if (ctx instanceof Reference)
        {
            //deference the object
            if(__log.isDebugEnabled())__log.debug("Object bound at "+firstComponent +" is a Reference");
            try
            {
                ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
    }
    if (ctx instanceof Context)
    {
        return ((Context)ctx).createSubcontext (cname.getSuffix(1));
    }
    else
        throw new NotContextException (firstComponent +" is not a Context");
}
/*------------------------------------------------*/
/**
 * Create a Context as a child of this one, naming it with a String.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #createSubcontext(Name)}.</p>
 *
 * @param name a <code>String</code> value
 * @return a <code>Context</code> value
 * @exception NamingException if an error occurs
 */
public Context createSubcontext (String name)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    return createSubcontext(parsed);
}
/*------------------------------------------------*/
/**
 * Destroy the named subcontext by removing its binding from this context.
 *
 * <p>NOTE(review): the previous Javadoc said "Not supported", but the method
 * does remove the binding. It also does not verify that the bound object is
 * an empty Context before removal, as the JNDI contract describes — confirm
 * callers rely on that leniency.</p>
 *
 * @param name name of subcontext to remove
 * @exception NamingException if an error occurs
 */
public void destroySubcontext (String name)
    throws NamingException
{
    removeBinding(_parser.parse(name));
}
/*------------------------------------------------*/
/**
 * Destroy the named subcontext by removing its binding from this context.
 *
 * <p>NOTE(review): the previous Javadoc said "Not supported", but the method
 * does remove the binding. It also does not verify that the bound object is
 * an empty Context before removal, as the JNDI contract describes — confirm
 * callers rely on that leniency.</p>
 *
 * @param name name of subcontext to remove
 * @exception NamingException if an error occurs
 */
public void destroySubcontext (Name name)
    throws NamingException
{
    removeBinding(name);
}
/*------------------------------------------------*/
/**
 * Lookup an object by name.
 *
 * <p>A null or empty (canonical) name returns a copy of this context that
 * shares the same bindings. A single-component name resolves the binding,
 * following LinkRefs (relative "./" links resolve against this context,
 * absolute links via a new InitialContext) and dereferencing References.
 * A multi-part name recurses into the first component's subcontext.</p>
 *
 * @param name name of bound object
 * @return the bound object
 * @exception NamingException if an error occurs
 */
public Object lookup(Name name)
    throws NamingException
{
    if(__log.isDebugEnabled())__log.debug("Looking up name=\""+name+"\"");
    Name cname = toCanonicalName(name);
    if ((cname == null) || (cname.size() == 0))
    {
        __log.debug("Null or empty name, returning copy of this context");
        // The copy shares this context's binding map (not a snapshot).
        NamingContext ctx = new NamingContext (_env, _name, _parent, _parser);
        ctx._bindings=_bindings;
        return ctx;
    }
    if (cname.size() == 1)
    {
        Binding binding = getBinding (cname);
        if (binding == null)
        {
            NameNotFoundException nnfe = new NameNotFoundException();
            nnfe.setRemainingName(cname);
            throw nnfe;
        }
        Object o = binding.getObject();
        //handle links by looking up the link
        if (o instanceof LinkRef)
        {
            //if link name starts with ./ it is relative to current context
            String linkName = ((LinkRef)o).getLinkName();
            if (linkName.startsWith("./"))
                return this.lookup (linkName.substring(2));
            else
            {
                //link name is absolute
                InitialContext ictx = new InitialContext();
                return ictx.lookup (linkName);
            }
        }
        else if (o instanceof Reference)
        {
            //deference the object
            try
            {
                return NamingManager.getObjectInstance(o, cname, this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
        else
            return o;
    }
    //it is a multipart name, recurse to the first subcontext
    String firstComponent = cname.get(0);
    Object ctx = null;
    // A leading empty component refers to this context itself.
    if (firstComponent.equals(""))
        ctx = this;
    else
    {
        Binding binding = getBinding (firstComponent);
        if (binding == null)
        {
            NameNotFoundException nnfe = new NameNotFoundException();
            nnfe.setRemainingName(cname);
            throw nnfe;
        }
        //as we have bound a reference to an object factory
        //for the component specific contexts
        //at "comp" we need to resolve the reference
        ctx = binding.getObject();
        if (ctx instanceof Reference)
        {
            //deference the object
            try
            {
                ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
    }
    if (!(ctx instanceof Context))
        throw new NotContextException();
    return ((Context)ctx).lookup (cname.getSuffix(1));
}
/*------------------------------------------------*/
/**
 * Lookup the object bound at the given String name.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #lookup(Name)}.</p>
 *
 * @param name name of bound object
 * @return object bound to name
 * @exception NamingException if an error occurs
 */
public Object lookup (String name)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    return lookup(parsed);
}
/*------------------------------------------------*/
/**
 * Lookup link bound to name.
 *
 * <p>Behaves like {@link #lookup(Name)} except that a terminal
 * {@link LinkRef} is returned as-is rather than dereferenced. References are
 * still resolved, and intermediate contexts are traversed.</p>
 *
 * @param name name of link binding
 * @return LinkRef or plain object bound at name
 * @exception NamingException if an error occurs
 */
public Object lookupLink (Name name)
    throws NamingException
{
    Name cname = toCanonicalName(name);
    if (cname == null)
    {
        // Mirror lookup(): a null canonical name yields a copy of this
        // context sharing the same binding map.
        NamingContext ctx = new NamingContext (_env, _name, _parent, _parser);
        ctx._bindings=_bindings;
        return ctx;
    }
    if (cname.size() == 0)
        throw new NamingException ("Name is empty");
    if (cname.size() == 1)
    {
        Binding binding = getBinding (cname);
        if (binding == null)
            throw new NameNotFoundException();
        Object o = binding.getObject();
        //handle links by looking up the link
        if (o instanceof Reference)
        {
            //deference the object
            try
            {
                return NamingManager.getObjectInstance(o, cname.getPrefix(1), this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
        else
        {
            //object is either a LinkRef which we don't dereference
            //or a plain object in which case spec says we return it
            return o;
        }
    }
    //it is a multipart name, recurse to the first subcontext
    String firstComponent = cname.get(0);
    Object ctx = null;
    // A leading empty component refers to this context itself.
    if (firstComponent.equals(""))
        ctx = this;
    else
    {
        Binding binding = getBinding (firstComponent);
        if (binding == null)
            throw new NameNotFoundException ();
        ctx = binding.getObject();
        if (ctx instanceof Reference)
        {
            //deference the object
            try
            {
                ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
    }
    if (!(ctx instanceof Context))
        throw new NotContextException();
    // BUG FIX: previously recursed with lookup(), which would dereference a
    // terminal LinkRef inside the subcontext. Per the Context.lookupLink()
    // contract, links are followed except for the terminal atomic component
    // of the name, so the recursion must preserve lookupLink semantics.
    return ((Context)ctx).lookupLink (cname.getSuffix(1));
}
/*------------------------------------------------*/
/**
 * Lookup the link bound at the given String name.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #lookupLink(Name)}.</p>
 *
 * @param name name of link binding
 * @return LinkRef or plain object bound at name
 * @exception NamingException if an error occurs
 */
public Object lookupLink (String name)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    return lookupLink(parsed);
}
/*------------------------------------------------*/
/**
 * List all names bound at the Context named by Name.
 *
 * <p>A null canonical name yields an empty enumeration; an empty name lists
 * this context's own bindings; a multi-part name recurses into the first
 * component's subcontext.</p>
 *
 * @param name a <code>Name</code> value
 * @return a <code>NamingEnumeration</code> value
 * @exception NamingException if an error occurs
 */
public NamingEnumeration list(Name name)
    throws NamingException
{
    if(__log.isDebugEnabled())__log.debug("list() on Context="+getName()+" for name="+name);
    Name cname = toCanonicalName(name);
    if (cname == null)
    {
        return new NameEnumeration(__empty.iterator());
    }
    if (cname.size() == 0)
    {
        return new NameEnumeration (_bindings.values().iterator());
    }
    //multipart name
    String firstComponent = cname.get(0);
    Object ctx = null;
    // A leading empty component refers to this context itself.
    if (firstComponent.equals(""))
        ctx = this;
    else
    {
        Binding binding = getBinding (firstComponent);
        if (binding == null)
            throw new NameNotFoundException ();
        ctx = binding.getObject();
        if (ctx instanceof Reference)
        {
            //deference the object
            if(__log.isDebugEnabled())__log.debug("Dereferencing Reference for "+name.get(0));
            try
            {
                ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
    }
    if (!(ctx instanceof Context))
        throw new NotContextException();
    return ((Context)ctx).list (cname.getSuffix(1));
}
/*------------------------------------------------*/
/**
 * List all names bound at the Context named by the given String.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #list(Name)}.</p>
 *
 * @param name a <code>String</code> value
 * @return a <code>NamingEnumeration</code> value
 * @exception NamingException if an error occurs
 */
public NamingEnumeration list(String name)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    return list(parsed);
}
/*------------------------------------------------*/
/**
 * List all Bindings present at the Context named by Name.
 *
 * <p>A null canonical name yields an empty enumeration; an empty name lists
 * this context's own bindings; a multi-part name recurses into the first
 * component's subcontext.</p>
 *
 * @param name a <code>Name</code> value
 * @return a <code>NamingEnumeration</code> value
 * @exception NamingException if an error occurs
 */
public NamingEnumeration listBindings(Name name)
    throws NamingException
{
    Name cname = toCanonicalName (name);
    if (cname == null)
    {
        return new BindingEnumeration(__empty.iterator());
    }
    if (cname.size() == 0)
    {
        return new BindingEnumeration (_bindings.values().iterator());
    }
    //multipart name
    String firstComponent = cname.get(0);
    Object ctx = null;
    //if a name has a leading "/" it is parsed as "" so ignore it by staying
    //at this level in the tree
    if (firstComponent.equals(""))
        ctx = this;
    else
    {
        //it is a non-empty name component
        Binding binding = getBinding (firstComponent);
        if (binding == null)
            throw new NameNotFoundException ();
        ctx = binding.getObject();
        if (ctx instanceof Reference)
        {
            //deference the object
            try
            {
                ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
            }
            catch (NamingException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                __log.warn("",e);
                throw new NamingException (e.getMessage());
            }
        }
    }
    if (!(ctx instanceof Context))
        throw new NotContextException();
    return ((Context)ctx).listBindings (cname.getSuffix(1));
}
/*------------------------------------------------*/
/**
 * List all Bindings at the Context named by the given String.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #listBindings(Name)}.</p>
 *
 * @param name a <code>String</code> value
 * @return a <code>NamingEnumeration</code> value
 * @exception NamingException if an error occurs
 */
public NamingEnumeration listBindings(String name)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    return listBindings(parsed);
}
/*------------------------------------------------*/
/**
 * Overwrite or create a binding.
 *
 * <p>Like {@link #bind(Name, Object)} but removes any existing binding at
 * the terminal component before adding the new one.</p>
 *
 * @param name a <code>Name</code> value
 * @param obj an <code>Object</code> value
 * @exception NamingException if an error occurs
 */
public void rebind(Name name,
    Object obj)
    throws NamingException
{
    if (isLocked())
        throw new NamingException ("This context is immutable");
    Name cname = toCanonicalName(name);
    if (cname == null)
        throw new NamingException ("Name is null");
    if (cname.size() == 0)
        throw new NamingException ("Name is empty");
    //if no subcontexts, just bind it
    if (cname.size() == 1)
    {
        //check if it is a Referenceable
        Object objToBind = NamingManager.getStateToBind(obj, name, this, _env);
        if (objToBind instanceof Referenceable)
        {
            objToBind = ((Referenceable)objToBind).getReference();
        }
        removeBinding(cname);
        addBinding (cname, objToBind);
    }
    else
    {
        //walk down the subcontext hierarchy
        if(__log.isDebugEnabled())__log.debug("Checking for existing binding for name="+cname+" for first element of name="+cname.get(0));
        String firstComponent = cname.get(0);
        Object ctx = null;
        // A leading empty component refers to this context itself.
        if (firstComponent.equals(""))
            ctx = this;
        else
        {
            // NOTE(review): resolves name.get(0) rather than the canonical
            // firstComponent used by bind(); the two only differ if
            // toCanonicalName() changes the first component — confirm and
            // align with bind() if so.
            Binding binding = getBinding (name.get(0));
            if (binding == null)
                throw new NameNotFoundException (name.get(0)+ " is not bound");
            ctx = binding.getObject();
            if (ctx instanceof Reference)
            {
                //deference the object
                try
                {
                    ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
                }
                catch (NamingException e)
                {
                    throw e;
                }
                catch (Exception e)
                {
                    __log.warn("",e);
                    throw new NamingException (e.getMessage());
                }
            }
        }
        if (ctx instanceof Context)
        {
            ((Context)ctx).rebind (cname.getSuffix(1), obj);
        }
        else
            throw new NotContextException ("Object bound at "+firstComponent +" is not a Context");
    }
}
/*------------------------------------------------*/
/**
 * Overwrite or create a binding from a String name to an Object.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #rebind(Name, Object)}.</p>
 *
 * @param name a <code>String</code> value
 * @param obj an <code>Object</code> value
 * @exception NamingException if an error occurs
 */
public void rebind (String name,
    Object obj)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    rebind(parsed, obj);
}
/*------------------------------------------------*/
/**
 * Unbind the object bound at the given String name.
 *
 * <p>The String is parsed with this context's NameParser and the call is
 * delegated to {@link #unbind(Name)}. (The previous Javadoc said "Not
 * supported", which was incorrect.)</p>
 *
 * @param name a <code>String</code> value
 * @exception NamingException if an error occurs
 */
public void unbind (String name)
    throws NamingException
{
    Name parsed = _parser.parse(name);
    unbind(parsed);
}
/*------------------------------------------------*/
/**
 * Remove the binding for a Name, descending into subcontexts as needed.
 * (The previous Javadoc said "Not supported." — that was a copy-paste
 * error: unbind IS implemented.)
 *
 * An empty name is a silent no-op. Throws if this context is locked
 * (immutable), or if an intermediate component is unbound or is not a
 * Context.
 *
 * @param name the name whose binding should be removed
 * @exception NamingException if an error occurs
 */
public void unbind (Name name)
throws NamingException
{
//empty name: nothing to unbind
if (name.size() == 0)
return;
if (isLocked())
throw new NamingException ("This context is immutable");
//strip leading/trailing empty components, e.g. "/comp/env/" -> "comp/env"
Name cname = toCanonicalName(name);
if (cname == null)
throw new NamingException ("Name is null");
if (cname.size() == 0)
throw new NamingException ("Name is empty");
//if no subcontexts, just unbind it
if (cname.size() == 1)
{
removeBinding (cname);
}
else
{
//walk down the subcontext hierarchy
if(__log.isDebugEnabled())__log.debug("Checking for existing binding for name="+cname+" for first element of name="+cname.get(0));
String firstComponent = cname.get(0);
Object ctx = null;
//a leading empty component refers to this context itself
if (firstComponent.equals(""))
ctx = this;
else
{
//NOTE(review): looks up name.get(0) rather than firstComponent
//(cname.get(0)); the two differ only when the name had a leading
//empty component — confirm this asymmetry is intended.
Binding binding = getBinding (name.get(0));
if (binding == null)
throw new NameNotFoundException (name.get(0)+ " is not bound");
ctx = binding.getObject();
if (ctx instanceof Reference)
{
//dereference the Reference to obtain the actual object
try
{
ctx = NamingManager.getObjectInstance(ctx, getNameParser("").parse(firstComponent), this, _env);
}
catch (NamingException e)
{
throw e;
}
catch (Exception e)
{
//wrap any other failure as a NamingException (cause is logged)
__log.warn("",e);
throw new NamingException (e.getMessage());
}
}
}
//recurse into the subcontext with the remainder of the name
if (ctx instanceof Context)
{
((Context)ctx).unbind (cname.getSuffix(1));
}
else
throw new NotContextException ("Object bound at "+firstComponent +" is not a Context");
}
}
/*------------------------------------------------*/
/**
 * Renaming is not supported by this Context.
 *
 * @param oldName the existing name
 * @param newName the replacement name
 * @exception OperationNotSupportedException always thrown
 */
public void rename(Name oldName, Name newName) throws NamingException
{
    throw new OperationNotSupportedException();
}
/*------------------------------------------------*/
/**
 * Renaming is not supported by this Context.
 *
 * @param oldName the existing name
 * @param newName the replacement name
 * @exception OperationNotSupportedException always thrown
 */
public void rename(String oldName, String newName) throws NamingException
{
    throw new OperationNotSupportedException();
}
/*------------------------------------------------*/
/**
 * Join two names together, treating both as compound names.
 *
 * @param name the name to append
 * @param prefix the name to prepend
 * @return a copy of prefix with name's components appended
 * @exception NamingException if either argument is null
 */
public Name composeName(Name name, Name prefix) throws NamingException
{
    if (name == null)
        throw new NamingException("Name cannot be null");
    if (prefix == null)
        throw new NamingException("Prefix cannot be null");
    // Clone via the Name interface: the previous (CompoundName) cast
    // threw ClassCastException for any other Name implementation, and
    // the cast was unnecessary because only Name methods are used on
    // the copy.
    Name compoundName = (Name) prefix.clone();
    compoundName.addAll(name);
    return compoundName;
}
/*------------------------------------------------*/
/**
 * Join two String names together as compound names.
 *
 * @param name the name to append
 * @param prefix the name to prepend
 * @return the combined name as a String
 * @exception NamingException if either argument is null
 */
public String composeName(String name, String prefix) throws NamingException
{
    if (name == null)
        throw new NamingException("Name cannot be null");
    if (prefix == null)
        throw new NamingException("Prefix cannot be null");
    Name combined = _parser.parse(prefix);
    combined.add(name);
    return combined.toString();
}
/*------------------------------------------------*/
/**
 * Close this Context. This implementation holds no external
 * resources, so the call is a no-op.
 *
 * @exception NamingException never thrown by this implementation
 */
public void close() throws NamingException
{
    // nothing to release
}
/*------------------------------------------------*/
/**
 * Return the NameParser used by this Context.
 *
 * @param name ignored; the same parser serves every name
 * @return this Context's parser
 */
public NameParser getNameParser(Name name)
{
    return _parser;
}
/*------------------------------------------------*/
/**
 * Return the NameParser used by this Context.
 *
 * @param name ignored; the same parser serves every name
 * @return this Context's parser
 */
public NameParser getNameParser(String name)
{
    return _parser;
}
/*------------------------------------------------*/
/**
 * Compute the full name of this Context node by walking the parent
 * chain back to the root and prepending each ancestor's name.
 *
 * NOTE: if this Context has a URL namespace then the URL prefix
 * will be missing from the result.
 *
 * @return the full name of this Context
 * @exception NamingException if an error occurs
 */
public String getNameInNamespace() throws NamingException
{
    Name path = _parser.parse("");
    for (NamingContext node = this; node != null; node = (NamingContext) node.getParent())
    {
        String component = node.getName();
        if (component != null)
            path.add(0, component);
    }
    return path.toString();
}
/*------------------------------------------------*/
/**
 * Add an environment setting to this Context. A locked context only
 * accepts the unlock property (so the context can be unlocked again).
 *
 * @param propName name of the property to add
 * @param propVal value of the property to add
 * @return the previous value of the property, or null
 * @exception NamingException if the context is immutable
 */
public Object addToEnvironment(String propName, Object propVal) throws NamingException
{
    boolean unlockRequest = propName.equals(UNLOCK_PROPERTY);
    if (isLocked() && !unlockRequest)
        throw new NamingException("This context is immutable");
    return _env.put(propName, propVal);
}
/*------------------------------------------------*/
/**
 * Remove a property from this Context's environment.
 *
 * @param propName name of the property to remove
 * @return the previous value of the property, or null if it was absent
 * @exception NamingException if the context is immutable
 */
public Object removeFromEnvironment(String propName) throws NamingException
{
    if (isLocked())
        throw new NamingException("This context is immutable");
    return _env.remove(propName);
}
/*------------------------------------------------*/
/**
 * Get a snapshot of this Context's environment.
 *
 * @return a copy of the environment; modifying it does not affect this Context
 */
public Hashtable getEnvironment()
{
    return (Hashtable) _env.clone();
}
/*------------------------------------------------*/
/**
 * Add a name-to-object binding to this Context. Registered listeners
 * may transform the binding or veto it entirely (by returning null).
 *
 * @param name the name to bind
 * @param obj the object to bind to the name
 * @exception NameAlreadyBoundException if the name is already in use
 */
protected void addBinding(Name name, Object obj) throws NameAlreadyBoundException
{
    String key = name.toString();
    Binding binding = new Binding(key, obj);
    // Give every listener (this context's and its ancestors') a chance
    // to replace the binding; a null result vetoes it.
    for (Listener listener : findListeners())
    {
        binding = listener.bind(this, binding);
        if (binding == null)
            break;
    }
    if (__log.isDebugEnabled())
        __log.debug("Adding binding with key=" + key + " obj=" + obj + " for context=" + _name + " as " + binding);
    if (binding != null)
    {
        if (_bindings.containsKey(key))
            throw new NameAlreadyBoundException(name.toString());
        _bindings.put(key, binding);
    }
}
/*------------------------------------------------*/
/**
 * Look up the Binding for a Name in this Context only
 * (no subcontext traversal).
 *
 * @param name the name to look up
 * @return the Binding, or null if none exists
 */
protected Binding getBinding(Name name)
{
    return getBinding(name.toString());
}
/*------------------------------------------------*/
/**
 * Look up the Binding for a String key in this Context only
 * (no subcontext traversal).
 *
 * @param name the key to look up
 * @return the Binding, or null if none exists
 */
protected Binding getBinding(String name)
{
    return (Binding) _bindings.get(name);
}
/*------------------------------------------------*/
/**
 * Remove a binding from this Context and notify listeners of the
 * removal. Listeners are only notified when a binding actually existed.
 *
 * @param name the name whose binding should be removed
 */
protected void removeBinding(Name name)
{
    String key = name.toString();
    if (__log.isDebugEnabled())
        __log.debug("Removing binding with key=" + key);
    Binding removed = _bindings.remove(key);
    if (removed != null)
    {
        for (Listener listener : findListeners())
            listener.unbind(this, removed);
    }
}
/*------------------------------------------------*/
/**
 * Remove leading or trailing empty components from a name,
 * e.g. "/comp/env/" -> "comp/env". Names of size 0 or 1 are
 * returned unchanged; null is passed through.
 *
 * @param name the name to normalize
 * @return the normalized name
 */
public Name toCanonicalName(Name name)
{
    Name result = name;
    if (result != null && result.size() > 1)
    {
        if (result.get(0).equals(""))
            result = result.getSuffix(1);
        if (result.get(result.size() - 1).equals(""))
            result = result.getPrefix(result.size() - 1);
    }
    return result;
}
/* ------------------------------------------------------------ */
/**
 * Determine whether this Context is immutable. The context is
 * unlocked when neither the lock nor the unlock property is set,
 * or when both are set and their values match; otherwise it is locked.
 */
private boolean isLocked()
{
    Object lockKey = _env.get(LOCK_PROPERTY);
    Object unlockKey = _env.get(UNLOCK_PROPERTY);
    if (lockKey == null && unlockKey == null)
        return false;
    // Locked unless a matching unlock key is present.
    return !(lockKey != null && lockKey.equals(unlockKey));
}
/* ------------------------------------------------------------ */
/**
 * Render this Context and its bindings as a String for diagnostics.
 * Failures while dumping are logged rather than propagated.
 *
 * @return a textual dump of the context tree
 */
public String dump()
{
    StringBuilder out = new StringBuilder();
    try
    {
        dump(out, "");
    }
    catch (Exception e)
    {
        __log.warn(e);
    }
    return out.toString();
}
/* ------------------------------------------------------------ */
/**
 * Write a tree-style diagnostic dump of this Context's bindings to
 * the given Appendable, one line per binding, prefixed with indent.
 *
 * Each binding is rendered as:
 *  - a nested ContextFactory dump, when the key is "comp" and the
 *    value is a Reference produced by the Jetty ContextFactory;
 *  - the value's own dump, when it implements Dumpable;
 *  - otherwise "SimpleClassName=value" with newlines flattened to '|'.
 *
 * @param out destination for the dump
 * @param indent prefix applied to every emitted line
 * @exception IOException if writing to out fails
 */
public void dump(Appendable out,String indent) throws IOException
{
//header line: e.g. "NamingContext@1a2b3c"
out.append(this.getClass().getSimpleName()).append("@").append(Long.toHexString(this.hashCode())).append("\n");
int size=_bindings.size();
int i=0;
for (Map.Entry<String,Binding> entry : ((Map<String,Binding>)_bindings).entrySet())
{
//the last entry changes the child indent from " | " to "   "
boolean last=++i==size;
out.append(indent).append(" +- ").append(entry.getKey()).append(": ");
Binding binding = entry.getValue();
Object value = binding.getObject();
//special case: the java:comp subcontext bound via Jetty's ContextFactory
if ("comp".equals(entry.getKey()) && value instanceof Reference && "org.eclipse.jetty.jndi.ContextFactory".equals(((Reference)value).getFactoryClassName()))
{
ContextFactory.dump(out,indent+(last?" ":" | "));
}
else if (value instanceof Dumpable)
{
((Dumpable)value).dump(out,indent+(last?" ":" | "));
}
else
{
//fallback: one-line rendering with embedded newlines neutralized
out.append(value.getClass().getSimpleName()).append("=");
out.append(String.valueOf(value).replace('\n','|').replace('\r','|'));
out.append("\n");
}
}
}
/**
 * Collect the listeners registered on this Context and on every
 * ancestor Context up to the root.
 *
 * @return the accumulated listeners (possibly empty, never null)
 */
private Collection<Listener> findListeners()
{
    Collection<Listener> result = new ArrayList<Listener>();
    for (NamingContext node = this; node != null; node = (NamingContext) node.getParent())
    {
        if (node._listeners != null)
            result.addAll(node._listeners);
    }
    return result;
}
/**
 * Register a listener with this Context, lazily creating the
 * backing list on first use.
 *
 * @param listener the listener to register
 */
public void addListener(Listener listener)
{
    if (_listeners == null)
        _listeners = new ArrayList<Listener>();
    _listeners.add(listener);
}
/**
 * Remove a previously registered listener.
 *
 * @param listener the listener to remove
 * @return true if the listener was registered and has been removed
 */
public boolean removeListener(Listener listener)
{
    // Guard against NPE: _listeners is created lazily by addListener(),
    // so it is null until the first listener is registered.
    return _listeners != null && _listeners.remove(listener);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.core;
import java.io.IOException;
import java.util.Locale;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
/**
 * Wrapper around a <code>javax.servlet.http.HttpServletResponse</code>
 * that transforms an application response object (which might be the
 * original one passed to a servlet, or might be based on the 2.3
 * <code>javax.servlet.http.HttpServletResponseWrapper</code> class)
 * back into an internal <code>org.apache.catalina.HttpResponse</code>.
 * <p>
 * When this response is the target of a
 * <code>RequestDispatcher.include()</code> call, every mutating call
 * (headers, status, cookies, redirects, ...) is silently ignored.
 * <p>
 * <strong>WARNING</strong>: Due to Java's lack of support for multiple
 * inheritance, all of the logic in <code>ApplicationResponse</code> is
 * duplicated in <code>ApplicationHttpResponse</code>. Make sure that you
 * keep these two classes in synchronization when making changes!
 *
 * @author Craig R. McClanahan
 * @version $Id: ApplicationHttpResponse.java 1188832 2011-10-25 17:53:29Z markt $
 */
class ApplicationHttpResponse extends HttpServletResponseWrapper {

    // ----------------------------------------------------- Instance Variables

    /**
     * Descriptive information about this implementation.
     */
    protected static final String info =
        "org.apache.catalina.core.ApplicationHttpResponse/1.0";

    /**
     * Is this wrapped response the subject of an <code>include()</code>
     * call?
     */
    protected boolean included = false;

    // ----------------------------------------------------------- Constructors

    /**
     * Construct a new wrapped response around the specified servlet response.
     *
     * @param response The servlet response being wrapped
     */
    @Deprecated
    public ApplicationHttpResponse(HttpServletResponse response) {
        this(response, false);
    }

    /**
     * Construct a new wrapped response around the specified servlet response.
     *
     * @param response The servlet response being wrapped
     * @param included <code>true</code> if this response is being processed
     *                 by a <code>RequestDispatcher.include()</code> call
     */
    public ApplicationHttpResponse(HttpServletResponse response, boolean included) {
        super(response);
        setIncluded(included);
    }

    // -------------------------------------------------------- Private Methods

    /**
     * The wrapped response, typed as an HTTP response.
     */
    private HttpServletResponse http() {
        return (HttpServletResponse) getResponse();
    }

    // ------------------------------------------------ ServletResponse Methods

    /**
     * Disallow <code>reset()</code> calls on an included response.
     *
     * @exception IllegalStateException if the response has already
     *            been committed
     */
    @Override
    public void reset() {
        // If already committed, delegate anyway so the wrapped response
        // throws the required IllegalStateException.
        if (!included || getResponse().isCommitted()) {
            getResponse().reset();
        }
    }

    /**
     * Disallow <code>setContentLength()</code> calls on an included response.
     *
     * @param len The new content length
     */
    @Override
    public void setContentLength(int len) {
        if (!included) {
            getResponse().setContentLength(len);
        }
    }

    /**
     * Disallow <code>setContentType()</code> calls on an included response.
     *
     * @param type The new content type
     */
    @Override
    public void setContentType(String type) {
        if (!included) {
            getResponse().setContentType(type);
        }
    }

    /**
     * Disallow <code>setLocale()</code> calls on an included response.
     *
     * @param loc The new locale
     */
    @Override
    public void setLocale(Locale loc) {
        if (!included) {
            getResponse().setLocale(loc);
        }
    }

    /**
     * Ignore <code>setBufferSize()</code> calls on an included response.
     *
     * @param size The buffer size
     */
    @Override
    public void setBufferSize(int size) {
        if (!included) {
            getResponse().setBufferSize(size);
        }
    }

    // -------------------------------------------- HttpServletResponse Methods

    /**
     * Disallow <code>addCookie()</code> calls on an included response.
     *
     * @param cookie The new cookie
     */
    @Override
    public void addCookie(Cookie cookie) {
        if (!included) {
            http().addCookie(cookie);
        }
    }

    /**
     * Disallow <code>addDateHeader()</code> calls on an included response.
     *
     * @param name The new header name
     * @param value The new header value
     */
    @Override
    public void addDateHeader(String name, long value) {
        if (!included) {
            http().addDateHeader(name, value);
        }
    }

    /**
     * Disallow <code>addHeader()</code> calls on an included response.
     *
     * @param name The new header name
     * @param value The new header value
     */
    @Override
    public void addHeader(String name, String value) {
        if (!included) {
            http().addHeader(name, value);
        }
    }

    /**
     * Disallow <code>addIntHeader()</code> calls on an included response.
     *
     * @param name The new header name
     * @param value The new header value
     */
    @Override
    public void addIntHeader(String name, int value) {
        if (!included) {
            http().addIntHeader(name, value);
        }
    }

    /**
     * Disallow <code>sendError()</code> calls on an included response.
     *
     * @param sc The new status code
     *
     * @exception IOException if an input/output error occurs
     */
    @Override
    public void sendError(int sc) throws IOException {
        if (!included) {
            http().sendError(sc);
        }
    }

    /**
     * Disallow <code>sendError()</code> calls on an included response.
     *
     * @param sc The new status code
     * @param msg The new message
     *
     * @exception IOException if an input/output error occurs
     */
    @Override
    public void sendError(int sc, String msg) throws IOException {
        if (!included) {
            http().sendError(sc, msg);
        }
    }

    /**
     * Disallow <code>sendRedirect()</code> calls on an included response.
     *
     * @param location The new location
     *
     * @exception IOException if an input/output error occurs
     */
    @Override
    public void sendRedirect(String location) throws IOException {
        if (!included) {
            http().sendRedirect(location);
        }
    }

    /**
     * Disallow <code>setDateHeader()</code> calls on an included response.
     *
     * @param name The new header name
     * @param value The new header value
     */
    @Override
    public void setDateHeader(String name, long value) {
        if (!included) {
            http().setDateHeader(name, value);
        }
    }

    /**
     * Disallow <code>setHeader()</code> calls on an included response.
     *
     * @param name The new header name
     * @param value The new header value
     */
    @Override
    public void setHeader(String name, String value) {
        if (!included) {
            http().setHeader(name, value);
        }
    }

    /**
     * Disallow <code>setIntHeader()</code> calls on an included response.
     *
     * @param name The new header name
     * @param value The new header value
     */
    @Override
    public void setIntHeader(String name, int value) {
        if (!included) {
            http().setIntHeader(name, value);
        }
    }

    /**
     * Disallow <code>setStatus()</code> calls on an included response.
     *
     * @param sc The new status code
     */
    @Override
    public void setStatus(int sc) {
        if (!included) {
            http().setStatus(sc);
        }
    }

    /**
     * Disallow <code>setStatus()</code> calls on an included response.
     *
     * @param sc The new status code
     * @param msg The new message
     * @deprecated As of version 2.1, due to ambiguous meaning of the message
     *             parameter. To set a status code use
     *             <code>setStatus(int)</code>, to send an error with a
     *             description use <code>sendError(int, String)</code>.
     */
    @Deprecated
    @Override
    public void setStatus(int sc, String msg) {
        if (!included) {
            http().setStatus(sc, msg);
        }
    }

    // -------------------------------------------------------- Package Methods

    /**
     * Return descriptive information about this implementation.
     */
    public String getInfo() {
        return info;
    }

    /**
     * Return the included flag for this response.
     */
    @Deprecated
    boolean isIncluded() {
        return this.included;
    }

    /**
     * Set the included flag for this response.
     *
     * @param included The new included flag
     */
    void setIncluded(boolean included) {
        this.included = included;
    }

    /**
     * Set the response that we are wrapping.
     *
     * @param response The new wrapped response
     */
    void setResponse(HttpServletResponse response) {
        super.setResponse(response);
    }
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workbench.wi.backend.server.dd;
import java.lang.reflect.Field;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.guvnor.common.services.shared.message.Level;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.jbpm.workbench.wi.dd.model.DeploymentDescriptorModel;
import org.jbpm.workbench.wi.dd.model.ItemObjectModel;
import org.jbpm.workbench.wi.dd.validation.DeploymentDescriptorValidationMessage;
import org.jgroups.util.UUID;
import org.junit.Test;
import org.kie.internal.runtime.conf.AuditMode;
import org.kie.internal.runtime.conf.DeploymentDescriptor;
import org.kie.internal.runtime.conf.NamedObjectModel;
import org.kie.internal.runtime.conf.ObjectModel;
import org.kie.internal.runtime.conf.PersistenceMode;
import org.kie.internal.runtime.conf.RuntimeStrategy;
import static org.assertj.core.api.Assertions.*;
public class DDEditorServiceTest extends DDEditorServiceImpl {
private static Random random = new Random();
private static ObjectModel getObjectModelParameter(String resolver) {
return new ObjectModel(resolver,
UUID.randomUUID().toString(),
Integer.toString(random.nextInt(100000)));
}
@Test
@SuppressWarnings("unchecked")
public void marshalUnmarshalTest() throws Exception {
// setup
Map<String, Field> depDescFields = new HashMap<>();
Map<String, Field> depDescModelFields = new HashMap<>();
Map<String, Field>[] fieldMaps = new Map[]{depDescFields, depDescModelFields};
Field[][] fields = new Field[][]{DeploymentDescriptorImpl.class.getDeclaredFields(),
DeploymentDescriptorModel.class.getDeclaredFields()};
for (int i = 0; i < fieldMaps.length; ++i) {
for (Field field : fields[i]) {
fieldMaps[i].put(field.getName(),
field);
}
}
// test data
DeploymentDescriptorImpl origDepDesc = new DeploymentDescriptorImpl();
origDepDesc.setAuditMode(AuditMode.JMS);
origDepDesc.setAuditPersistenceUnit("audit-persist");
origDepDesc.setClasses(Collections.singletonList("class1"));
origDepDesc.setLimitSerializationClasses(true);
origDepDesc.setPersistenceMode(PersistenceMode.JPA);
origDepDesc.setPersistenceUnit("save-thingy");
origDepDesc.setRequiredRoles(Collections.singletonList("roles"));
origDepDesc.setRuntimeStrategy(RuntimeStrategy.PER_PROCESS_INSTANCE);
String resolver = "config-resolver";
origDepDesc.setConfiguration(Collections.singletonList(
new NamedObjectModel(resolver,
"config-name",
"classname",
getObjectModelParameter(resolver))));
resolver = "env-resolver";
origDepDesc.setEnvironmentEntries(Collections.singletonList(
new NamedObjectModel(resolver,
"env-name",
"classname",
getObjectModelParameter(resolver))));
resolver = "event-resolver";
origDepDesc.setEventListeners(
Collections.singletonList(new ObjectModel(resolver,
"listener-id",
getObjectModelParameter(resolver))));
resolver = "glob-resolver";
origDepDesc.setGlobals(Collections.singletonList(
new NamedObjectModel(resolver,
"glob-name",
"classname",
getObjectModelParameter(resolver))));
resolver = "marsh-resolver";
origDepDesc.setMarshallingStrategies(
Collections.singletonList(new ObjectModel(resolver,
"marsh-id",
getObjectModelParameter(resolver))));
resolver = "task-resolver";
origDepDesc.setTaskEventListeners(
Collections.singletonList(new ObjectModel(resolver,
"listener-id",
getObjectModelParameter(resolver))));
resolver = "work-resolver";
origDepDesc.setWorkItemHandlers(Collections.singletonList(
new NamedObjectModel(resolver,
"item-name",
"handler-classname",
getObjectModelParameter(resolver))));
// round trip DeploymenDescriptor
DeploymentDescriptorModel origModel = marshal(origDepDesc);
// path argument not used in unmarshal??
DeploymentDescriptor copyDepDesc = unmarshal(null,
origModel);
// compare round-tripped DeploymentDescriptor
assertThat(origDepDesc).isEqualToIgnoringGivenFields(copyDepDesc, "mappedRoles");
// round trip DeploymenDescriptorModel
DeploymentDescriptorModel copyModel = marshal(copyDepDesc);
// compare round-tripped DeploymentDescriptorModel
assertThat(origModel).isEqualToComparingFieldByFieldRecursively(copyModel);
}
@Test
public void testValidateEmptyResolver() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(null,
"item-name",
"handler-classname")));
assertThat(validationMessages).hasSize(1);
ValidationMessage error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.ERROR);
assertThat(error.getText()).startsWith("No resolver selected");
validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel("",
"item-name",
"handler-classname")));
assertThat(validationMessages.size()).isEqualTo(1);
error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.ERROR);
assertThat(error.getText()).startsWith("Not valid resolver selected");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_NOT_VALID_RESOLVER);
}
@Test
public void testValidateReflectionResolver() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.REFLECTION_RESOLVER,
"item-name",
"java.lang.String")));
assertThat(validationMessages).isEmpty();
}
@Test
public void testValidateReflectionResolverInvalid() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.REFLECTION_RESOLVER,
"item-name",
"handler-classname")));
assertThat(validationMessages).hasSize(1);
ValidationMessage error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.ERROR);
assertThat(error.getText()).startsWith("Identifier is not valid Java class which is required by reflection resolver");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_NOT_VALID_REFLECTION_IDENTIFIER);
}
@Test
public void testValidateReflectionResolverMissingIdentifier() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.REFLECTION_RESOLVER,
"item-name",
"")));
assertThat(validationMessages).hasSize(2);
ValidationMessage error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.ERROR);
assertThat(error.getText()).startsWith("Identifier cannot be empty");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_MISSING_IDENTIFIER);
error = validationMessages.get(1);
assertThat(error.getLevel()).isEqualTo(Level.ERROR);
assertThat(error.getText()).startsWith("Identifier is not valid Java class which is required by reflection resolver");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_NOT_VALID_REFLECTION_IDENTIFIER);
}
@Test
public void testValidateReflectionResolverNameEmpty() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.REFLECTION_RESOLVER,
"",
"java.lang.String")));
assertThat(validationMessages).hasSize(1);
ValidationMessage error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.ERROR);
assertThat(error.getText()).startsWith("Name cannot be empty");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_MISSING_NAME);
}
@Test
public void testValidateMvelResolver() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.MVEL_RESOLVER,
"item-name",
"new String()")));
assertThat(validationMessages).isEmpty();
}
@Test
public void testValidateMvelResolverWithValidParameters() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.MVEL_RESOLVER,
"item-name",
"java.util.Arrays.asList(ksession, taskService, runtimeManager, classLoader, entityManagerFactory);")));
assertThat(validationMessages).isEmpty();
List<ValidationMessage> secondValidationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.MVEL_RESOLVER,
"item-name",
"org.jbpm.process.workitem.bpmn2.ServiceTaskHandler(ksession, classLoader)")));
assertThat(secondValidationMessages).isEmpty();
}
@Test
public void testValidateMvelResolverWithInvalidParameters() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.MVEL_RESOLVER,
"item-name",
"java.util.Arrays.asList(invalidParamExpr[);")));
assertThat(validationMessages).hasSize(1);
ValidationMessage error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.WARNING);
assertThat(error.getText()).startsWith("Could not compile mvel expression");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_NOT_VALID_MVEL_IDENTIFIER);
}
@Test
public void testValidateMvelResolverInvalid() {
List<ValidationMessage> validationMessages = validateObjectModels(null,
Collections.singletonList(
new NamedObjectModel(ItemObjectModel.MVEL_RESOLVER,
"item-name",
"org.jbpm.process.workitem.bpmn2.ServiceTaskHandler(ksession, classLoader")));
assertThat(validationMessages).hasSize(1);
ValidationMessage error = validationMessages.get(0);
assertThat(error.getLevel()).isEqualTo(Level.WARNING);
assertThat(error.getText()).startsWith("Could not compile mvel expression");
assertThat(error).isInstanceOf(DeploymentDescriptorValidationMessage.class);
assertThat(((DeploymentDescriptorValidationMessage) error).getKey()).isEqualTo(I18N_KEY_NOT_VALID_MVEL_IDENTIFIER);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.